From 98f8e854eb73452b349909dfde3797c07c65d074 Mon Sep 17 00:00:00 2001 From: Theodore Li Date: Thu, 7 May 2026 12:43:05 -0700 Subject: [PATCH 01/33] improvement(tables): extract TablesDetail wrapper, ship trigger followups (#4476) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ui improvements * Update status pils, make checkbox column sticky * add Run workflow to context menu * Refactor dispatching logic * fix checkbox width to be smaller if csv is small * Add drag behavior for workflows, stop workflow on multi select * fix z index of checkbox to left, add view workflow button * Switch to emcn buttons for Add inputs * Split up workflow sidebar from column sidebar, refactor cells * Lint and add auto run toggle * fix column reordering, add action bar * Create and use emcn square * Reconcile post-merge: drop positionMap, use rowId-based selection Staging refactored Tables UI to decouple from DB position (gutter from array index, checkedRows keyed by rowId, no PositionGapRows). Bring HEAD's action-bar / context-menu helpers in line: contextMenuRowIds, selectedRowIds, actionBarRowIds now key off row.id and walk `rows` directly. Drop the maxPosition / positionMap derived state. Collapse COLUMN_SIDEBAR_WIDTH_CSS to a numeric COLUMN_SIDEBAR_WIDTH used by both the sidebar shell and the table's reserved padding-right. Co-Authored-By: Claude Opus 4.7 (1M context) * feat(table): backfill remapped workflow outputs from execution logs When a workflow column is re-pointed to a different (blockId, path), populate its existing rows with the new output's value pulled from saved execution logs instead of leaving them empty until the next run. Rows where the new mapping has no logged value clear (matching the previous behavior for those rows), but rows where the workflow already has the new output's value surface immediately. 
Refactor backfillAddedGroupOutputs into a generalized backfillGroupOutputsFromLogs helper with an `overwrite` flag — used in both the added-outputs path (preserves hand-edited values) and the new remapped path (overwrites since the new mapping is the source of truth). Co-Authored-By: Claude Opus 4.7 (1M context) * fix(table): update column type when remapping workflow output A remap that changes the output's leaf type (string → number, json → boolean, etc.) was leaving the column's declared type stale. The clear- then-backfill flow then failed schema validation on every row, so the backfill silently aborted and the column stayed empty. Resolve the new leaf type via flattenWorkflowOutputs + columnTypeForLeaf for each mappingUpdate, and patch schema.columns[i].type before the schema write. The clear-tx then backfill ordering now works end-to-end across type changes. If the workflow or its target output can't be resolved (workflow deleted, block removed), fall back to leaving the column type alone — the backfill will skip rows whose picked value doesn't match, same as before. Co-Authored-By: Claude Opus 4.7 (1M context) * fix(table): stringify objects instead of "[object Object]" in cells If a column's declared type lags its row data (e.g. a workflow column mid-remap, where the schema cache hasn't refetched yet but the row data already has the new mapping's value), formatValueForInput and the cell-render text variant fell through to String(value) and rendered "[object Object]". JSON-stringify objects in both spots so the transient skew shows the actual data. Co-Authored-By: Claude Opus 4.7 (1M context) * fix(table): drop extra left border on workflow group meta header The meta cell had border-r/b/l while regular headers have only border-r/b. With border-separate tables, that extra 1px left border shifted the meta cell's content one pixel right of the columns below it. 
Co-Authored-By: Claude Opus 4.7 (1M context) * fix(table): align workflow meta header without dropping its left border Restore border-l and pull the cell back -1px with -ml-px so the visible left border overlaps the previous cell's right border instead of adding 1px to the meta cell's box. Content lines up with the columns below. Co-Authored-By: Claude Opus 4.7 (1M context) * fix(table): draw meta header left border via ::before pseudo Adding border-l to the meta cell shifted its content right by 1px because table-fixed + border-separate honors the border inside the colspan'd cell's width budget. -ml-px doesn't work on . Render the visible left edge via a ::before at left: -1px instead — paints over the prior cell's right border without consuming any of the meta cell's content area. Content lines up with the columns below. Co-Authored-By: Claude Opus 4.7 (1M context) * refactor(tables): add missing barrels, drop doubled-path imports Match the convention used by logs/components: every component folder exposes its public API via index.ts so consumers import from the folder name, not from its internal filenames. - New barrels: column-config-sidebar/, workflow-sidebar/, table-action-bar/, table/cells/, table/headers/. - Rename table-filter/index.tsx → index.ts (barrel is not a component). - Top-level components/index.ts re-exports every sibling folder so external consumers have one import path. - Replace `from '../foo/foo'` doubled paths in table.tsx with the shorter barrel-anchored form. Co-Authored-By: Claude Opus 4.7 (1M context) * refactor(tables): introduce TablesDetail wrapper as thin passthrough Phase 1 step 0 of the wrapper extraction (see plan okay-lets-make-a-shimmying-trinket.md). page.tsx now renders TablesDetail, which today is a passthrough to . Subsequent commits lift surface state out of
into this wrapper one piece at a time. The mothership chat path (the embedded callsite in resource-content.tsx) is untouched — `<Table>`
stays exportable as a lower-level component for embedded contexts. Co-Authored-By: Claude Opus 4.7 (1M context) * refactor(tables): lift slideout panel state into TablesDetail wrapper The three right-edge slideout panels (column config, workflow config, execution details) move out of
into the wrapper. The wrapper owns a single useReducer that encodes the at-most-one-open invariant as a discriminated union — opening any one panel automatically closes the others.
emits open requests via three new callback props. Also extract from inline-in-table.tsx to its own folder so the wrapper can compose it cleanly. Update the embedded mothership callsite (resource-content.tsx) to render instead of
`<Table>`. Phase 1 step 1 of the wrapper extraction. `<Table>`
shrinks from 3849 → 3787 lines; grows from 19 → 145 lines. Co-Authored-By: Claude Opus 4.7 (1M context) * refactor(tables): lift delete-table modal + mutation into wrapper The delete-table confirmation modal and `useDeleteTable` mutation move out of
`<Table>` into TablesDetail. `<Table>` exposes a new `onRequestDeleteTable` callback fired by the page-header Delete action. Co-Authored-By: Claude Opus 4.7 (1M context) * refactor(tables): lift CSV import dialog into wrapper ImportCsvDialog moves out of `<Table>`
. Grid exposes `onRequestImportCsv` fired by the page-header menu item; wrapper owns the open state and renders the dialog. Co-Authored-By: Claude Opus 4.7 (1M context) * refactor(tables): lift RowModal (edit + delete) into wrapper Both RowModal instances move out of `<Table>`
into the wrapper. Grid emits `onOpenRowModal(row)` (Space key) and `onRequestDeleteRows(snapshots)` (context menu). Post-delete cleanup (push undo, clear selection) needs grid-internal state, so the grid populates an `afterDeleteRowsSinkRef` callback that the wrapper's modal `onSuccess` invokes. Co-Authored-By: Claude Opus 4.7 (1M context) * refactor(tables): lift delete-columns modal into wrapper The destructive delete-columns confirmation modal moves into the wrapper. Grid emits `onRequestDeleteColumns(names)`; the cascade itself (per-column mutation, undo push, columnOrder + columnWidths cleanup) stays in the grid as a sink the wrapper invokes on confirm — too grid-internal to lift cleanly. Co-Authored-By: Claude Opus 4.7 (1M context) * refactor(tables): lift run/stop mutations + TableActionBar to wrapper useRunGroup and useCancelTableRuns move out of
into the wrapper, along with the render. Grid receives onRunGroup, onRunRows, onStopRow, onStopRows, onStopAll, and cancelRunsPending as props — used by the per-row gutter Play/Stop, the workflow-group meta-cell run menu, and the right-click context menu's Run/Stop on selection items. Action-bar selection state (actionBarRowIds, runningInActionBar, hasWorkflowColumns) is derived from grid-internal state, so the grid emits a `SelectionSnapshot` via `onSelectionChange` from a useEffect. Wrapper uses the snapshot to drive the floating . Phase 2 step 1 of the wrapper extraction. Co-Authored-By: Claude Opus 4.7 (1M context) * refactor(tables): lift queryOptions to wrapper queryOptions (filter + sort) moves out of
into the wrapper, making it a single source of truth that drives one useTable call. The wrapper passes the bundle down to the grid; sort/filter handlers in the grid call onQueryOptionsChange. Eliminates the previous double-useTable pattern (one for the grid's filtered/sorted view, one in the wrapper's hardcoded null/null query for sidebar metadata). Co-Authored-By: Claude Opus 4.7 (1M context) * refactor(tables): lift page header (breadcrumbs/options/filter) to wrapper Phase 3 of the wrapper extraction. The full page-header surface moves out of
: - ResourceHeader (breadcrumbs, table-rename UI, headerActions, createTrigger) - ResourceOptionsBar (sort + filter toggle) - TableFilter (filter panel — wrapper owns filterOpen state) - RunStatusControl (in the leading actions when runs are active) useRenameTable + useInlineRename for the breadcrumb name move to the wrapper. The grid populates pushTableRenameUndoSinkRef so the rename is still part of the grid's undo stack. Extract NewColumnDropdown and RunStatusControl from inline-in-table.tsx to their own folders so the wrapper composes them cleanly without reaching into the grid's internals. Hoist generateColumnName from grid-internal useCallback to a shared util so both the page-header and inline-header NewColumnDropdowns use the same logic. After this lift
is the data grid only — no page surface, no modals, no slideouts, no breadcrumbs. The selection snapshot now includes totalRunning so the wrapper can render the page-header RunStatusControl from outside the grid. Co-Authored-By: Claude Opus 4.7 (1M context) * chore(tables): cleanup pass on TablesDetail wrapper extraction Six-pass cleanup against the wrapper extraction diff: - Effects: add content-compare bailout to onSelectionChange emit so unchanged snapshots don't churn wrapper re-renders. - Memos: drop unnecessary activeSortState memo, fold into sortConfig. - Callbacks: remove ~10 useCallbacks with no observed reference (sidebars not memoized, modals not memoized, inline arrows on non-memoized children); keep the ones that feed into // (memoized) or grid-side useCallback deps. - Dead props: drop onQueryOptionsChange/onRequestDeleteTable/ onRequestImportCsv from
— the page-header lift made them unused but the props weren't removed. - React Query: drop redundant tableWorkflowGroupsRef (created when onRunRows was useCallback-wrapped; after callback cleanup it can read the query data directly). - emcn: normalize Loader sizing to h-[14px] w-[14px] to match the codebase convention. Co-Authored-By: Claude Opus 4.7 (1M context) * fix(table): re-seed columnOrder when columns change server-side The metadata-seed effect short-circuited after the first seed, so any later schema change (e.g. adding a workflow output column) couldn't push the new column into local columnOrder. The new column would then fall into the "remaining" bucket of `displayColumns` and render at the end of the table — until the user refreshed and the grid re-mounted with the now-current metadata. Drop the `metadataSeededRef.current` short-circuit from the early return so the effect can also reach the after-first-load re-seed branch, which already does the right thing (only re-seeds when the set of columns changes, leaves pure-reorder cases alone). Co-Authored-By: Claude Opus 4.7 (1M context) * refactor(tables): rename wrapper to
, grid to Match the naming convention used elsewhere in the workspace (workflow.tsx → , base.tsx → , logs.tsx → ). - tables-detail.tsx → table.tsx (exports
) - components/table/ → components/table-grid/ (exports ) - components/table-grid/table.tsx → table-grid.tsx - Drop — was a 3-line passthrough (executionId → useLogByExecutionId → ); inline directly into table.tsx where it's used. - Flatten components/run-status-control/ folder to a single components/run-status-control.tsx file. 25-line single-use component with no internal subdirs — folder was overhead. Matches knowledge's max-badge.tsx precedent. Net: 1 wrapper rename + grid rename + 2 folder collapses, all imports updated. The mothership chat callsite updates from to
. Co-Authored-By: Claude Opus 4.7 (1M context) * fix(table): don't show "Waiting" for autoRun=false workflow groups A workflow group with autoRun=false never fires from the scheduler — the cell stays empty until the user clicks Run manually. Treating empty cells as "Waiting" misleads the user into thinking the group will auto-fire once deps are filled, which it won't. Skip autoRun=false groups when computing the per-row waiting labels so their cells render the empty-dash instead of the Waiting pill. Co-Authored-By: Claude Opus 4.7 (1M context) * chore(copilot): regenerate tool catalog from copilot dev (#247) Pulls in the workflow_group operations on user_table: add_workflow_group / update_workflow_group / delete_workflow_group / add_workflow_group_output / delete_workflow_group_output / run_workflow_group, plus the autoRun / blockId / dependencies / groupId parameters and a tightened mapping description for import_file. Also picks up biome import-order fixes from `bun run lint`. * improvement(table): action bar in mothership + per-execution mode Three related improvements to the table action bar: 1. Reposition from `position: fixed` to `position: absolute` inside the table's container. Fixed-positioning anchored to the viewport, which centered the bar across the whole window instead of the table panel — wrong in mothership embedded view, where the table sits in the right half. Absolute scopes the bar to the table's bounds. 2. Show the bar for single-execution highlights — when the user selects one workflow-output cell, or 1 row × N cols all within the same workflow group. The bar enters per-execution mode with Run / Stop / View execution buttons targeting that one cell or group. 3. Skip View execution for cancelled cells. A cancelled cell may have been cancelled before the worker ever picked the job up, so its executionId can't be relied on. Tighten the gate everywhere (context menu + action bar) to only `completed` / `error` / `running`. 
Co-Authored-By: Claude Opus 4.7 (1M context) * fix(table): backfill on add_workflow_group_output, don't re-run addWorkflowGroupOutput (the one-shot single-output add path used by the copilot user_table tool) was calling triggerWorkflowGroupRun({ mode: 'all' }) after appending the output — that re-fired the workflow on every row. Trace a307ed8fd5fe2d931aa84dedab5a60f0 shows ~75 workflow-group-cell jobs enqueued in the seconds after a single add_workflow_group_output call. Replace with backfillGroupOutputsFromLogs (overwrite: false), the same flow updateWorkflowGroup uses when receiving newOutputColumns. Reads each row's saved trace spans and writes the new output's value back — no compute beyond a JSONB write per row, no double-billing the user for runs they didn't ask for. Co-Authored-By: Claude Opus 4.7 (1M context) * fix(table): drop sql.raw quote-escaping in column-name interpolation Six call sites in lib/table/service.ts built JSON-key string literals at runtime via `sql.raw(\`'\${name.replace(/'/g, "''")}'\`)` for use with PostgreSQL's `data->'key'` / `data->>'key'` operators. Practically safe (NAME_PATTERN gates column names to alphanumeric+underscore at insert time) but a smelly pattern that breaks the moment validation loosens. Both `data->` and `data->>` accept a parameterized text value as the key, so the `sql.raw` is unnecessary. Replace each with a normal `${name}::text` binding. No behavior change; eliminates the manual quote-escaping surface. Affected sites: renameColumn (the data-rewrite UPDATE), upsertRow's match filter, updateColumnType's IS-NOT-NULL gate, updateColumnConstraints' required-check + unique-duplicate-check. Co-Authored-By: Claude Opus 4.7 (1M context) * feat(copilot-tool): forward autoRun + mappingUpdates on update_workflow_group The sim-side service and contracts already accept both fields, but the copilot tool's update_workflow_group handler was dropping them on the floor. 
Now `args.autoRun` (toggle the persisted auto-fire flag) and `args.mappingUpdates` (per-output (blockId, path) swap) get forwarded through to updateWorkflowGroup. Pairs with the upcoming copilot-side change that exposes these in the tool catalog JSON / Go handler / prompting (see copilot branch redo-workflow-tools). Co-Authored-By: Claude Opus 4.7 (1M context) * fix(table): keep gutter border visible when hovering Run-row button The per-row Run button sat flush against the row-gutter cell's right border. Its hover background (rounded-rect surface-2) painted over the border line for the 20px height of the button, making the gutter divider appear to disappear at the hovered row. Add mr-px to the button so the hover bg stops 1px short of the cell's right edge, leaving the divider intact. * fix(table): unify auto-fire and manual run paths in scheduler scheduleWorkflowGroupRuns now owns eligibility, autoRun semantics, dep evaluation, and enqueue for both paths. Auto-fire callers omit opts; manual callers (triggerWorkflowGroupRun) pass { groupId, isManualRun: true } to bypass the autoRun=false skip and (for autoRun=false groups) the dep check. Per-row /run-workflow-group route delegates to triggerWorkflowGroupRun with rowIds=[rowId]. Single server-side path for both manual entry points. Also: optimisticallyScheduleNewlyEligibleGroups skips autoRun=false groups so editing a row's data doesn't phantom-mark autoRun=false output cells as Queued. * fix(table): render empty cells as blank, not em-dash Empty cells (any column type) showed an em-dash placeholder. Drop it so empty cells render blank — matches what the user expects when nothing's there. * fix(table): per-row Run fires autoRun=false groups regardless of deps handleRunRow filtered out every group whose deps weren't satisfied, which silently dropped autoRun=false groups (since their deps usually aren't satisfied — that's the whole point of autoRun=false). Click Run row, the autoRun=false group's cells stayed empty. 
Mirror the scheduler's semantics: autoRun=false bypasses the dep check, autoRun=true still requires deps. * fix ui shape * improvement(table): collapse run ops into run_column, derive action-bar buttons from selection The action bar now reflects what's actually selected: - Selection-driven scope (cells the user highlighted, not their full rows) - Play visible when there's anything empty/failed; Refresh when there's anything completed; both for mixed - run_cell / run_row deleted; everything funnels through run_column - Per-row gutter Play, right-click "Run workflows on N rows", and column-header menu all share the canonical run path - Shared RunMode type from the contract; cleanup pass via /simplify (readExecution / isExecInFlight reuse, runScope helper, flat onViewExecution prop) * chore(copilot): regen tool catalog after dropping run_cell / run_row + dependencies.workflowGroups Mirror the copilot-side catalog change so the generated TS catalog matches the deployed copilot tool surface. * fix(table): atomic per-key writes for executions, plus run-op race fixes The executions blob on user_table_rows was read-modify-written wholesale on every update. Concurrent writers (a column edit and a manual-retry stamp, two pickup calls, a cancel and a cascade) each computed a merge from their own snapshot, and the last writer clobbered keys it never touched — producing stuck "queued" cells, vanished stamps, and stale completed exec records reappearing after retries. Fixes: - updateRow / batchUpdateRows now apply executionsPatch via a SQL jsonb merge expression. Each writer only mutates the keys it explicitly patches; other keys are preserved. Eliminates the cross-key clobber. - writeWorkflowGroupState bypasses the stale-worker guard for `queued` (new scheduler stamp) and `cancelled` (authoritative cancel) writes — those ARE the new authority for the cell. Previously the new run's stamp was being rejected by the same guard meant to block the OLD worker's writes. 
- skipScheduler flag on UpdateRowData / BatchUpdateByIdData lets the cancel path and runWorkflowGroupsInternal opt out of the implicit auto-fire pass (cancel was waking up siblings; manual-run was racing its own scheduler). - CELL_CONTENT pinned to h-[22px] so status badges don't grow rows. * chore(table): remove table-row sockets, both sides Tables don't use realtime sockets in prod — strip the dead path so we stop paying the per-row HTTP forward + socket emit on every cell write. Polling on running execs already covers reconciliation. Sim side: - service.ts: drop notifyTableRowUpdated/Deleted, notifyTableDeleted, the postRealtimeBridge helper, and all callsites. - hooks/queries/tables.ts: drop the socket subscription block in useTableRows; poll-on-running stays. Remove useEffect / useSocket imports. - app/.../tables/[tableId]/hooks/use-table.ts: drop the merge-on-event useEffect and unused imports. - app/workspace/providers/socket-provider.tsx: drop joinTable/leaveTable, onTableRowUpdated/Deleted/onTableDeleted, currentTableId state, related events + types. Realtime side: - handlers/tables.ts deleted; index.ts no longer wires it. - routes/http.ts: drop /api/table-row-updated, /api/table-row-deleted, /api/table-deleted endpoints. - rooms/{memory,redis}-manager.ts: drop emitToTable, handleTableRowUpdated/ Deleted, handleTableDeleted, related imports. - rooms/types.ts: drop method declarations, TableRowUpdatedPayload type, tableRoomName helper. - middleware/permissions.ts: drop unused verifyTableAccess. Bonus from parallel work: - cell-content typewriter trigger refinement. * fix(table): clearing a workflow output cell also clears its exec record When the user wipes a workflow output column value, the auto-fire reactor needs to be re-armed for that group. 
Previously, a stale cancelled / error exec record blocked the eligibility predicate (gate at line 79 hard-rejects those statuses on auto-fire) and the cell stayed stuck in its old terminal state — visible as "Cancelled" cells that wouldn't re-run no matter what. Both updateRow and batchUpdateRows now derive an `executionsPatch[gid] = null` for any output column the patch sets to empty. The data clear and the exec clear ride the same SQL transaction, so the row never lands in a stale- status-with-empty-data state. Symmetric to how `completed` already worked via `areOutputsFilled` in the predicate — clearing the cell wins over the prior exec status, regardless of what that status was. (Also revert typewriter-trigger experiment from a parallel session that was in-progress on this branch.) * fix(table): waiting state, optimistic UX, schema-mutation polling, exec cleanup A bundle of small UX + correctness fixes around workflow-cell run state. cell-render.tsx - In-flight (queued/running/pending) now wins over the existing value, so re-runs surface immediately instead of looking like nothing happened until the worker writes the new value. - "Waiting on X" wins over a stale `cancelled` / `error` exec when deps are unmet — clearing a dep now reads as actionable instead of stuck. useRunColumn (hooks/queries/tables.ts) - onSettled now cancels in-flight polls before invalidating. Stops a poll that landed mid-mutation from clobbering the optimistic state with stale data, which produced the queued → cancelled → queued flicker. addWorkflowGroup / updateWorkflowGroup (autoRun toggle on) - Awaits scheduleRunsForTable instead of fire-and-forget. The route returned before the queued exec stamps committed, so the post-mutation refetch saw no in-flight cells and polling never started — cells looked stuck even though the server eventually stamped them. deleteColumn / deleteColumns - Strip orphaned executions[gid] keys when deleting a column orphans its parent group. 
Without this, stale running/queued exec records lingered on every row forever and inflated the page-header "N running" counter even on tables with no actually-running cells. UI - Action-bar leading label: "Selected N workflow cell(s)". - Context menu: Run / Refresh items mirror the action bar's Play / Refresh split, gated on the same selection-status flags so both surfaces show the actions that match the current state. * refactor(table): consolidate exec-status helpers + fix N-running counter Cleanup pass on the recent table changes — pulls duplicated predicates and SQL snippets into shared helpers and fixes one drift bug along the way. - isExecInFlight: now single export from lib/table/deps.ts. Removed the duplicate in components/table-grid/utils.ts. Used by isGroupEligible (server eligibility) and runningByRowId (client counter). - isOptimisticInFlight: kept local to hooks/queries/tables.ts — renamed from isInFlight to disambiguate from the stricter isExecInFlight. The two predicates differ on `pending` without a jobId: optimistic patches and poll-trigger want the broader version, eligibility wants the strict one. - areOutputsFilled: single export from lib/table/deps.ts, dropped duplicate from workflow-columns.ts. - classifyExecStatusMix: shared row × group walker in table-grid/utils.ts. Replaces two copies of the same loop in table-grid.tsx (selectionStats + contextMenuStats). Both surfaces now have the same short-circuit semantics, including the seen-all-selected-rows early break that contextMenuStats was missing. - stripGroupExecutions: SQL helper in service.ts. Replaces three copies of the `UPDATE user_table_rows SET executions = executions - $gid::text` pattern across deleteColumn / deleteColumns / deleteWorkflowGroup. Drift bug: - runningByRowId / totalRunning counted only `running` and `queued`. 
Every other in-flight check in the codebase treats post-stamp `pending` as in-flight too, so the page-header "N running" badge briefly dropped to 0 between scheduler stamp and worker pickup. Now uses isExecInFlight. * fix(table): address pr review (drop dead workflowNameById prop, reset didDragRef on dragend, align sidebar width) * fix(table): scope post-clear schedule to targeted groups, forward mode Multi-group manual runs (Run row, gutter Play, action-bar Play across mixed completed + cancelled cells) re-fired completed-and-filled siblings. runWorkflowGroupsInternal cleared only the groups it filtered, then called scheduleRunsForRows with isManualRun: true and no group / mode filter — so the post-clear pass walked every group on the table with default mode 'all', and any autoRun=true completed sibling whose deps were satisfied got queued again. Scope the post-clear call to targetGroups and forward mode. * fix(table): meta-cell drag-leave flicker guard + plumb unique on create * fix(table): strip sibling deps when removing workflow output via updateWorkflowGroup deleteWorkflowGroup already stripped removed-column deps from sibling groups, but updateWorkflowGroup (the path the UI takes when deleting one output of a multi-output group) didn't — schema validation then rejected the update with 'Group X depends on missing column Y'. 
* improvement(table): debug logs at every cascade decision branch * improvement(table): parallelize queued-stamp writes within concurrency-cap chunks * Simplify stripping column names * fix lint, ci --------- Co-authored-by: Claude Opus 4.7 (1M context) --- apps/realtime/src/handlers/index.ts | 2 - apps/realtime/src/handlers/tables.ts | 73 - apps/realtime/src/middleware/permissions.ts | 48 - apps/realtime/src/rooms/memory-manager.ts | 28 +- apps/realtime/src/rooms/redis-manager.ts | 27 +- apps/realtime/src/rooms/types.ts | 41 - apps/realtime/src/routes/http.ts | 46 - .../api/table/[tableId]/columns/run/route.ts | 48 + .../[tableId]/groups/[groupId]/run/route.ts | 67 - .../app/api/table/[tableId]/groups/route.ts | 4 + .../rows/[rowId]/run-workflow-group/route.ts | 97 -- .../resource-header/resource-header.tsx | 4 + .../resource-content/resource-content.tsx | 4 +- .../column-config-sidebar.tsx | 243 +++ .../column-types.ts | 9 +- .../components/column-config-sidebar/index.ts | 8 + .../column-sidebar/column-sidebar.tsx | 1314 --------------- .../components/context-menu/context-menu.tsx | 57 +- .../tables/[tableId]/components/index.ts | 7 +- .../components/new-column-dropdown/index.ts | 1 + .../new-column-dropdown.tsx | 79 + .../components/run-status-control.tsx | 41 + .../components/table-action-bar/index.ts | 1 + .../table-action-bar/table-action-bar.tsx | 172 ++ .../table-filter/{index.tsx => index.ts} | 0 .../table-grid/cells/cell-content.tsx | 59 + .../table-grid/cells/cell-render.tsx | 265 +++ .../cells/expanded-cell-popover.tsx | 0 .../components/table-grid/cells/index.ts | 4 + .../cells/inline-editors.tsx | 0 .../{table => table-grid}/constants.ts | 5 +- .../headers/column-header-menu.tsx | 79 +- .../table-grid/headers/column-type-icon.tsx | 50 + .../components/table-grid/headers/index.ts | 3 + .../headers/workflow-group-meta-cell.tsx | 178 +- .../[tableId]/components/table-grid/index.ts | 1 + .../table.tsx => table-grid/table-grid.tsx} | 1466 +++++++++-------- 
.../components/{table => table-grid}/types.ts | 0 .../components/{table => table-grid}/utils.ts | 55 +- .../components/table/cells/cell-content.tsx | 173 -- .../table/headers/column-type-icon.tsx | 59 - .../[tableId]/components/table/index.ts | 1 - .../components/workflow-sidebar/index.ts | 1 + .../workflow-sidebar/run-settings-section.tsx | 48 + .../workflow-sidebar/workflow-sidebar.tsx | 895 ++++++++++ .../tables/[tableId]/hooks/index.ts | 1 - .../[tableId]/hooks/use-row-execution.ts | 95 -- .../tables/[tableId]/hooks/use-table.ts | 133 +- .../[workspaceId]/tables/[tableId]/page.tsx | 2 +- .../[workspaceId]/tables/[tableId]/table.tsx | 685 ++++++++ .../[workspaceId]/tables/[tableId]/utils.ts | 22 +- .../import-csv-dialog/import-csv-dialog.tsx | 10 +- .../mcp-dynamic-args/mcp-dynamic-args.tsx | 14 +- .../panel/components/editor/editor.tsx | 16 +- .../w/[workflowId]/components/panel/panel.tsx | 4 +- .../preview-editor/preview-editor.tsx | 23 +- .../workspace/providers/socket-provider.tsx | 122 -- apps/sim/background/resume-execution.ts | 141 ++ .../background/workflow-column-execution.ts | 50 +- .../field-divider/field-divider.tsx | 42 + apps/sim/components/emcn/components/index.ts | 1 + apps/sim/components/emcn/icons/index.ts | 1 + apps/sim/components/emcn/icons/square.tsx | 27 + apps/sim/hooks/queries/tables.ts | 303 ++-- apps/sim/lib/api/client/errors.ts | 80 + apps/sim/lib/api/contracts/tables.ts | 76 +- apps/sim/lib/billing/cleanup-dispatcher.ts | 67 +- .../lib/copilot/generated/tool-catalog-v1.ts | 134 +- .../lib/copilot/generated/tool-schemas-v1.ts | 137 +- .../copilot/tools/server/table/user-table.ts | 30 +- .../lib/core/async-jobs/backends/database.ts | 136 +- .../core/async-jobs/backends/trigger-dev.ts | 17 + apps/sim/lib/core/async-jobs/inline-abort.ts | 35 - apps/sim/lib/core/async-jobs/types.ts | 28 +- apps/sim/lib/table/cell-write.ts | 51 +- apps/sim/lib/table/deps.ts | 144 ++ apps/sim/lib/table/service.ts | 705 +++++--- apps/sim/lib/table/types.ts | 
40 +- apps/sim/lib/table/validation.ts | 13 +- apps/sim/lib/table/workflow-columns.ts | 633 ++++--- apps/sim/tools/exa/search.ts | 2 +- scripts/check-api-validation-contracts.ts | 4 +- 82 files changed, 5830 insertions(+), 3957 deletions(-) delete mode 100644 apps/realtime/src/handlers/tables.ts create mode 100644 apps/sim/app/api/table/[tableId]/columns/run/route.ts delete mode 100644 apps/sim/app/api/table/[tableId]/groups/[groupId]/run/route.ts delete mode 100644 apps/sim/app/api/table/[tableId]/rows/[rowId]/run-workflow-group/route.ts create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-config-sidebar/column-config-sidebar.tsx rename apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/{column-sidebar => column-config-sidebar}/column-types.ts (66%) create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-config-sidebar/index.ts delete mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-sidebar/column-sidebar.tsx create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/new-column-dropdown/index.ts create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/new-column-dropdown/new-column-dropdown.tsx create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/run-status-control.tsx create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-action-bar/index.ts create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-action-bar/table-action-bar.tsx rename apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-filter/{index.tsx => index.ts} (100%) create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/cell-content.tsx create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/cell-render.tsx rename 
apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/{table => table-grid}/cells/expanded-cell-popover.tsx (100%) create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/index.ts rename apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/{table => table-grid}/cells/inline-editors.tsx (100%) rename apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/{table => table-grid}/constants.ts (61%) rename apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/{table => table-grid}/headers/column-header-menu.tsx (83%) create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/headers/column-type-icon.tsx create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/headers/index.ts rename apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/{table => table-grid}/headers/workflow-group-meta-cell.tsx (59%) create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/index.ts rename apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/{table/table.tsx => table-grid/table-grid.tsx} (74%) rename apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/{table => table-grid}/types.ts (100%) rename apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/{table => table-grid}/utils.ts (76%) delete mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/cells/cell-content.tsx delete mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/headers/column-type-icon.tsx delete mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/index.ts create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/workflow-sidebar/index.ts create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/workflow-sidebar/run-settings-section.tsx 
create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/workflow-sidebar/workflow-sidebar.tsx delete mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-row-execution.ts create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/table.tsx create mode 100644 apps/sim/components/emcn/components/field-divider/field-divider.tsx create mode 100644 apps/sim/components/emcn/icons/square.tsx delete mode 100644 apps/sim/lib/core/async-jobs/inline-abort.ts create mode 100644 apps/sim/lib/table/deps.ts diff --git a/apps/realtime/src/handlers/index.ts b/apps/realtime/src/handlers/index.ts index 8977eea550a..6ded2e54741 100644 --- a/apps/realtime/src/handlers/index.ts +++ b/apps/realtime/src/handlers/index.ts @@ -2,7 +2,6 @@ import { setupConnectionHandlers } from '@/handlers/connection' import { setupOperationsHandlers } from '@/handlers/operations' import { setupPresenceHandlers } from '@/handlers/presence' import { setupSubblocksHandlers } from '@/handlers/subblocks' -import { setupTableHandlers } from '@/handlers/tables' import { setupVariablesHandlers } from '@/handlers/variables' import { setupWorkflowHandlers } from '@/handlers/workflow' import type { AuthenticatedSocket } from '@/middleware/auth' @@ -14,6 +13,5 @@ export function setupAllHandlers(socket: AuthenticatedSocket, roomManager: IRoom setupSubblocksHandlers(socket, roomManager) setupVariablesHandlers(socket, roomManager) setupPresenceHandlers(socket, roomManager) - setupTableHandlers(socket, roomManager) setupConnectionHandlers(socket, roomManager) } diff --git a/apps/realtime/src/handlers/tables.ts b/apps/realtime/src/handlers/tables.ts deleted file mode 100644 index ae9a7c6f003..00000000000 --- a/apps/realtime/src/handlers/tables.ts +++ /dev/null @@ -1,73 +0,0 @@ -import { createLogger } from '@sim/logger' -import type { AuthenticatedSocket } from '@/middleware/auth' -import { verifyTableAccess } from '@/middleware/permissions' -import { type 
IRoomManager, tableRoomName } from '@/rooms/types' - -const logger = createLogger('TableHandlers') - -/** - * Wires `join-table` / `leave-table` socket events. Tables don't track presence - * or last-modified state — joining is a thin wrapper around `socket.join` so the - * Sim API → Realtime HTTP bridge can broadcast row updates back to subscribed clients. - */ -export function setupTableHandlers(socket: AuthenticatedSocket, _roomManager: IRoomManager) { - socket.on('join-table', async ({ tableId }: { tableId?: string }) => { - try { - if (!tableId || typeof tableId !== 'string') { - socket.emit('join-table-error', { - tableId: tableId ?? null, - error: 'tableId required', - code: 'INVALID_TABLE_ID', - retryable: false, - }) - return - } - - const userId = socket.userId - if (!userId) { - socket.emit('join-table-error', { - tableId, - error: 'Authentication required', - code: 'AUTHENTICATION_REQUIRED', - retryable: false, - }) - return - } - - const { hasAccess } = await verifyTableAccess(userId, tableId) - if (!hasAccess) { - socket.emit('join-table-error', { - tableId, - error: 'Access denied to table', - code: 'ACCESS_DENIED', - retryable: false, - }) - return - } - - const room = tableRoomName(tableId) - socket.join(room) - socket.emit('join-table-success', { tableId, socketId: socket.id }) - logger.debug(`Socket ${socket.id} (user ${userId}) joined ${room}`) - } catch (error) { - logger.error(`Error joining table room:`, error) - socket.emit('join-table-error', { - tableId: null, - error: 'Failed to join table', - code: 'JOIN_TABLE_FAILED', - retryable: true, - }) - } - }) - - socket.on('leave-table', async ({ tableId }: { tableId?: string }) => { - try { - if (!tableId || typeof tableId !== 'string') return - const room = tableRoomName(tableId) - socket.leave(room) - logger.debug(`Socket ${socket.id} left ${room}`) - } catch (error) { - logger.error(`Error leaving table room:`, error) - } - }) -} diff --git a/apps/realtime/src/middleware/permissions.ts 
b/apps/realtime/src/middleware/permissions.ts index db97b16f8a2..dcc893b1478 100644 --- a/apps/realtime/src/middleware/permissions.ts +++ b/apps/realtime/src/middleware/permissions.ts @@ -131,51 +131,3 @@ export async function verifyWorkflowAccess( return { hasAccess: false } } } - -/** - * Verify a user has read access to a table by virtue of workspace permission. - * Mirrors `verifyWorkflowAccess` for the table-room socket join check. - */ -export async function verifyTableAccess( - userId: string, - tableId: string -): Promise<{ hasAccess: boolean; workspaceId?: string }> { - try { - const { userTableDefinitions, permissions } = await import('@sim/db') - const tableData = await db - .select({ workspaceId: userTableDefinitions.workspaceId }) - .from(userTableDefinitions) - .where(and(eq(userTableDefinitions.id, tableId), isNull(userTableDefinitions.archivedAt))) - .limit(1) - - if (!tableData.length) { - logger.warn(`Table ${tableId} not found`) - return { hasAccess: false } - } - const { workspaceId } = tableData[0] - if (!workspaceId) return { hasAccess: false } - - const [permissionRow] = await db - .select({ permissionType: permissions.permissionType }) - .from(permissions) - .where( - and( - eq(permissions.userId, userId), - eq(permissions.entityType, 'workspace'), - eq(permissions.entityId, workspaceId) - ) - ) - .limit(1) - - if (!permissionRow?.permissionType) { - logger.warn( - `User ${userId} has no permission for workspace ${workspaceId} (table ${tableId})` - ) - return { hasAccess: false } - } - return { hasAccess: true, workspaceId } - } catch (error) { - logger.error(`Error verifying table access for user ${userId}, table ${tableId}:`, error) - return { hasAccess: false } - } -} diff --git a/apps/realtime/src/rooms/memory-manager.ts b/apps/realtime/src/rooms/memory-manager.ts index 0cd37daf493..a032e785bb5 100644 --- a/apps/realtime/src/rooms/memory-manager.ts +++ b/apps/realtime/src/rooms/memory-manager.ts @@ -1,13 +1,6 @@ import { createLogger } 
from '@sim/logger' import type { Server } from 'socket.io' -import { - type IRoomManager, - type TableRowUpdatedPayload, - tableRoomName, - type UserPresence, - type UserSession, - type WorkflowRoom, -} from '@/rooms/types' +import type { IRoomManager, UserPresence, UserSession, WorkflowRoom } from '@/rooms/types' const logger = createLogger('MemoryRoomManager') @@ -262,23 +255,4 @@ export class MemoryRoomManager implements IRoomManager { logger.info(`Notified ${room.users.size} users about workflow deployment change: ${workflowId}`) } - - emitToTable(tableId: string, event: string, payload: T): void { - this._io.to(tableRoomName(tableId)).emit(event, payload) - } - - async handleTableRowUpdated(tableId: string, payload: TableRowUpdatedPayload): Promise { - this.emitToTable(tableId, 'table-row-updated', { tableId, ...payload }) - } - - async handleTableRowDeleted(tableId: string, rowId: string): Promise { - this.emitToTable(tableId, 'table-row-deleted', { tableId, rowId }) - } - - async handleTableDeleted(tableId: string): Promise { - logger.info(`Handling table deletion notification for ${tableId}`) - this.emitToTable(tableId, 'table-deleted', { tableId, timestamp: Date.now() }) - // Eject sockets so they don't hold a stale room. Cross-pod safe via socket.io. 
- await this._io.in(tableRoomName(tableId)).socketsLeave(tableRoomName(tableId)) - } } diff --git a/apps/realtime/src/rooms/redis-manager.ts b/apps/realtime/src/rooms/redis-manager.ts index 0fb41417906..0e6b3eadf2b 100644 --- a/apps/realtime/src/rooms/redis-manager.ts +++ b/apps/realtime/src/rooms/redis-manager.ts @@ -1,13 +1,7 @@ import { createLogger } from '@sim/logger' import { createClient, type RedisClientType } from 'redis' import type { Server } from 'socket.io' -import { - type IRoomManager, - type TableRowUpdatedPayload, - tableRoomName, - type UserPresence, - type UserSession, -} from '@/rooms/types' +import type { IRoomManager, UserPresence, UserSession } from '@/rooms/types' const logger = createLogger('RedisRoomManager') @@ -463,23 +457,4 @@ export class RedisRoomManager implements IRoomManager { const userCount = await this.getUniqueUserCount(workflowId) logger.info(`Notified ${userCount} users about workflow deployment change: ${workflowId}`) } - - emitToTable(tableId: string, event: string, payload: T): void { - this._io.to(tableRoomName(tableId)).emit(event, payload) - } - - async handleTableRowUpdated(tableId: string, payload: TableRowUpdatedPayload): Promise { - this.emitToTable(tableId, 'table-row-updated', { tableId, ...payload }) - } - - async handleTableRowDeleted(tableId: string, rowId: string): Promise { - this.emitToTable(tableId, 'table-row-deleted', { tableId, rowId }) - } - - async handleTableDeleted(tableId: string): Promise { - logger.info(`Handling table deletion notification for ${tableId}`) - this.emitToTable(tableId, 'table-deleted', { tableId, timestamp: Date.now() }) - // Eject sockets across all pods via socket.io's Redis adapter. 
- await this._io.in(tableRoomName(tableId)).socketsLeave(tableRoomName(tableId)) - } } diff --git a/apps/realtime/src/rooms/types.ts b/apps/realtime/src/rooms/types.ts index 9c15c967d54..9553a427e1e 100644 --- a/apps/realtime/src/rooms/types.ts +++ b/apps/realtime/src/rooms/types.ts @@ -143,45 +143,4 @@ export interface IRoomManager { * Handle workflow deployment change - notify users to refresh deployment state */ handleWorkflowDeployed(workflowId: string): Promise - - /** - * Emit an event to all clients in a table room (`table:${tableId}`). - * Tables don't track presence/last-modified state — just pub/sub. - */ - emitToTable(tableId: string, event: string, payload: T): void - - /** - * Notify all clients in a table room of a row write (insert/update/cell-state-change). - * Sim API calls this via the `/api/table-row-updated` HTTP bridge after every successful - * row commit; the client merges the delta into its React Query cache. - */ - handleTableRowUpdated(tableId: string, payload: TableRowUpdatedPayload): Promise - - /** - * Notify all clients in a table room that a row has been deleted. - */ - handleTableRowDeleted(tableId: string, rowId: string): Promise - - /** - * Notify all clients in a table room that the table has been deleted; eject sockets. - */ - handleTableDeleted(tableId: string): Promise -} - -/** - * Payload broadcast on `table-row-updated`. Mirrors the shape of `TableRow.data` so - * the client can merge directly into its React Query rows cache. `position` and - * `updatedAt` are included for cache reconciliation; `data` is the full row data - * (not a per-cell delta) — see plan Notes. - */ -export interface TableRowUpdatedPayload { - rowId: string - data: Record - /** Per-workflow-group execution state. Keyed by `WorkflowGroup.id`. */ - executions?: Record - position: number - updatedAt: string | number } - -/** Socket.IO room name for a table. Namespaced from workflow rooms. 
*/ -export const tableRoomName = (tableId: string): string => `table:${tableId}` diff --git a/apps/realtime/src/routes/http.ts b/apps/realtime/src/routes/http.ts index 78cd89e63d9..0f8ed73cc52 100644 --- a/apps/realtime/src/routes/http.ts +++ b/apps/realtime/src/routes/http.ts @@ -150,52 +150,6 @@ export function createHttpHandler(roomManager: IRoomManager, logger: Logger) { return } - // Handle table row write notifications from the Sim API - if (req.method === 'POST' && req.url === '/api/table-row-updated') { - try { - const body = await readRequestBody(req) - const { tableId, rowId, data, executions, position, updatedAt } = JSON.parse(body) - await roomManager.handleTableRowUpdated(tableId, { - rowId, - data, - executions, - position, - updatedAt, - }) - sendSuccess(res) - } catch (error) { - logger.error('Error handling table row update notification:', error) - sendError(res, 'Failed to process table row update') - } - return - } - - if (req.method === 'POST' && req.url === '/api/table-row-deleted') { - try { - const body = await readRequestBody(req) - const { tableId, rowId } = JSON.parse(body) - await roomManager.handleTableRowDeleted(tableId, rowId) - sendSuccess(res) - } catch (error) { - logger.error('Error handling table row deletion notification:', error) - sendError(res, 'Failed to process table row deletion') - } - return - } - - if (req.method === 'POST' && req.url === '/api/table-deleted') { - try { - const body = await readRequestBody(req) - const { tableId } = JSON.parse(body) - await roomManager.handleTableDeleted(tableId) - sendSuccess(res) - } catch (error) { - logger.error('Error handling table deletion notification:', error) - sendError(res, 'Failed to process table deletion') - } - return - } - res.writeHead(404, { 'Content-Type': 'application/json' }) res.end(JSON.stringify({ error: 'Not found' })) } diff --git a/apps/sim/app/api/table/[tableId]/columns/run/route.ts b/apps/sim/app/api/table/[tableId]/columns/run/route.ts new file mode 100644 
index 00000000000..7b997c9c232 --- /dev/null +++ b/apps/sim/app/api/table/[tableId]/columns/run/route.ts @@ -0,0 +1,48 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { runColumnContract } from '@/lib/api/contracts/tables' +import { parseRequest } from '@/lib/api/server' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { withRouteHandler } from '@/lib/core/utils/with-route-handler' +import { runWorkflowColumn } from '@/lib/table/workflow-columns' +import { accessError, checkAccess } from '@/app/api/table/utils' + +const logger = createLogger('TableRunColumnAPI') + +interface RouteParams { + params: Promise<{ tableId: string }> +} + +/** POST /api/table/[tableId]/columns/run */ +export const POST = withRouteHandler(async (request: NextRequest, { params }: RouteParams) => { + const requestId = generateRequestId() + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + const parsed = await parseRequest(runColumnContract, request, { params }) + if (!parsed.success) return parsed.response + const { tableId } = parsed.data.params + const { workspaceId, groupIds, runMode, rowIds } = parsed.data.body + const access = await checkAccess(tableId, auth.userId, 'write') + if (!access.ok) return accessError(access, requestId, tableId) + + const { triggered } = await runWorkflowColumn({ + tableId, + workspaceId, + groupIds, + mode: runMode, + rowIds, + requestId, + }) + return NextResponse.json({ success: true, data: { triggered } }) + } catch (error) { + if (error instanceof Error && error.message === 'Invalid workspace ID') { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + logger.error(`run-column failed:`, error) + return 
NextResponse.json({ error: 'Failed to run columns' }, { status: 500 }) + } +}) diff --git a/apps/sim/app/api/table/[tableId]/groups/[groupId]/run/route.ts b/apps/sim/app/api/table/[tableId]/groups/[groupId]/run/route.ts deleted file mode 100644 index 80f80bb7945..00000000000 --- a/apps/sim/app/api/table/[tableId]/groups/[groupId]/run/route.ts +++ /dev/null @@ -1,67 +0,0 @@ -import { createLogger } from '@sim/logger' -import { type NextRequest, NextResponse } from 'next/server' -import { runWorkflowGroupContract } from '@/lib/api/contracts/tables' -import { parseRequest } from '@/lib/api/server' -import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' -import { generateRequestId } from '@/lib/core/utils/request' -import { withRouteHandler } from '@/lib/core/utils/with-route-handler' -import { triggerWorkflowGroupRun } from '@/lib/table/workflow-columns' -import { accessError, checkAccess } from '@/app/api/table/utils' - -const logger = createLogger('TableRunGroupAPI') - -interface RouteParams { - params: Promise<{ tableId: string; groupId: string }> -} - -/** - * POST /api/table/[tableId]/groups/[groupId]/run - * - * Manually triggers the workflow group for every eligible row in the table. - * Each eligible row's `executions[groupId]` is reset to `pending` so the - * scheduler picks it up and enqueues a per-cell trigger.dev job. Rows whose - * deps aren't satisfied or whose group is already running are skipped. 
- */ -export const POST = withRouteHandler(async (request: NextRequest, { params }: RouteParams) => { - const requestId = generateRequestId() - - try { - const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) - if (!authResult.success || !authResult.userId) { - return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) - } - - const parsed = await parseRequest(runWorkflowGroupContract, request, { params }) - if (!parsed.success) return parsed.response - const { tableId, groupId } = parsed.data.params - const { workspaceId, runMode, rowIds } = parsed.data.body - - const result = await checkAccess(tableId, authResult.userId, 'write') - if (!result.ok) return accessError(result, requestId, tableId) - const { table } = result - - if (table.workspaceId !== workspaceId) { - return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) - } - - const { triggered } = await triggerWorkflowGroupRun({ - tableId, - groupId, - workspaceId, - mode: runMode, - requestId, - rowIds, - }) - - return NextResponse.json({ success: true, data: { triggered } }) - } catch (error) { - if (error instanceof Error && error.message === 'Workflow group not found') { - return NextResponse.json({ error: 'Workflow group not found' }, { status: 404 }) - } - if (error instanceof Error && error.message === 'Invalid workspace ID') { - return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) - } - logger.error(`run-group failed:`, error) - return NextResponse.json({ error: 'Failed to run group' }, { status: 500 }) - } -}) diff --git a/apps/sim/app/api/table/[tableId]/groups/route.ts b/apps/sim/app/api/table/[tableId]/groups/route.ts index 847647fc397..bf74653212a 100644 --- a/apps/sim/app/api/table/[tableId]/groups/route.ts +++ b/apps/sim/app/api/table/[tableId]/groups/route.ts @@ -110,6 +110,10 @@ export const PATCH = withRouteHandler(async (request: NextRequest, { params }: R ...(validated.newOutputColumns !== 
undefined ? { newOutputColumns: validated.newOutputColumns } : {}), + ...(validated.mappingUpdates !== undefined + ? { mappingUpdates: validated.mappingUpdates } + : {}), + ...(validated.autoRun !== undefined ? { autoRun: validated.autoRun } : {}), }, requestId ) diff --git a/apps/sim/app/api/table/[tableId]/rows/[rowId]/run-workflow-group/route.ts b/apps/sim/app/api/table/[tableId]/rows/[rowId]/run-workflow-group/route.ts deleted file mode 100644 index aee786d226d..00000000000 --- a/apps/sim/app/api/table/[tableId]/rows/[rowId]/run-workflow-group/route.ts +++ /dev/null @@ -1,97 +0,0 @@ -import { createLogger } from '@sim/logger' -import { generateId } from '@sim/utils/id' -import { type NextRequest, NextResponse } from 'next/server' -import { runRowWorkflowGroupContract } from '@/lib/api/contracts/tables' -import { parseRequest } from '@/lib/api/server' -import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' -import { generateRequestId } from '@/lib/core/utils/request' -import { withRouteHandler } from '@/lib/core/utils/with-route-handler' -import type { RowExecutionMetadata } from '@/lib/table' -import { updateRow } from '@/lib/table' -import { accessError, checkAccess } from '@/app/api/table/utils' - -const logger = createLogger('TableRunWorkflowGroupAPI') - -interface RouteParams { - params: Promise<{ tableId: string; rowId: string }> -} - -/** - * POST /api/table/[tableId]/rows/[rowId]/run-workflow-group - * - * Manually (re-)runs a workflow group for a single row by force-resetting - * `executions[groupId]` to `pending`. The `updateRow` call fires the - * scheduler which enqueues the cell job. 
- */ -export const POST = withRouteHandler(async (request: NextRequest, { params }: RouteParams) => { - const requestId = generateRequestId() - - try { - const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) - if (!authResult.success || !authResult.userId) { - return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) - } - - const parsed = await parseRequest(runRowWorkflowGroupContract, request, { params }) - if (!parsed.success) return parsed.response - const { tableId, rowId } = parsed.data.params - const { workspaceId, groupId } = parsed.data.body - - const result = await checkAccess(tableId, authResult.userId, 'write') - if (!result.ok) return accessError(result, requestId, tableId) - const { table } = result - - if (table.workspaceId !== workspaceId) { - return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) - } - - const group = (table.schema.workflowGroups ?? []).find((g) => g.id === groupId) - if (!group) { - return NextResponse.json({ error: 'Workflow group not found' }, { status: 404 }) - } - - const executionId = generateId() - const pendingExec: RowExecutionMetadata = { - status: 'pending', - executionId, - jobId: null, - workflowId: group.workflowId, - error: null, - } - /** - * Clear the group's output cells so the rerun starts visually fresh — - * otherwise stale values from the previous run linger in the UI until the - * new run writes new ones (or doesn't, on error/router-skip). - */ - const clearedData = Object.fromEntries(group.outputs.map((o) => [o.columnName, null])) - const updated = await updateRow( - { - tableId, - rowId, - data: clearedData, - workspaceId, - executionsPatch: { [groupId]: pendingExec }, - }, - table, - requestId - ) - if (updated === null) { - // The cell-task cancellation guard rejected the write — typically a - // racing stop click that already wrote `cancelled` for this run. 
- // Surface 409 so the caller doesn't poll indefinitely for a run that - // was never enqueued. - return NextResponse.json( - { error: 'Run was cancelled before it could be scheduled' }, - { status: 409 } - ) - } - - return NextResponse.json({ success: true, data: { executionId } }) - } catch (error) { - if (error instanceof Error && error.message === 'Row not found') { - return NextResponse.json({ error: 'Row not found' }, { status: 404 }) - } - logger.error(`run-workflow-group failed:`, error) - return NextResponse.json({ error: 'Failed to run workflow group' }, { status: 500 }) - } -}) diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/resource-header.tsx b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/resource-header.tsx index 22686115782..9b4392d0110 100644 --- a/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/resource-header.tsx +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/resource-header.tsx @@ -55,6 +55,8 @@ interface ResourceHeaderProps { breadcrumbs?: BreadcrumbItem[] create?: CreateAction actions?: HeaderAction[] + /** Arbitrary content rendered in the right-aligned actions row, before `actions`. */ + leadingActions?: React.ReactNode /** Arbitrary content rendered in the right-aligned actions row, before the Create button. */ trailingActions?: React.ReactNode /** @@ -71,6 +73,7 @@ export const ResourceHeader = memo(function ResourceHeader({ breadcrumbs, create, actions, + leadingActions, trailingActions, createTrigger, }: ResourceHeaderProps) { @@ -106,6 +109,7 @@ export const ResourceHeader = memo(function ResourceHeader({ )}
+ {leadingActions} {actions?.map((action) => { const ActionIcon = action.icon return ( diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-content/resource-content.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-content/resource-content.tsx index 1adfb0f3445..e93fe37cd6a 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-content/resource-content.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-content/resource-content.tsx @@ -2,7 +2,6 @@ import { lazy, memo, Suspense, useEffect, useMemo, useRef } from 'react' import { createLogger } from '@sim/logger' -import { Square } from 'lucide-react' import { useRouter } from 'next/navigation' import { Button, PlayOutline, Skeleton, Tooltip } from '@/components/emcn' import { @@ -10,6 +9,7 @@ import { FileX, Folder as FolderIcon, Library, + Square, SquareArrowUpRight, WorkflowX, } from '@/components/emcn/icons' @@ -43,7 +43,7 @@ import { useUserPermissionsContext, useWorkspacePermissionsContext, } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider' -import { Table } from '@/app/workspace/[workspaceId]/tables/[tableId]/components' +import { Table } from '@/app/workspace/[workspaceId]/tables/[tableId]/table' import { useUsageLimits } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/hooks' import { useWorkflowExecution } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution' import { useFolders } from '@/hooks/queries/folders' diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-config-sidebar/column-config-sidebar.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-config-sidebar/column-config-sidebar.tsx new file mode 100644 index 00000000000..9792b758376 --- /dev/null +++ 
b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-config-sidebar/column-config-sidebar.tsx @@ -0,0 +1,243 @@ +'use client' + +import type React from 'react' +import { useState } from 'react' +import { toError } from '@sim/utils/errors' +import { X } from 'lucide-react' +import { Button, Combobox, FieldDivider, Input, Label, Switch, toast } from '@/components/emcn' +import { findValidationIssue, isValidationError } from '@/lib/api/client/errors' +import { cn } from '@/lib/core/utils/cn' +import type { ColumnDefinition } from '@/lib/table' +import { useAddTableColumn, useUpdateColumn } from '@/hooks/queries/tables' +import { PLAIN_COLUMN_TYPE_OPTIONS } from './column-types' + +/** + * Discriminates the two flows the column-config sidebar handles. Workflow + * configuration is a separate component (``) so this surface + * never has to branch on `isWorkflow`. + */ +export type ColumnConfig = + | { mode: 'create'; proposedName: string; type: ColumnDefinition['type'] } + | { mode: 'edit'; columnName: string } + +interface ColumnConfigSidebarProps { + /** When non-null the sidebar is open. */ + config: ColumnConfig | null + onClose: () => void + /** Existing column record for `mode: 'edit'`; ignored otherwise. */ + existingColumn: ColumnDefinition | null + workspaceId: string + tableId: string + /** Notify parent of a rename so it can rewrite local `columnOrder` / + * `columnWidths` keys that reference the old name. */ + onColumnRename?: (oldName: string, newName: string) => void +} + +/** + * Right-edge sidebar for plain (non-workflow) column configuration. Handles + * create (with type pre-chosen by the parent's "+ New column" dropdown) and + * edit. No `isWorkflow` branches — workflow-output columns route through + * `` instead. + * + * Form state seeds from props via lazy `useState` initializers; the parent + * uses `key={config?.columnName ?? 
'closed'}` to remount when switching + * columns, eliminating the prop-mirroring `useEffect` the previous combined + * sidebar relied on. + */ +export function ColumnConfigSidebar(props: ColumnConfigSidebarProps) { + // Mount the form body with `key` keyed on the config identity so opening a + // different column / mode remounts and re-seeds state from props. + const open = props.config !== null + return ( + + ) +} + +function configKey(config: ColumnConfig): string { + return config.mode === 'edit' ? `edit:${config.columnName}` : `create:${config.proposedName}` +} + +interface ColumnConfigBodyProps extends Omit { + config: ColumnConfig +} + +function ColumnConfigBody({ + config, + onClose, + existingColumn, + workspaceId, + tableId, + onColumnRename, +}: ColumnConfigBodyProps) { + const updateColumn = useUpdateColumn({ workspaceId, tableId }) + const addColumn = useAddTableColumn({ workspaceId, tableId }) + + const [nameInput, setNameInput] = useState(() => + config.mode === 'edit' ? (existingColumn?.name ?? config.columnName) : config.proposedName + ) + const [typeInput, setTypeInput] = useState(() => + config.mode === 'edit' ? (existingColumn?.type ?? 'string') : config.type + ) + const [uniqueInput, setUniqueInput] = useState(() => + config.mode === 'edit' ? !!existingColumn?.unique : false + ) + const [showValidation, setShowValidation] = useState(false) + const [nameError, setNameError] = useState(null) + + const saveDisabled = updateColumn.isPending || addColumn.isPending + const trimmedName = nameInput.trim() + + async function handleSave() { + if (!trimmedName) { + setShowValidation(true) + return + } + + try { + if (config.mode === 'create') { + await addColumn.mutateAsync({ + name: trimmedName, + type: typeInput, + ...(uniqueInput ? 
{ unique: true } : {}), + }) + toast.success(`Added "${trimmedName}"`) + onClose() + return + } + + const renamed = trimmedName !== config.columnName + const typeChanged = !!existingColumn && existingColumn.type !== typeInput + const uniqueChanged = !!existingColumn && !!existingColumn.unique !== uniqueInput + + const updates: { name?: string; type?: ColumnDefinition['type']; unique?: boolean } = { + ...(renamed ? { name: trimmedName } : {}), + ...(typeChanged ? { type: typeInput } : {}), + ...(uniqueChanged ? { unique: uniqueInput } : {}), + } + if (Object.keys(updates).length === 0) { + onClose() + return + } + + await updateColumn.mutateAsync({ columnName: config.columnName, updates }) + if (renamed) onColumnRename?.(config.columnName, trimmedName) + toast.success(`Saved "${trimmedName}"`) + onClose() + } catch (err) { + // Server validation errors carry a Zod issue array on the body; surface + // them inline next to the offending field instead of as a raw toast. + if (isValidationError(err)) { + const nameIssue = + findValidationIssue(err, ['updates', 'name']) ?? + findValidationIssue(err, ['name']) ?? + findValidationIssue(err, ['columnName']) + if (nameIssue) { + setNameError(nameIssue.message) + return + } + } + toast.error(toError(err).message) + } + } + + return ( +
+
+

Configure column

+ +
+ +
+
+ Column name + { + setNameInput(e.target.value) + if (nameError) setNameError(null) + }} + spellCheck={false} + autoComplete='off' + aria-invalid={(showValidation && !trimmedName) || nameError ? true : undefined} + /> + {showValidation && !trimmedName && } + {nameError && !(showValidation && !trimmedName) && } +
+ + {config.mode === 'edit' && ( + <> + +
+ Type + ({ + label: o.label, + value: o.type, + icon: o.icon, + }))} + value={typeInput} + onChange={(v) => setTypeInput(v as ColumnDefinition['type'])} + placeholder='Select type' + maxHeight={260} + /> +
+ + )} + + +
+
+ + setUniqueInput(!!v)} + /> +
+
+
+ +
+ + +
+
+ ) +} + +function RequiredLabel({ htmlFor, children }: { htmlFor?: string; children: React.ReactNode }) { + return ( + + ) +} + +function FieldError({ message }: { message: string }) { + return

{message}

+} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-sidebar/column-types.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-config-sidebar/column-types.ts similarity index 66% rename from apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-sidebar/column-types.ts rename to apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-config-sidebar/column-types.ts index 10e392e82a1..6c9f31ade67 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-sidebar/column-types.ts +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-config-sidebar/column-types.ts @@ -10,9 +10,9 @@ import { import type { ColumnDefinition } from '@/lib/table' /** - * UI-only column type. `'workflow'` is a virtual selection that lets the user - * configure a workflow group from the sidebar; on save, it expands into N real - * scalar columns + one workflow group, none of which carry a `'workflow'` type. + * UI-only column type. `'workflow'` is the virtual entry users pick from the + * "+ New column" dropdown to spawn a workflow group; the resulting columns are + * stored as scalar types under the hood (none carry `'workflow'`). */ export type SidebarColumnType = ColumnDefinition['type'] | 'workflow' @@ -30,3 +30,6 @@ export const COLUMN_TYPE_OPTIONS: ColumnTypeOption[] = [ { type: 'json', label: 'JSON', icon: TypeJson }, { type: 'workflow', label: 'Workflow', icon: PlayOutline }, ] + +/** Plain column types (no workflow). Used by ``'s type combobox in edit mode. 
*/ +export const PLAIN_COLUMN_TYPE_OPTIONS = COLUMN_TYPE_OPTIONS.filter((o) => o.type !== 'workflow') diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-config-sidebar/index.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-config-sidebar/index.ts new file mode 100644 index 00000000000..e458001136d --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-config-sidebar/index.ts @@ -0,0 +1,8 @@ +export type { ColumnConfig } from './column-config-sidebar' +export { ColumnConfigSidebar } from './column-config-sidebar' +export { + COLUMN_TYPE_OPTIONS, + type ColumnTypeOption, + PLAIN_COLUMN_TYPE_OPTIONS, + type SidebarColumnType, +} from './column-types' diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-sidebar/column-sidebar.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-sidebar/column-sidebar.tsx deleted file mode 100644 index 73017fc25ec..00000000000 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-sidebar/column-sidebar.tsx +++ /dev/null @@ -1,1314 +0,0 @@ -'use client' - -import type React from 'react' -import { useEffect, useMemo, useRef, useState } from 'react' -import { toError } from '@sim/utils/errors' -import { generateId } from '@sim/utils/id' -import { useMutation, useQueryClient } from '@tanstack/react-query' -import { - ChevronDown, - ChevronRight, - ExternalLink, - Loader2, - Plus, - RepeatIcon, - SplitIcon, - X, -} from 'lucide-react' -import { - Button, - Checkbox, - Combobox, - Expandable, - ExpandableContent, - Input, - Label, - Switch, - Tooltip, - toast, -} from '@/components/emcn' -import { requestJson } from '@/lib/api/client/request' -import type { - AddWorkflowGroupBodyInput, - UpdateWorkflowGroupBodyInput, -} from '@/lib/api/contracts/tables' -import { - putWorkflowNormalizedStateContract, - type WorkflowStateContractInput, -} from 
'@/lib/api/contracts/workflows' -import { cn } from '@/lib/core/utils/cn' -import type { - ColumnDefinition, - WorkflowGroup, - WorkflowGroupDependencies, - WorkflowGroupOutput, -} from '@/lib/table' -import { columnTypeForLeaf, deriveOutputColumnName } from '@/lib/table/column-naming' -import { - type FlattenOutputsBlockInput, - type FlattenOutputsEdgeInput, - flattenWorkflowOutputs, - getBlockExecutionOrder, -} from '@/lib/workflows/blocks/flatten-outputs' -import { normalizeInputFormatValue } from '@/lib/workflows/input-format' -import { TriggerUtils } from '@/lib/workflows/triggers/triggers' -import type { InputFormatField } from '@/lib/workflows/types' -import { PreviewWorkflow } from '@/app/workspace/[workspaceId]/w/components/preview' -import { getBlock } from '@/blocks' -import { - useAddTableColumn, - useAddWorkflowGroup, - useUpdateColumn, - useUpdateWorkflowGroup, -} from '@/hooks/queries/tables' -import { useWorkflowState, workflowKeys } from '@/hooks/queries/workflows' -import type { WorkflowMetadata } from '@/stores/workflows/registry/types' -import { COLUMN_SIDEBAR_WIDTH_CSS } from '../table/constants' -import { COLUMN_TYPE_OPTIONS, type SidebarColumnType } from './column-types' - -export type ColumnConfigState = - | { mode: 'edit'; columnName: string } - | { mode: 'new'; columnName: string; workflowId: string; proposedName: string } - | { - mode: 'create' - columnName: string - proposedName: string - /** When present, the sidebar opens with the workflow type pre-selected. */ - workflowId?: string - } - | null - -interface ColumnSidebarProps { - configState: ColumnConfigState - onClose: () => void - /** The current column record for edit mode. Null for new mode or closed. 
*/ - existingColumn: ColumnDefinition | null - allColumns: ColumnDefinition[] - workflowGroups: WorkflowGroup[] - workflows: WorkflowMetadata[] | undefined - workspaceId: string - tableId: string -} - -const OUTPUT_VALUE_SEPARATOR = '::' - -/** Shared dashed-divider style — mirrors the workflow editor's subblock divider. */ -const DASHED_DIVIDER_STYLE = { - backgroundImage: - 'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)', -} as const - -/** Encodes blockId + path so duplicate field names across blocks stay distinct in the picker UI. */ -const encodeOutputValue = (blockId: string, path: string) => - `${blockId}${OUTPUT_VALUE_SEPARATOR}${path}` - -/** Splits an encoded `${blockId}::${path}` into its components for persistence. */ -const decodeOutputValue = (value: string): { blockId: string; path: string } => { - const idx = value.indexOf(OUTPUT_VALUE_SEPARATOR) - if (idx === -1) return { blockId: '', path: value } - return { blockId: value.slice(0, idx), path: value.slice(idx + OUTPUT_VALUE_SEPARATOR.length) } -} - -interface BlockOutputGroup { - blockId: string - blockName: string - blockType: string - blockIcon: string | React.ComponentType<{ className?: string }> - blockColor: string - paths: string[] -} - -/** - * Loose shape of `useWorkflowState` data — we only need the fields we round-trip - * through PUT /state. Typed locally to avoid pulling the heavy `WorkflowState` - * generic from `@/stores/workflows/workflow/types`. 
- */ -interface WorkflowStatePayload { - blocks: Record< - string, - { - type: string - subBlocks?: Record - } & Record - > - edges: unknown[] - loops: unknown - parallels: unknown - lastSaved?: number - isDeployed?: boolean -} - -function tableColumnTypeToInputType(colType: ColumnDefinition['type'] | undefined): string { - switch (colType) { - case 'number': - return 'number' - case 'boolean': - return 'boolean' - case 'json': - return 'object' - default: - return 'string' - } -} - -const TagIcon: React.FC<{ - icon: string | React.ComponentType<{ className?: string }> - color: string -}> = ({ icon, color }) => ( -
- {typeof icon === 'string' ? ( - {icon} - ) : ( - (() => { - const IconComponent = icon - return - })() - )} -
-) - -function FieldDivider() { - return ( -
-
-
- ) -} - -/** Mirrors the workflow editor's required-field label: title + asterisk. */ -function FieldLabel({ - htmlFor, - required, - children, -}: { - htmlFor?: string - required?: boolean - children: React.ReactNode -}) { - return ( - - ) -} - -/** Inline validation message styled like the workflow editor's destructive text. */ -function FieldError({ message }: { message: string }) { - return

{message}

-} - -/** - * Tinted inline warning row with a message on the left and an action button - * on the right. Stacks naturally — render multiple in sequence and they line - * up. Color mirrors the group-header deploy badge: `red` for blocking states, - * `amber` for soft warnings. - */ -function WarningRow({ - tone, - message, - action, -}: { - tone: 'red' | 'amber' - message: string - action: React.ReactNode -}) { - return ( -
- - {message} - -
{action}
-
- ) -} - -/** - * Collapsible "Run settings" section. Collapsed by default since outputs are - * the primary focus of the workflow flow — most users never need to touch - * the trigger conditions. The header shows a one-line summary of when the - * group will fire so the current state is visible without expanding. - */ -function RunSettingsSection({ - open, - onOpenChange, - summary, - scalarDepColumns, - groupDepOptions, - deps, - groupDeps, - workflows, - onToggleDep, - onToggleGroupDep, -}: { - open: boolean - onOpenChange: (open: boolean) => void - summary: string - scalarDepColumns: ColumnDefinition[] - groupDepOptions: WorkflowGroup[] - deps: string[] - groupDeps: string[] - workflows: WorkflowMetadata[] | undefined - onToggleDep: (name: string) => void - onToggleGroupDep: (groupId: string) => void -}) { - return ( -
- - - -
- {scalarDepColumns.length === 0 && groupDepOptions.length === 0 ? ( -
- No upstream columns or groups. -
- ) : ( - <> - {scalarDepColumns.map((c, idx) => { - const checked = deps.includes(c.name) - const isLast = idx === scalarDepColumns.length - 1 && groupDepOptions.length === 0 - return ( -
onToggleDep(c.name)} - onKeyDown={(e) => { - if (e.key === ' ' || e.key === 'Enter') { - e.preventDefault() - onToggleDep(c.name) - } - }} - className={cn( - 'flex h-[36px] flex-shrink-0 cursor-pointer items-center gap-2.5 px-2.5 hover:bg-[var(--surface-2)]', - !isLast && 'border-[var(--border)] border-b' - )} - > - - - {c.name} - - - {c.type} - -
- ) - })} - {groupDepOptions.map((g, idx) => { - const checked = groupDeps.includes(g.id) - const isLast = idx === groupDepOptions.length - 1 - const wf = workflows?.find((w) => w.id === g.workflowId) - const color = wf?.color ?? 'var(--text-muted)' - const label = g.name ?? wf?.name ?? 'Workflow' - return ( -
onToggleGroupDep(g.id)} - onKeyDown={(e) => { - if (e.key === ' ' || e.key === 'Enter') { - e.preventDefault() - onToggleGroupDep(g.id) - } - }} - className={cn( - 'flex h-[36px] flex-shrink-0 cursor-pointer items-center gap-2.5 px-2.5 hover:bg-[var(--surface-2)]', - !isLast && 'border-[var(--border)] border-b' - )} - > - -
- ) - })} - - )} -
-
-
-
- ) -} - -/** - * Right-edge configuration panel for any column. - * - * Shows name / type / unique for every column, plus workflow-specific fields - * (workflow picker, output field, dependencies, run concurrency) when the - * selected type is `'workflow'`. - * - * Three modes: - * - 'edit': modify an existing column. PATCH sends a unified updates payload. - * - 'new': user picked a workflow via Change type → Workflow → [pick]. Nothing - * is persisted yet. Save writes type + workflowConfig + renames in one PATCH. - * - 'create': user picked a workflow from "Add column"; the column doesn't exist yet - * and Save creates it. - * - * Visual styling mirrors the workflow editor's subblock panel (label above - * control, dashed dividers between fields). - */ -export function ColumnSidebar({ - configState, - onClose, - existingColumn, - allColumns, - workflowGroups, - workflows, - workspaceId, - tableId, -}: ColumnSidebarProps) { - const updateColumn = useUpdateColumn({ workspaceId, tableId }) - const addColumn = useAddTableColumn({ workspaceId, tableId }) - const addWorkflowGroup = useAddWorkflowGroup({ workspaceId, tableId }) - const updateWorkflowGroup = useUpdateWorkflowGroup({ workspaceId, tableId }) - const open = configState !== null - - const columnName = configState ? configState.columnName : '' - - /** - * If the column being edited is a workflow output, resolve its parent group - * so we can populate workflow / outputs / dependencies state from it. 
- */ - const existingGroup = useMemo(() => { - if (!existingColumn?.workflowGroupId) return undefined - return workflowGroups.find((g) => g.id === existingColumn.workflowGroupId) - }, [existingColumn, workflowGroups]) - - const [nameInput, setNameInput] = useState('') - const [typeInput, setTypeInput] = useState('string') - - const isWorkflow = !!existingGroup || configState?.mode === 'new' || typeInput === 'workflow' - - /** - * Show the Column name field whenever a *specific* column is open: scalar - * columns (create or edit) and per-output workflow columns (edit only). Hide - * it when the surface is the workflow-group as a whole — i.e. creating a - * brand-new workflow column where individual output names are auto-derived. - */ - const showColumnNameField = - !isWorkflow || configState?.mode === 'edit' || configState?.mode === 'new' - - /** - * Columns to the left of the current column — these are the only valid trigger - * dependencies, since a workflow column can't depend on values that haven't been - * filled yet. For 'create' mode the column doesn't exist yet, so every existing - * column counts as left of it. - */ - const otherColumns = useMemo(() => { - if (!configState) return [] - if (configState.mode === 'create') return allColumns - const idx = allColumns.findIndex((c) => c.name === configState.columnName) - if (idx === -1) return allColumns.filter((c) => c.name !== configState.columnName) - return allColumns.slice(0, idx) - }, [configState, allColumns]) - - /** - * Split `otherColumns` into the two dep buckets: - * - `scalarDepColumns` — plain columns; tickable into `dependencies.columns`. - * - `groupDepOptions` — producing workflow groups whose outputs land left of the - * current column; tickable into `dependencies.workflowGroups`. A group only - * shows up here when at least one of its output columns is left-of-current. - * The current group itself is excluded so we never depend on ourselves. 
- */ - const scalarDepColumns = useMemo( - () => otherColumns.filter((c) => !c.workflowGroupId), - [otherColumns] - ) - const groupDepOptions = useMemo(() => { - const seen = new Set() - const result: WorkflowGroup[] = [] - for (const c of otherColumns) { - if (!c.workflowGroupId) continue - if (seen.has(c.workflowGroupId)) continue - if (existingGroup && c.workflowGroupId === existingGroup.id) continue - const g = workflowGroups.find((gg) => gg.id === c.workflowGroupId) - if (!g) continue - seen.add(c.workflowGroupId) - result.push(g) - } - return result - }, [otherColumns, workflowGroups, existingGroup]) - - const [uniqueInput, setUniqueInput] = useState(false) - const [selectedWorkflowId, setSelectedWorkflowId] = useState('') - /** Plain (non-workflow-output) column names this group waits on. */ - const [deps, setDeps] = useState([]) - /** Producing workflow group ids this group waits on. Workflow-output columns are - * represented by their parent group, since the schema validator forbids depending - * on a workflow-output column directly (`workflow-columns.ts` enforces this). */ - const [groupDeps, setGroupDeps] = useState([]) - /** Encoded `${blockId}::${path}` values — disambiguates duplicate paths in the picker. */ - const [selectedOutputs, setSelectedOutputs] = useState([]) - /** Surfaces required-field errors only after a save attempt, matching the workflow editor's deploy flow. */ - const [showValidation, setShowValidation] = useState(false) - /** Save-time error (network/validation thrown by the mutation). Rendered inline next to the footer - * buttons so it isn't covered by the toaster, which sits over the bottom-right of the panel. */ - const [saveError, setSaveError] = useState(null) - /** Run settings (the trigger-deps picker) starts collapsed — outputs are the - * primary task; configuring run timing is rare. 
*/ - const [runSettingsOpen, setRunSettingsOpen] = useState(false) - - const existingColumnRef = useRef(existingColumn) - existingColumnRef.current = existingColumn - const allColumnsRef = useRef(allColumns) - allColumnsRef.current = allColumns - - useEffect(() => { - if (!open || !configState) return - setShowValidation(false) - setSaveError(null) - setRunSettingsOpen(false) - const existing = existingColumnRef.current - const cols = allColumnsRef.current - const leftOfCurrent = (() => { - if (configState.mode === 'create') return cols - const idx = cols.findIndex((c) => c.name === configState.columnName) - if (idx === -1) return cols.filter((c) => c.name !== configState.columnName) - return cols.slice(0, idx) - })() - // Default deps when there's no persisted group yet: tick every left-of-current - // scalar column + every left-of-current producing group. - const defaultScalarDeps = leftOfCurrent.filter((c) => !c.workflowGroupId).map((c) => c.name) - const defaultGroupDeps = (() => { - const seen = new Set() - for (const c of leftOfCurrent) { - if (c.workflowGroupId) seen.add(c.workflowGroupId) - } - return Array.from(seen) - })() - if (configState.mode === 'edit') { - const group = existing?.workflowGroupId - ? workflowGroups.find((g) => g.id === existing.workflowGroupId) - : undefined - // Surface workflow-typed columns as `'workflow'` in the combobox even - // though they're stored as scalar columns under the hood. - setTypeInput(group ? 'workflow' : (existing?.type ?? 'string')) - setUniqueInput(!!existing?.unique) - setNameInput(existing?.name ?? configState.columnName) - if (group) { - setSelectedWorkflowId(group.workflowId) - // Sanitize legacy persisted deps: any workflow-output column names that - // sneaked into `dependencies.columns` (writes from before the schema - // validator forbade them) are lifted into `workflowGroups` here so the - // sidebar surfaces a re-saveable state. 
- const persistedCols = group.dependencies?.columns - const persistedGroups = group.dependencies?.workflowGroups - if (persistedCols !== undefined || persistedGroups !== undefined) { - const liftedGroupIds = new Set(persistedGroups ?? []) - const cleanCols: string[] = [] - for (const colName of persistedCols ?? []) { - const c = cols.find((cc) => cc.name === colName) - if (c?.workflowGroupId) liftedGroupIds.add(c.workflowGroupId) - else cleanCols.push(colName) - } - setDeps(cleanCols) - setGroupDeps(Array.from(liftedGroupIds)) - } else { - setDeps(defaultScalarDeps) - setGroupDeps(defaultGroupDeps) - } - setSelectedOutputs([]) // re-encoded against current workflow blocks below - } else { - setSelectedWorkflowId('') - setDeps([]) - setGroupDeps([]) - setSelectedOutputs([]) - } - } else { - const workflowId = - 'workflowId' in configState && configState.workflowId ? configState.workflowId : '' - setTypeInput(workflowId ? 'workflow' : 'string') - setUniqueInput(false) - setNameInput(configState.proposedName) - setSelectedWorkflowId(workflowId) - setDeps(defaultScalarDeps) - setGroupDeps(defaultGroupDeps) - setSelectedOutputs([]) - } - }, [open, configState, workflowGroups]) - - const workflowState = useWorkflowState( - open && isWorkflow && selectedWorkflowId ? selectedWorkflowId : undefined - ) - - /** - * Resolves the unified Start block id and its current `inputFormat` field - * names. The "Add inputs" mutation only adds rows for table columns that - * aren't already represented in the start block — clicking the button when - * everything's covered does nothing, so we hide it in that case. 
- */ - const startBlockInputs = useMemo<{ - blockId: string | null - existingNames: Set - existing: InputFormatField[] - }>(() => { - const blocks = (workflowState.data as { blocks?: Record } | null) - ?.blocks - if (!blocks) return { blockId: null, existingNames: new Set(), existing: [] } - const candidate = TriggerUtils.findStartBlock(blocks, 'manual') - if (!candidate) return { blockId: null, existingNames: new Set(), existing: [] } - const block = blocks[candidate.blockId] as - | { subBlocks?: Record } - | undefined - const existing = normalizeInputFormatValue(block?.subBlocks?.inputFormat?.value) - return { - blockId: candidate.blockId, - existingNames: new Set(existing.map((f) => f.name).filter((n): n is string => !!n)), - existing, - } - }, [workflowState.data]) - - const missingInputColumnNames = useMemo(() => { - if (!startBlockInputs.blockId) return [] - return allColumns - .filter( - (c) => - c.name !== columnName && !c.workflowGroupId && !startBlockInputs.existingNames.has(c.name) - ) - .map((c) => c.name) - }, [allColumns, columnName, startBlockInputs]) - - const queryClient = useQueryClient() - const addInputsMutation = useMutation({ - mutationFn: async () => { - const wfId = selectedWorkflowId - const startBlockId = startBlockInputs.blockId - const state = workflowState.data as WorkflowStatePayload | null | undefined - if (!wfId || !startBlockId || !state || missingInputColumnNames.length === 0) { - throw new Error('Nothing to add') - } - const startBlock = state.blocks[startBlockId] - if (!startBlock) throw new Error('Start block missing from workflow') - - const newFields: InputFormatField[] = missingInputColumnNames.map((name) => { - const col = allColumns.find((c) => c.name === name) - return { - id: generateId(), - name, - type: tableColumnTypeToInputType(col?.type), - value: '', - collapsed: false, - } as InputFormatField & { id: string; collapsed: boolean } - }) - - const updatedSubBlock = { - ...(startBlock.subBlocks?.inputFormat ?? 
{ id: 'inputFormat', type: 'input-format' }), - value: [...startBlockInputs.existing, ...newFields], - } - const updatedBlocks = { - ...state.blocks, - [startBlockId]: { - ...startBlock, - subBlocks: { ...startBlock.subBlocks, inputFormat: updatedSubBlock }, - }, - } - - const rawBody = { - blocks: updatedBlocks, - edges: state.edges, - loops: state.loops, - parallels: state.parallels, - lastSaved: state.lastSaved ?? Date.now(), - isDeployed: state.isDeployed ?? false, - } - // double-cast-allowed: WorkflowStatePayload is the loose local view of - // useWorkflowState; we round-trip it back to the strict PUT body shape. - const body = rawBody as unknown as WorkflowStateContractInput - await requestJson(putWorkflowNormalizedStateContract, { - params: { id: wfId }, - body, - }) - return missingInputColumnNames.length - }, - onSuccess: (added) => { - queryClient.invalidateQueries({ queryKey: workflowKeys.state(selectedWorkflowId) }) - toast.success(`Added ${added} input${added === 1 ? '' : 's'} to start block`) - }, - onError: (err) => { - toast.error(toError(err).message) - }, - }) - - const blockOutputGroups = useMemo(() => { - const state = workflowState.data as - | { - blocks?: Record - edges?: FlattenOutputsEdgeInput[] - } - | null - | undefined - if (!state?.blocks) return [] - - const blocks = Object.values(state.blocks) - const edges = state.edges ?? 
[] - const flat = flattenWorkflowOutputs(blocks, edges) - if (flat.length === 0) return [] - - const groupsByBlockId = new Map() - for (const f of flat) { - let group = groupsByBlockId.get(f.blockId) - if (!group) { - const blockConfig = getBlock(f.blockType) - const blockColor = blockConfig?.bgColor || '#2F55FF' - let blockIcon: string | React.ComponentType<{ className?: string }> = f.blockName - .charAt(0) - .toUpperCase() - if (blockConfig?.icon) blockIcon = blockConfig.icon - else if (f.blockType === 'loop') blockIcon = RepeatIcon - else if (f.blockType === 'parallel') blockIcon = SplitIcon - group = { - blockId: f.blockId, - blockName: f.blockName, - blockType: f.blockType, - blockIcon, - blockColor, - paths: [], - } - groupsByBlockId.set(f.blockId, group) - } - group.paths.push(f.path) - } - // Sort the picker by execution order (start block first) so it matches the - // saved-column ordering. Unreachable blocks sink to the end. - const distances = getBlockExecutionOrder(blocks, edges) - return Array.from(groupsByBlockId.values()).sort((a, b) => { - const da = distances[a.blockId] - const db = distances[b.blockId] - const sa = da === undefined || da < 0 ? Number.POSITIVE_INFINITY : da - const sb = db === undefined || db < 0 ? Number.POSITIVE_INFINITY : db - return sa - sb - }) - }, [workflowState.data]) - - /** - * Re-encode persisted `{blockId, path}` entries into the picker's encoded form - * once the workflow's blocks are loaded. Stale entries (block deleted or path - * removed) are dropped silently — the user can re-pick on save. 
- */ - useEffect(() => { - if (!existingGroup?.outputs.length) return - if (selectedOutputs.length > 0) return - if (blockOutputGroups.length === 0) return - const encoded: string[] = [] - for (const entry of existingGroup.outputs) { - const match = blockOutputGroups.find( - (g) => g.blockId === entry.blockId && g.paths.includes(entry.path) - ) - if (match) encoded.push(encodeOutputValue(entry.blockId, entry.path)) - } - if (encoded.length > 0) setSelectedOutputs(encoded) - }, [blockOutputGroups, selectedOutputs.length, existingGroup]) - - const toggleDep = (name: string) => { - setDeps((prev) => (prev.includes(name) ? prev.filter((d) => d !== name) : [...prev, name])) - } - - const toggleGroupDep = (groupId: string) => { - setGroupDeps((prev) => - prev.includes(groupId) ? prev.filter((d) => d !== groupId) : [...prev, groupId] - ) - } - - const toggleOutput = (encoded: string) => { - setSelectedOutputs((prev) => - prev.includes(encoded) ? prev.filter((v) => v !== encoded) : [...prev, encoded] - ) - } - - const typeOptions = useMemo( - () => - COLUMN_TYPE_OPTIONS.filter((o) => o.type !== 'workflow' || !!existingGroup).map((o) => ({ - label: o.label, - value: o.type, - icon: o.icon, - })), - [existingGroup] - ) - - /** - * One-line summary of the trigger picker shown when Run settings is collapsed. - * Lists the dep names ("Run when X, Y, are filled") so the user can see at a - * glance whether anything's gating the group without expanding the section. - */ - const runSettingsSummary = useMemo(() => { - const names: string[] = [...deps] - for (const gid of groupDeps) { - const g = workflowGroups.find((gg) => gg.id === gid) - const wf = workflows?.find((w) => w.id === g?.workflowId) - const label = g?.name ?? wf?.name ?? 'workflow' - names.push(label) - } - if (names.length === 0) return 'Runs as soon as the group is added' - return `Runs when ${names.join(', ')} ${names.length === 1 ? 
'is' : 'are'} filled` - }, [deps, groupDeps, workflowGroups, workflows]) - - /** - * Builds the ordered, deduplicated `(blockId, path)` list from the picker - * state, sorted by execution order. Empty array if the user hasn't picked - * anything. - */ - const buildOrderedPickedOutputs = (): Array<{ - blockId: string - path: string - leafType?: string - }> => { - const seen = new Set() - const outputs: Array<{ blockId: string; path: string; leafType?: string }> = [] - for (const encoded of selectedOutputs) { - if (seen.has(encoded)) continue - seen.add(encoded) - outputs.push(decodeOutputValue(encoded)) - } - const wfState = workflowState.data as - | { - blocks?: Record - edges?: FlattenOutputsEdgeInput[] - } - | null - | undefined - if (wfState?.blocks) { - const blocks = Object.values(wfState.blocks) - const edges = wfState.edges ?? [] - const distances = getBlockExecutionOrder(blocks, edges) - const flat = flattenWorkflowOutputs(blocks, edges) - const indexInFlat = new Map( - flat.map((f, i) => [`${f.blockId}${OUTPUT_VALUE_SEPARATOR}${f.path}`, i]) - ) - const leafTypeByKey = new Map( - flat.map((f) => [`${f.blockId}${OUTPUT_VALUE_SEPARATOR}${f.path}`, f.leafType]) - ) - for (const o of outputs) { - o.leafType = leafTypeByKey.get(`${o.blockId}${OUTPUT_VALUE_SEPARATOR}${o.path}`) - } - outputs.sort((a, b) => { - const da = distances[a.blockId] - const db = distances[b.blockId] - const sa = da === undefined || da < 0 ? Number.POSITIVE_INFINITY : da - const sb = db === undefined || db < 0 ? Number.POSITIVE_INFINITY : db - if (sa !== sb) return sa - sb - const ia = - indexInFlat.get(`${a.blockId}${OUTPUT_VALUE_SEPARATOR}${a.path}`) ?? - Number.POSITIVE_INFINITY - const ib = - indexInFlat.get(`${b.blockId}${OUTPUT_VALUE_SEPARATOR}${b.path}`) ?? 
- Number.POSITIVE_INFINITY - return ia - ib - }) - } - return outputs - } - - const handleSave = async () => { - if (!configState) return - setSaveError(null) - const trimmedName = nameInput.trim() - // Name is required iff the field is shown — when configuring a whole - // workflow group at creation time, per-output column names are auto-derived - // and the field is hidden, so don't gate save on it. - const missing: string[] = [] - if (showColumnNameField && !trimmedName) missing.push('a column name') - if (isWorkflow && !selectedWorkflowId) missing.push('a workflow') - if (isWorkflow && selectedWorkflowId && selectedOutputs.length === 0) { - missing.push('at least one output column') - } - if (missing.length > 0) { - setShowValidation(true) - // Surface a short summary near the Save button too — the inline FieldError - // can be scrolled out of view when the panel content is tall. - setSaveError(`Add ${missing.join(' and ')} before saving.`) - return - } - - try { - if (isWorkflow) { - const orderedOutputs = buildOrderedPickedOutputs() - const dependencies: WorkflowGroupDependencies = { - columns: deps, - ...(groupDeps.length > 0 ? { workflowGroups: groupDeps } : {}), - } - - if (existingGroup) { - // Update path: diff outputs, derive new column names for added entries, - // call updateWorkflowGroup so service handles add/remove transactionally. - // If the sidebar was opened on a *specific* workflow-output column and - // the user renamed it, propagate that into the group's `outputs` ref - // (the column rename itself goes through `updateColumn` below, which - // server-side cascades into outputs/deps — but our outgoing payload - // also has to use the new name so the group update doesn't undo it). - const editedColumnName = configState.mode === 'edit' ? configState.columnName : null - const renamedColumn = - editedColumnName && trimmedName && trimmedName !== editedColumnName - ? 
{ from: editedColumnName, to: trimmedName } - : null - const oldKeys = new Set(existingGroup.outputs.map((o) => `${o.blockId}::${o.path}`)) - const taken = new Set( - allColumns.map((c) => - renamedColumn && c.name === renamedColumn.from ? renamedColumn.to : c.name - ) - ) - const fullOutputs: WorkflowGroupOutput[] = [] - const newOutputColumns: NonNullable = [] - for (const o of orderedOutputs) { - const key = `${o.blockId}::${o.path}` - const existing = existingGroup.outputs.find( - (e) => e.blockId === o.blockId && e.path === o.path - ) - if (existing) { - fullOutputs.push( - renamedColumn && existing.columnName === renamedColumn.from - ? { ...existing, columnName: renamedColumn.to } - : existing - ) - } else { - const colName = deriveOutputColumnName(o.path, taken) - taken.add(colName) - fullOutputs.push({ blockId: o.blockId, path: o.path, columnName: colName }) - newOutputColumns.push({ - name: colName, - type: columnTypeForLeaf(o.leafType), - required: false, - unique: false, - workflowGroupId: existingGroup.id, - }) - } - oldKeys.delete(key) - } - if (renamedColumn) { - await updateColumn.mutateAsync({ - columnName: renamedColumn.from, - updates: { name: renamedColumn.to }, - }) - } - await updateWorkflowGroup.mutateAsync({ - groupId: existingGroup.id, - workflowId: selectedWorkflowId, - name: existingGroup.name, - dependencies, - outputs: fullOutputs, - ...(newOutputColumns.length > 0 ? { newOutputColumns } : {}), - }) - toast.success(`Saved "${existingGroup.name ?? 'Workflow'}"`) - } else { - // Create path: build a fresh group with auto-derived column names. 
- const groupId = generateId() - const taken = new Set(allColumns.map((c) => c.name)) - const newOutputColumns: AddWorkflowGroupBodyInput['outputColumns'] = [] - const groupOutputs: WorkflowGroupOutput[] = [] - for (const o of orderedOutputs) { - const colName = deriveOutputColumnName(o.path, taken) - taken.add(colName) - newOutputColumns.push({ - name: colName, - type: columnTypeForLeaf(o.leafType), - required: false, - unique: false, - workflowGroupId: groupId, - }) - groupOutputs.push({ blockId: o.blockId, path: o.path, columnName: colName }) - } - const workflowName = - workflows?.find((w) => w.id === selectedWorkflowId)?.name ?? 'Workflow' - const group: WorkflowGroup = { - id: groupId, - workflowId: selectedWorkflowId, - name: workflowName, - dependencies, - outputs: groupOutputs, - } - await addWorkflowGroup.mutateAsync({ group, outputColumns: newOutputColumns }) - toast.success(`Added "${workflowName}"`) - } - } else if (configState.mode === 'create') { - // `isWorkflow` is false here, so `typeInput` is a real ColumnDefinition type. - const scalarType = typeInput as ColumnDefinition['type'] - await addColumn.mutateAsync({ - name: trimmedName, - type: scalarType, - }) - toast.success(`Added "${trimmedName}"`) - } else { - const existing = existingColumnRef.current - const scalarType = typeInput as ColumnDefinition['type'] - const renamed = trimmedName !== configState.columnName - const typeChanged = !!existing && existing.type !== scalarType - const uniqueChanged = !!existing && !!existing.unique !== uniqueInput - - const updates: { - name?: string - type?: ColumnDefinition['type'] - unique?: boolean - } = { - ...(renamed ? { name: trimmedName } : {}), - ...(typeChanged ? { type: scalarType } : {}), - ...(uniqueChanged ? 
{ unique: uniqueInput } : {}), - } - - if (Object.keys(updates).length === 0) { - onClose() - return - } - - await updateColumn.mutateAsync({ - columnName: configState.columnName, - updates, - }) - toast.success(`Saved "${trimmedName}"`) - } - - onClose() - } catch (err) { - setSaveError(toError(err).message) - } - } - - const saveDisabled = updateColumn.isPending || addColumn.isPending - - return ( - - ) -} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/context-menu/context-menu.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/context-menu/context-menu.tsx index dfe0523ba8d..f7c6f4a27a5 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/context-menu/context-menu.tsx +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/context-menu/context-menu.tsx @@ -5,7 +5,17 @@ import { DropdownMenuSeparator, DropdownMenuTrigger, } from '@/components/emcn' -import { ArrowDown, ArrowUp, Duplicate, Eye, Pencil, Trash } from '@/components/emcn/icons' +import { + ArrowDown, + ArrowUp, + Duplicate, + Eye, + Pencil, + PlayOutline, + RefreshCw, + Square, + Trash, +} from '@/components/emcn/icons' import type { ContextMenuState } from '../../types' interface ContextMenuProps { @@ -20,6 +30,18 @@ interface ContextMenuProps { canViewExecution?: boolean canEditCell?: boolean selectedRowCount?: number + /** Fires every workflow group on the row(s), skipping already-completed + * cells. Mirrors the action bar's Play. */ + onRunWorkflows?: () => void + /** Re-runs every workflow group on the row(s), including already-completed + * cells. Mirrors the action bar's Refresh. */ + onRefreshWorkflows?: () => void + /** Cancels every running/queued execution on the row(s) the context menu is acting on. */ + onStopWorkflows?: () => void + /** Total running/queued executions across the row(s) under the context menu. Drives the Stop label and visibility. 
*/ + runningInSelectionCount?: number + /** Whether the table has any workflow columns; gates the run-workflows item. */ + hasWorkflowColumns?: boolean disableEdit?: boolean disableInsert?: boolean disableDelete?: boolean @@ -37,11 +59,26 @@ export function ContextMenu({ canViewExecution = false, canEditCell = true, selectedRowCount = 1, + onRunWorkflows, + onRefreshWorkflows, + onStopWorkflows, + runningInSelectionCount = 0, + hasWorkflowColumns = false, disableEdit = false, disableInsert = false, disableDelete = false, }: ContextMenuProps) { const deleteLabel = selectedRowCount > 1 ? `Delete ${selectedRowCount} rows` : 'Delete row' + const runLabel = + selectedRowCount > 1 + ? `Run empty or failed cells on ${selectedRowCount} rows` + : 'Run empty or failed cells' + const refreshLabel = + selectedRowCount > 1 ? `Re-run all cells on ${selectedRowCount} rows` : 'Re-run all cells' + const stopLabel = + runningInSelectionCount === 1 + ? 'Stop running workflow' + : `Stop ${runningInSelectionCount} running workflows` return ( )} + {hasWorkflowColumns && onRunWorkflows && ( + + + {runLabel} + + )} + {hasWorkflowColumns && onRefreshWorkflows && ( + + + {refreshLabel} + + )} + {hasWorkflowColumns && onStopWorkflows && runningInSelectionCount > 0 && ( + + + {stopLabel} + + )} Insert row above diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/index.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/index.ts index bc0da8a0717..0fca186c0c6 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/index.ts +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/index.ts @@ -1,4 +1,9 @@ +export * from './column-config-sidebar' export * from './context-menu' +export * from './new-column-dropdown' export * from './row-modal' -export * from './table' +export * from './run-status-control' +export * from './table-action-bar' export * from './table-filter' +export * from './table-grid' +export * from 
'./workflow-sidebar' diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/new-column-dropdown/index.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/new-column-dropdown/index.ts new file mode 100644 index 00000000000..026d9ff58f1 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/new-column-dropdown/index.ts @@ -0,0 +1 @@ +export { NewColumnDropdown } from './new-column-dropdown' diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/new-column-dropdown/new-column-dropdown.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/new-column-dropdown/new-column-dropdown.tsx new file mode 100644 index 00000000000..8330e581cf5 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/new-column-dropdown/new-column-dropdown.tsx @@ -0,0 +1,79 @@ +'use client' + +import { + Button, + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from '@/components/emcn' +import { Plus } from '@/components/emcn/icons' +import type { ColumnDefinition } from '@/lib/table' +import { COLUMN_TYPE_OPTIONS } from '../column-config-sidebar' + +const CELL_HEADER = + 'border-[var(--border)] border-r border-b bg-[var(--bg)] px-2 py-[7px] text-left align-middle' + +const HEADER_ADD_COLUMN_ICON = + +interface NewColumnDropdownProps { + /** `'header'` renders the page-header trigger (subtle Button); `'inline-header'` renders + * the in-table column-header `
` so it must be a ` : menu +} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/run-status-control.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/run-status-control.tsx new file mode 100644 index 00000000000..43640d2d8ae --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/run-status-control.tsx @@ -0,0 +1,41 @@ +'use client' + +import { memo } from 'react' +import { Button } from '@/components/emcn' +import { Loader, Square } from '@/components/emcn/icons' + +interface RunStatusControlProps { + running: number + onStopAll: () => void + isStopping: boolean +} + +/** + * Run-status + Stop-all control rendered in the page header's leading actions + * row when any workflow runs are active. Matches the in-cell running indicator + * (Loader + tertiary text) for consistency. + */ +export const RunStatusControl = memo(function RunStatusControl({ + running, + onStopAll, + isStopping, +}: RunStatusControlProps) { + return ( +
+
+ + {running} + running +
+ +
+ ) +}) diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-action-bar/index.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-action-bar/index.ts new file mode 100644 index 00000000000..1e8041624b8 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-action-bar/index.ts @@ -0,0 +1 @@ +export { TableActionBar } from './table-action-bar' diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-action-bar/table-action-bar.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-action-bar/table-action-bar.tsx new file mode 100644 index 00000000000..d807b32a022 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-action-bar/table-action-bar.tsx @@ -0,0 +1,172 @@ +'use client' + +import { AnimatePresence, motion } from 'framer-motion' +import { Button, Tooltip } from '@/components/emcn' +import { Eye, PlayOutline, RefreshCw, Square } from '@/components/emcn/icons' +import { cn } from '@/lib/core/utils/cn' + +interface TableActionBarProps { + /** Number of (row × group) cells the run/stop buttons would target. Drives + * the bar's leading label ("N cells"). */ + selectedCellCount: number + /** Total running/queued workflow cells in the selection. Drives Stop. */ + runningCount: number + /** Whether the table has any workflow columns. The bar hides entirely when + * there are none — Run/Stop have nothing to act on. */ + hasWorkflowColumns: boolean + /** Show the Play (incomplete-mode) button — true when any selected cell is + * empty / errored / cancelled. */ + showPlay: boolean + /** Show the Refresh (all-mode) button — true when any selected cell is + * already completed. */ + showRefresh: boolean + /** Smart run: fire workflows only on cells that are empty / errored / + * cancelled. Maps to server `runMode: 'incomplete'`. 
*/ + onPlay: () => void + /** Forceful re-run: fire workflows on every selected cell, including + * completed ones. Maps to server `runMode: 'all'`. */ + onRefresh: () => void + /** Cancel running/queued cells in the selection. */ + onStopWorkflows: () => void + /** When the user has highlighted exactly one workflow cell (or N adjacent + * cells in the same row + group), surface a "View execution" affordance + * alongside the run buttons. Omit when no single-execution view applies. */ + onViewExecution?: () => void + /** Disables actions while a bulk mutation is in flight. */ + isLoading?: boolean + /** Additional className for the floating wrapper — used to lift the bar + * above bottom-anchored UI like a pagination row. */ + className?: string +} + +/** + * Floating action bar shown at the bottom of the table when one or more + * workflow cells are highlighted. Play / Refresh visibility is data-driven: + * Play appears when there's anything empty/failed in the selection; Refresh + * appears when there's anything already completed; both when the selection is + * mixed. + * + * Rendered with `position: absolute` inside the table's container (not + * `fixed`) so it scopes to the table's bounds — important for embedded mode, + * where the table sits inside a panel and a fixed-positioned bar would land + * centered on the whole viewport instead of the panel. + */ +export function TableActionBar({ + selectedCellCount, + runningCount, + hasWorkflowColumns, + showPlay, + showRefresh, + onPlay, + onRefresh, + onStopWorkflows, + onViewExecution, + isLoading = false, + className, +}: TableActionBarProps) { + const visible = + hasWorkflowColumns && + selectedCellCount > 0 && + (showPlay || showRefresh || runningCount > 0 || Boolean(onViewExecution)) + const stopLabel = + runningCount === 1 ? 'Stop running workflow' : `Stop ${runningCount} running workflows` + const playLabel = + selectedCellCount === 1 ? 
'Run cell' : `Run ${selectedCellCount} empty or failed cells` + const refreshLabel = selectedCellCount === 1 ? 'Re-run cell' : `Re-run ${selectedCellCount} cells` + + return ( + + {visible && ( + +
+ + {selectedCellCount === 1 + ? 'Selected 1 workflow cell' + : `Selected ${selectedCellCount} workflow cells`} + + +
+ {showPlay && ( + + + + + {playLabel} + + )} + + {showRefresh && ( + + + + + {refreshLabel} + + )} + + {runningCount > 0 && ( + + + + + {stopLabel} + + )} + + {onViewExecution && ( + + + + + View execution + + )} +
+
+
+ )} +
+ ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-filter/index.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-filter/index.ts similarity index 100% rename from apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-filter/index.tsx rename to apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-filter/index.ts diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/cell-content.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/cell-content.tsx new file mode 100644 index 00000000000..2fbbe78f194 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/cell-content.tsx @@ -0,0 +1,59 @@ +'use client' + +import type { RowExecutionMetadata } from '@/lib/table' +import type { SaveReason } from '../../../types' +import type { DisplayColumn } from '../types' +import { CellRender, resolveCellRender } from './cell-render' +import { InlineEditor } from './inline-editors' + +interface CellContentProps { + value: unknown + exec?: RowExecutionMetadata + column: DisplayColumn + isEditing: boolean + initialCharacter?: string | null + onSave: (value: unknown, reason: SaveReason) => void + onCancel: () => void + /** + * Human-readable labels for unmet deps on this row+group, used to render a + * "Waiting" pill when the cell hasn't run because something it depends on + * is empty. `undefined` (or empty) means no waiting state. + */ + waitingOnLabels?: string[] +} + +/** + * Glue layer: maps cell inputs to a typed `CellRenderKind` (via the pure + * resolver) and renders the corresponding JSX (via the dumb renderer). The + * inline editor sits on top when `isEditing` is true. Adding a new cell + * appearance is a three-step mechanical change in the colocated files. 
+ */ +export function CellContent({ + value, + exec, + column, + isEditing, + initialCharacter, + onSave, + onCancel, + waitingOnLabels, +}: CellContentProps) { + const kind = resolveCellRender({ value, exec, column, waitingOnLabels }) + + return ( + <> + {isEditing && ( +
+ +
+ )} + + + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/cell-render.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/cell-render.tsx new file mode 100644 index 00000000000..35eb3d2e8f9 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/cell-render.tsx @@ -0,0 +1,265 @@ +'use client' + +import type React from 'react' +import { Badge, Checkbox, Tooltip } from '@/components/emcn' +import { cn } from '@/lib/core/utils/cn' +import type { RowExecutionMetadata } from '@/lib/table' +import { StatusBadge } from '@/app/workspace/[workspaceId]/logs/utils' +import { storageToDisplay } from '../../../utils' +import type { DisplayColumn } from '../types' + +/** + * Discriminated union describing every shape a table cell can take. + * + * Workflow-output cells follow a status state machine: they always render + * *something* (a value, a status pill, or a dash), driven by the combination + * of `executions[groupId]` state and dep satisfaction. Plain (non-workflow) + * cells just render the typed value or empty. + * + * `'empty'` is the universal fallback used by both workflow cells (no exec, + * no value, no waiting) and plain cells (null/undefined value). + * + * Adding a new cell appearance is a three-step mechanical change: add a + * variant here, pick it in `resolveCellRender`, render it in `CellRender`. + * TypeScript's exhaustiveness check on the renderer's `switch` (the + * unreachable default) flags any branch you forgot. 
+ */ +export type CellRenderKind = + // Workflow-output cells + | { kind: 'value'; text: string } + | { kind: 'block-error' } + | { kind: 'running' } + | { kind: 'pending-upstream' } + | { kind: 'queued' } + | { kind: 'cancelled' } + | { kind: 'error' } + | { kind: 'waiting'; labels: string[] } + // Plain typed cells + | { kind: 'boolean'; checked: boolean } + | { kind: 'json'; text: string } + | { kind: 'date'; text: string } + | { kind: 'text'; text: string } + // Universal fallback + | { kind: 'empty' } + +interface ResolveCellRenderInput { + value: unknown + exec: RowExecutionMetadata | undefined + column: DisplayColumn + /** Empty / undefined → not waiting; non-empty → render the Waiting pill. */ + waitingOnLabels: string[] | undefined +} + +/** + * Decide which `CellRenderKind` to render for a cell. Pure — easily + * unit-testable in isolation, no JSX involved. + * + * Order matters for workflow cells: block-error wins over a value (the user + * cares about the failure), value wins over running/queued (we have data + * already), and the running/queued branch deliberately collapses pre-enqueue + * `pending` and post-enqueue `queued` into one `Queued` pill so the cell + * doesn't flicker as the row transitions from one to the other. + */ +export function resolveCellRender({ + value, + exec, + column, + waitingOnLabels, +}: ResolveCellRenderInput): CellRenderKind { + const isNull = value === null || value === undefined + + if (column.workflowGroupId) { + const blockId = column.outputBlockId + const blockError = blockId ? exec?.blockErrors?.[blockId] : undefined + const blockRunning = blockId ? (exec?.runningBlockIds?.includes(blockId) ?? 
false) : false + const groupHasBlockErrors = !!(exec?.blockErrors && Object.keys(exec.blockErrors).length > 0) + + if (blockError) return { kind: 'block-error' } + + // In-flight wins over the existing value: when the group is being re-run, + // the current value is about to be overwritten — surface the run state so + // the user sees the cell is changing. Without this, a queued / running + // re-run on a previously-completed cell looks like nothing happened until + // the new value lands. + const inFlight = + exec?.status === 'running' || exec?.status === 'queued' || exec?.status === 'pending' + if (inFlight && !(groupHasBlockErrors && !blockRunning)) { + if (blockRunning) return { kind: 'running' } + if (exec?.status === 'queued' || exec?.status === 'pending') return { kind: 'queued' } + // `running` with this block not in `runningBlockIds` = upstream block + // still going; surface as the amber Pending pill per logs convention. + return { kind: 'pending-upstream' } + } + + if (!isNull) return { kind: 'value', text: stringifyValue(value) } + + // Waiting wins over a stale terminal state: if deps are unmet right now, + // the prior `cancelled` / `error` is informational at best — the cell + // can't actually run until the user fills the missing input. Surface the + // actionable state instead of the stale one. 
+ if (waitingOnLabels && waitingOnLabels.length > 0) { + return { kind: 'waiting', labels: waitingOnLabels } + } + if (exec?.status === 'cancelled') return { kind: 'cancelled' } + if (exec?.status === 'error') return { kind: 'error' } + return { kind: 'empty' } + } + + if (column.type === 'boolean') return { kind: 'boolean', checked: Boolean(value) } + if (isNull) return { kind: 'empty' } + if (column.type === 'json') return { kind: 'json', text: JSON.stringify(value) } + if (column.type === 'date') return { kind: 'date', text: String(value) } + return { kind: 'text', text: stringifyValue(value) } +} + +function stringifyValue(value: unknown): string { + if (typeof value === 'string') return value + if (value === null || value === undefined) return '' + return JSON.stringify(value) +} + +interface CellRenderProps { + kind: CellRenderKind + /** When true the static content sits underneath the InlineEditor overlay + * and should be visually hidden (but kept in flow to preserve cell size). */ + isEditing: boolean +} + +/** + * Pure renderer: takes a `CellRenderKind` and returns the JSX. No business + * logic — adding a new cell appearance means adding a new `case` here. The + * exhaustiveness check on the `switch` (the unreachable default) flags any + * variant you forgot to handle. + */ +export function CellRender({ kind, isEditing }: CellRenderProps): React.ReactElement | null { + switch (kind.kind) { + case 'value': + return ( + + {kind.text} + + ) + + case 'block-error': + case 'error': + return ( + + + + ) + + case 'running': + return ( + + + + ) + + case 'pending-upstream': + return ( + + + + ) + + case 'cancelled': + return ( + + + + ) + + case 'queued': + return ( + + + Queued + + + ) + + case 'waiting': + return ( + + + + + + Waiting + + + + + Waiting on {kind.labels.map((l) => `"${l}"`).join(', ')} + + + + ) + + case 'boolean': + return ( +
+ +
+ ) + + case 'json': + return ( + + {kind.text} + + ) + + case 'date': + return ( + + {storageToDisplay(kind.text)} + + ) + + case 'text': + return ( + + {kind.text} + + ) + + case 'empty': + return null + + default: { + // Exhaustiveness guard: TypeScript flags this branch if a new + // `CellRenderKind` variant is added without a matching `case` above. + const _exhaustive: never = kind + return _exhaustive + } + } +} + +/** + * Workflow-output cells are hand-editable; while editing, the static content + * must stay in flow (so the cell doesn't collapse) but be visually hidden so + * the InlineEditor overlay shows through. Plain wrapper around any non-text + * variant. + */ +function Wrap({ isEditing, children }: { isEditing: boolean; children: React.ReactNode }) { + if (!isEditing) return <>{children} + return
{children}
+} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/cells/expanded-cell-popover.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/expanded-cell-popover.tsx similarity index 100% rename from apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/cells/expanded-cell-popover.tsx rename to apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/expanded-cell-popover.tsx diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/index.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/index.ts new file mode 100644 index 00000000000..c54286afa5f --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/index.ts @@ -0,0 +1,4 @@ +export { CellContent } from './cell-content' +export { CellRender, type CellRenderKind, resolveCellRender } from './cell-render' +export { ExpandedCellPopover } from './expanded-cell-popover' +export { InlineEditor } from './inline-editors' diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/cells/inline-editors.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/inline-editors.tsx similarity index 100% rename from apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/cells/inline-editors.tsx rename to apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/inline-editors.tsx diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/constants.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/constants.ts similarity index 61% rename from apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/constants.ts rename to apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/constants.ts index 28aead32657..69db8b7b4f4 100644 
--- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/constants.ts +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/constants.ts @@ -5,5 +5,6 @@ export const SELECTION_TINT_BG = 'bg-[rgba(37,99,235,0.06)]' * been measured yet and as the initial width for newly-added columns. */ export const COL_WIDTH = 160 -/** Column config sidebar width: roomy by default, bounded on narrow screens. */ -export const COLUMN_SIDEBAR_WIDTH_CSS = 'min(480px, calc(100vw - 48px))' +/** Column config sidebar width in pixels — drives both the sidebar's own width + * and the table's reserved padding-right while a sidebar is open. */ +export const COLUMN_SIDEBAR_WIDTH = 400 diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/headers/column-header-menu.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/headers/column-header-menu.tsx similarity index 83% rename from apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/headers/column-header-menu.tsx rename to apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/headers/column-header-menu.tsx index da955ee1322..d7a80f4a507 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/headers/column-header-menu.tsx +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/headers/column-header-menu.tsx @@ -35,8 +35,13 @@ interface ColumnHeaderMenuProps { onDragLeave?: () => void workflows?: WorkflowMetadata[] workflowGroups?: WorkflowGroup[] + /** Source-info entry for workflow-output columns; supplies the producing + * block's icon component. The block's color is intentionally not used. */ sourceInfo?: ColumnSourceInfo onOpenConfig: (columnName: string) => void + /** Opens a popup preview of the column's underlying workflow. Surfaced in + * the chevron menu for workflow-output columns. 
*/ + onViewWorkflow?: (workflowId: string) => void } /** @@ -70,6 +75,7 @@ export const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({ workflowGroups, sourceInfo, onOpenConfig, + onViewWorkflow, }: ColumnHeaderMenuProps) { const renameInputRef = useRef(null) const didDragRef = useRef(false) @@ -90,10 +96,6 @@ export const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({ ? 'Hide column' : 'Delete workflow' : undefined - const workflowColor = configuredWorkflow?.color - const blockIconInfo = sourceInfo?.blockIconInfo - const blockName = sourceInfo?.blockName - useEffect(() => { if (isRenaming && renameInputRef.current) { renameInputRef.current.focus() @@ -142,8 +144,13 @@ export const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({ e.dataTransfer.effectAllowed = 'move' e.dataTransfer.setData('text/plain', column.name) + // Workflow-output columns drag as a whole group, so the ghost shows + // the group's name (falling back to the workflow's name, then the + // column slug) rather than the individual column slug. + const ghostLabel = ownGroup?.name ?? configuredWorkflow?.name ?? 
column.name + const ghost = document.createElement('div') - ghost.textContent = column.name + ghost.textContent = ghostLabel ghost.style.cssText = 'position:absolute;top:-9999px;padding:4px 8px;background:var(--bg);border:1px solid var(--border);border-radius:4px;font-size:13px;font-weight:500;white-space:nowrap;color:var(--text-primary)' document.body.appendChild(ghost) @@ -152,7 +159,7 @@ export const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({ onDragStart?.(column.name) }, - [column.name, readOnly, isRenaming, onDragStart] + [column.name, ownGroup, configuredWorkflow, readOnly, isRenaming, onDragStart] ) const handleDragOver = useCallback( @@ -181,6 +188,11 @@ export const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({ const th = e.currentTarget as HTMLElement const related = e.relatedTarget as Node | null if (related && th.contains(related)) return + // Don't clear when the cursor is moving to another column header — the + // next dragover will set the right target. Clearing here causes the + // drop indicator to flicker between sibling columns of a workflow + // group (and any adjacent column hop in general). + if (related && related instanceof Element && related.closest('th')) return onDragLeave?.() }, [onDragLeave] @@ -238,8 +250,8 @@ export const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({
- {column.workflowGroupId ? ( -
- {blockName && ( - - {blockName} - - )} - - {column.headerLabel} - -
- ) : ( - - {column.name} - - )} + + {column.workflowGroupId ? column.headerLabel : column.name} +
) : (
@@ -288,25 +287,12 @@ export const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({ > - {column.workflowGroupId ? ( -
- {blockName && ( - - {blockName} - - )} - - {column.headerLabel} - -
- ) : ( - - {column.name} - - )} + + {column.workflowGroupId ? column.headerLabel : column.name} + onViewWorkflow(ownGroup.workflowId) : undefined + } />
)} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/headers/column-type-icon.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/headers/column-type-icon.tsx new file mode 100644 index 00000000000..e4e4fc51b24 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/headers/column-type-icon.tsx @@ -0,0 +1,50 @@ +'use client' + +import type React from 'react' +import { + Calendar as CalendarIcon, + PlayOutline, + TypeBoolean, + TypeJson, + TypeNumber, + TypeText, +} from '@/components/emcn/icons' +import type { BlockIconInfo } from '../types' + +export const COLUMN_TYPE_ICONS: Record = { + string: TypeText, + number: TypeNumber, + boolean: TypeBoolean, + date: CalendarIcon, + json: TypeJson, +} + +interface ColumnTypeIconProps { + type: string + /** True for workflow-output columns; renders the producing block's icon + * (or a workflow fallback) instead of the scalar type icon. Workflow + * columns ARE stored as scalar types, so without this `type` would + * otherwise resolve to e.g. `string` and read identically to a plain + * text column. */ + isWorkflowColumn?: boolean + /** Block-icon info from the source-info builder, used for workflow columns + * to surface the producing block's icon. The block's color is intentionally + * ignored — icons render in the plain `text-[var(--text-icon)]` tone like + * every other column-type icon, no per-block tint. */ + blockIconInfo?: BlockIconInfo +} + +/** + * Tiny icon shown next to a column header. Workflow-output columns get the + * producing block's icon (falling back to `PlayOutline`); plain columns get + * their scalar type icon. Both render in the same `text-[var(--text-icon)]` + * tone — no per-workflow color, no colored swatch. + */ +export function ColumnTypeIcon({ type, isWorkflowColumn, blockIconInfo }: ColumnTypeIconProps) { + if (isWorkflowColumn) { + const Icon = blockIconInfo?.icon ?? 
PlayOutline + return + } + const Icon = COLUMN_TYPE_ICONS[type] ?? TypeText + return +} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/headers/index.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/headers/index.ts new file mode 100644 index 00000000000..8c8ef9f9dc2 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/headers/index.ts @@ -0,0 +1,3 @@ +export { ColumnHeaderMenu } from './column-header-menu' +export { COLUMN_TYPE_ICONS, ColumnTypeIcon } from './column-type-icon' +export { ColumnOptionsMenu, WorkflowGroupMetaCell } from './workflow-group-meta-cell' diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/headers/workflow-group-meta-cell.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/headers/workflow-group-meta-cell.tsx similarity index 59% rename from apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/headers/workflow-group-meta-cell.tsx rename to apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/headers/workflow-group-meta-cell.tsx index 8b3403053f6..51788975ec9 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/headers/workflow-group-meta-cell.tsx +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/headers/workflow-group-meta-cell.tsx @@ -1,7 +1,7 @@ 'use client' import type React from 'react' -import { useCallback, useState } from 'react' +import { useCallback, useRef, useState } from 'react' import { DropdownMenu, DropdownMenuContent, @@ -12,7 +12,16 @@ import { DropdownMenuSubTrigger, DropdownMenuTrigger, } from '@/components/emcn' -import { ArrowLeft, ArrowRight, EyeOff, Pencil, PlayOutline, Trash } from '@/components/emcn/icons' +import { + ArrowLeft, + ArrowRight, + Eye, + EyeOff, + Pencil, + PlayOutline, + Trash, +} from '@/components/emcn/icons' +import type 
{ RunMode } from '@/lib/api/contracts/tables' import { cn } from '@/lib/core/utils/cn' import type { WorkflowMetadata } from '@/stores/workflows/registry/types' import { SELECTION_TINT_BG } from '../constants' @@ -40,8 +49,14 @@ interface ColumnOptionsMenuProps { onDeleteGroup?: () => void /** When provided, the menu is being opened from a workflow-group header and * exposes group-level run actions above the column actions. */ - onRunGroupAll?: () => void - onRunGroupIncomplete?: () => void + onRunColumnAll?: () => void + onRunColumnIncomplete?: () => void + /** When set, surfaces a "Run N selected rows" item above Run all. */ + onRunColumnSelected?: () => void + selectedRowCount?: number + /** When set, the menu surfaces a "View workflow" item that opens a popup + * preview of the configured workflow. */ + onViewWorkflow?: () => void } /** @@ -62,10 +77,14 @@ export function ColumnOptionsMenu({ onInsertRight, onDeleteColumn, onDeleteGroup, - onRunGroupAll, - onRunGroupIncomplete, + onRunColumnAll, + onRunColumnIncomplete, + onRunColumnSelected, + selectedRowCount = 0, + onViewWorkflow, }: ColumnOptionsMenuProps) { - const showRunActions = Boolean(onRunGroupAll && onRunGroupIncomplete) + const showRunActions = Boolean(onRunColumnAll && onRunColumnIncomplete) + const showRunSelected = Boolean(onRunColumnSelected) && selectedRowCount > 0 return ( @@ -97,8 +116,15 @@ export function ColumnOptionsMenu({ Run - onRunGroupAll?.()}>Run all rows - onRunGroupIncomplete?.()}> + {showRunSelected && ( + onRunColumnSelected?.()}> + {`Run ${selectedRowCount} selected ${selectedRowCount === 1 ? 
'row' : 'rows'}`} + + )} + onRunColumnAll?.()}> + Run all rows + + onRunColumnIncomplete?.()}> Run empty rows @@ -106,6 +132,12 @@ export function ColumnOptionsMenu({ )} + {onViewWorkflow && ( + onViewWorkflow()}> + + View workflow + + )} onOpenConfig(column.name)}> Edit column @@ -143,12 +175,26 @@ interface WorkflowGroupMetaCellProps { isGroupSelected: boolean onSelectGroup: (startColIndex: number, size: number) => void onOpenConfig: (columnName: string) => void - onRunGroup?: (groupId: string, workflowId: string, mode?: 'all' | 'incomplete') => void + onRunColumn?: (groupId: string, mode?: RunMode, rowIds?: string[]) => void onInsertLeft?: (columnName: string) => void onInsertRight?: (columnName: string) => void onDeleteColumn?: (columnName: string) => void /** Right-click delete on the group header drops the entire workflow group. */ onDeleteGroup?: (groupId: string) => void + /** Row ids in the user's current multi-row selection; when non-empty the + * run menu adds a "Run N selected rows" option. */ + selectedRowIds?: string[] | null + /** Opens a popup preview of the underlying workflow. */ + onViewWorkflow?: (workflowId: string) => void + /** When set, the meta cell becomes draggable and forwards events through + * the same column-reorder pipeline used by individual workflow column + * headers. The whole group moves together because downstream code groups + * fan-out siblings by `workflowGroupId`. 
*/ + onDragStart?: (columnName: string) => void + onDragOver?: (columnName: string, side: 'left' | 'right') => void + onDragEnd?: () => void + onDragLeave?: () => void + readOnly?: boolean } /** @@ -167,11 +213,18 @@ export function WorkflowGroupMetaCell({ isGroupSelected, onSelectGroup, onOpenConfig, - onRunGroup, + onRunColumn, onInsertLeft, onInsertRight, onDeleteColumn, onDeleteGroup, + selectedRowIds, + onViewWorkflow, + onDragStart, + onDragOver, + onDragEnd, + onDragLeave, + readOnly, }: WorkflowGroupMetaCellProps) { const wf = workflows?.find((w) => w.id === workflowId) const color = wf?.color ?? 'var(--text-muted)' @@ -180,14 +233,23 @@ export function WorkflowGroupMetaCell({ const [optionsMenuOpen, setOptionsMenuOpen] = useState(false) const [optionsMenuPosition, setOptionsMenuPosition] = useState({ x: 0, y: 0 }) const [runMenuOpen, setRunMenuOpen] = useState(false) + const didDragRef = useRef(false) + + const selectedCount = selectedRowIds?.length ?? 0 const handleRunAll = useCallback(() => { - if (groupId && workflowId) onRunGroup?.(groupId, workflowId, 'all') - }, [groupId, workflowId, onRunGroup]) + if (groupId) onRunColumn?.(groupId, 'all') + }, [groupId, onRunColumn]) const handleRunIncomplete = useCallback(() => { - if (groupId && workflowId) onRunGroup?.(groupId, workflowId, 'incomplete') - }, [groupId, workflowId, onRunGroup]) + if (groupId) onRunColumn?.(groupId, 'incomplete') + }, [groupId, onRunColumn]) + + const handleRunSelected = useCallback(() => { + if (groupId && selectedRowIds && selectedRowIds.length > 0) { + onRunColumn?.(groupId, 'all', selectedRowIds) + } + }, [groupId, onRunColumn, selectedRowIds]) const handleContextMenu = useCallback( (e: React.MouseEvent) => { @@ -207,18 +269,88 @@ export function WorkflowGroupMetaCell({ // should select the group + open the config sidebar. 
const target = e.target as HTMLElement if (target.closest('button, [role="menuitem"], [role="menu"]')) return + // Drag-vs-click guard: when a drag just ended on this cell, swallow the + // synthetic click so we don't accidentally pop open the sidebar. + if (didDragRef.current) { + didDragRef.current = false + return + } onSelectGroup(startColIndex, size) if (columnName) onOpenConfig(columnName) }, [columnName, onOpenConfig, onSelectGroup, size, startColIndex] ) + const handleDragStart = useCallback( + (e: React.DragEvent) => { + if (readOnly || !onDragStart || !columnName) { + e.preventDefault() + return + } + didDragRef.current = true + e.dataTransfer.effectAllowed = 'move' + e.dataTransfer.setData('text/plain', columnName) + + const ghost = document.createElement('div') + ghost.textContent = name + ghost.style.cssText = + 'position:absolute;top:-9999px;padding:4px 8px;background:var(--bg);border:1px solid var(--border);border-radius:4px;font-size:13px;font-weight:500;white-space:nowrap;color:var(--text-primary)' + document.body.appendChild(ghost) + e.dataTransfer.setDragImage(ghost, ghost.offsetWidth / 2, ghost.offsetHeight / 2) + requestAnimationFrame(() => ghost.parentNode?.removeChild(ghost)) + + onDragStart(columnName) + }, + [columnName, name, onDragStart, readOnly] + ) + + const handleDragOver = useCallback( + (e: React.DragEvent) => { + if (!onDragOver || !columnName) return + e.preventDefault() + e.dataTransfer.dropEffect = 'move' + const rect = (e.currentTarget as HTMLElement).getBoundingClientRect() + const midX = rect.left + rect.width / 2 + const side = e.clientX < midX ? 
'left' : 'right' + onDragOver(columnName, side) + }, + [columnName, onDragOver] + ) + + const handleDragEnd = useCallback(() => { + didDragRef.current = false + onDragEnd?.() + }, [onDragEnd]) + + const handleDragLeave = useCallback( + (e: React.DragEvent) => { + const th = e.currentTarget as HTMLElement + const related = e.relatedTarget as Node | null + if (related && th.contains(related)) return + if (related && related instanceof Element && related.closest('th')) return + onDragLeave?.() + }, + [onDragLeave] + ) + + const handleDrop = useCallback((e: React.DragEvent) => { + e.preventDefault() + }, []) + + const isDraggable = !readOnly && Boolean(onDragStart) + return (
diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/index.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/index.ts new file mode 100644 index 00000000000..306193d764b --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/index.ts @@ -0,0 +1 @@ +export { type SelectionSnapshot, TableGrid } from './table-grid' diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/table.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/table-grid.tsx similarity index 74% rename from apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/table.tsx rename to apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/table-grid.tsx index cbe1f625f43..bc97570b4ff 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/table.tsx +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/table-grid.tsx @@ -2,52 +2,23 @@ import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react' import { createLogger } from '@sim/logger' -import { Square } from 'lucide-react' -import { useParams, useRouter } from 'next/navigation' +import { useParams } from 'next/navigation' import { usePostHog } from 'posthog-js/react' -import { - Button, - Checkbox, - Download, - Modal, - ModalBody, - ModalContent, - ModalFooter, - ModalHeader, - Skeleton, - toast, - Upload, -} from '@/components/emcn' -import { - Pencil, - PlayOutline, - Plus, - Table as TableIcon, - TableX, - Trash, -} from '@/components/emcn/icons' -import { Loader } from '@/components/emcn/icons/loader' +import { Button, Checkbox, Skeleton, toast } from '@/components/emcn' +import { PlayOutline, Plus, Square, TableX } from '@/components/emcn/icons' +import type { RunMode } from '@/lib/api/contracts/tables' import { cn } from '@/lib/core/utils/cn' import { captureEvent } from 
'@/lib/posthog/client' -import type { ColumnDefinition, Filter, SortDirection, TableRow as TableRowType } from '@/lib/table' -import type { ColumnOption, SortConfig } from '@/app/workspace/[workspaceId]/components' -import { ResourceHeader, ResourceOptionsBar } from '@/app/workspace/[workspaceId]/components' -import { LogDetails } from '@/app/workspace/[workspaceId]/logs/components' +import type { ColumnDefinition, TableRow as TableRowType, WorkflowGroup } from '@/lib/table' +import { getUnmetGroupDeps, isExecInFlight } from '@/lib/table/deps' import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider' -import { ImportCsvDialog } from '@/app/workspace/[workspaceId]/tables/components/import-csv-dialog' -import { useLogByExecutionId } from '@/hooks/queries/logs' import { - downloadTableExport, useAddTableColumn, useBatchCreateTableRows, useBatchUpdateTableRows, - useCancelTableRuns, useCreateTableRow, useDeleteColumn, - useDeleteTable, useDeleteWorkflowGroup, - useRenameTable, - useRunGroup, useUpdateColumn, useUpdateTableMetadata, useUpdateTableRow, @@ -55,28 +26,30 @@ import { } from '@/hooks/queries/tables' import { useInlineRename } from '@/hooks/use-inline-rename' import { extractCreatedRowId, useTableUndo } from '@/hooks/use-table-undo' -import { useLogDetailsUIStore } from '@/stores/logs/store' import type { DeletedRowSnapshot } from '@/stores/table/types' -import { useContextMenu, useRowExecution, useTable } from '../../hooks' +import { useContextMenu, useTable } from '../../hooks' import type { EditingCell, QueryOptions, SaveReason } from '../../types' -import { cleanCellValue, storageToDisplay } from '../../utils' -import { type ColumnConfigState, ColumnSidebar } from '../column-sidebar/column-sidebar' +import { + cleanCellValue, + generateColumnName as sharedGenerateColumnName, + storageToDisplay, +} from '../../utils' +import type { ColumnConfig } from '../column-config-sidebar' import { ContextMenu } 
from '../context-menu' -import { RowModal } from '../row-modal' -import { TableFilter } from '../table-filter' -import { CellContent } from './cells/cell-content' -import { ExpandedCellPopover } from './cells/expanded-cell-popover' -import { COL_WIDTH, COLUMN_SIDEBAR_WIDTH_CSS, SELECTION_TINT_BG } from './constants' -import { ColumnHeaderMenu } from './headers/column-header-menu' -import { COLUMN_TYPE_ICONS } from './headers/column-type-icon' -import { WorkflowGroupMetaCell } from './headers/workflow-group-meta-cell' +import { NewColumnDropdown } from '../new-column-dropdown' +import { RunStatusControl } from '../run-status-control' +import type { WorkflowConfig } from '../workflow-sidebar' +import { CellContent, ExpandedCellPopover } from './cells' +import { COL_WIDTH, SELECTION_TINT_BG } from './constants' +import { ColumnHeaderMenu, WorkflowGroupMetaCell } from './headers' import type { DisplayColumn } from './types' import { - areRowDepsSatisfied, buildHeaderGroups, type CellCoord, + classifyExecStatusMix, collectRowSnapshots, computeNormalizedSelection, + type ExecStatusMix, expandToDisplayColumns, moveCell, type NormalizedSelection, @@ -120,10 +93,23 @@ function rowSelectionCoversAll(sel: RowSelection, rows: TableRowType[]): boolean const COL_WIDTH_MIN = 80 const COL_WIDTH_AUTO_FIT_MAX = 1000 // Wide enough to host the row-number + per-row run button side by side. -// Single-digit row numbers (rows 1–9) and multi-digit (10+) need to render -// with the play button at the same x-position so the column doesn't reflow +// Single-digit row numbers (rows 1–9) and multi-digit need to render with +// the play button at the same x-position so the column doesn't reflow // row-by-row. -const CHECKBOX_COL_WIDTH = 56 +// +// Bucketed by the table's plan-derived `maxRows`, not the live count: a small +// table sized for ≤9,999 always renders the narrow gutter; an enterprise +// table sized up to 9,999,999 always renders the wide one. 
The gutter never +// changes width as rows are added. +// +// Tables without workflow columns drop the per-row run button (~28px), so +// the gutter shrinks accordingly. +const CHECKBOX_COL_WIDTH_SMALL_WITH_RUN = 48 +const CHECKBOX_COL_WIDTH_SMALL_NUMBER_ONLY = 32 +const CHECKBOX_COL_WIDTH_LARGE_WITH_RUN = 68 +const CHECKBOX_COL_WIDTH_LARGE_NUMBER_ONLY = 52 +/** Bucket boundary: tables sized for >9,999 rows get the wide gutter. */ +const LARGE_ROW_NUMBER_THRESHOLD = 10000 const ADD_COL_WIDTH = 120 const SKELETON_COL_COUNT = 4 const SKELETON_ROW_COUNT = 10 @@ -131,29 +117,156 @@ const ROW_HEIGHT_ESTIMATE = 35 const CELL = 'border-[var(--border)] border-r border-b px-2 py-[7px] align-middle select-none' const CELL_CHECKBOX = - 'border-[var(--border)] border-r border-b px-1 py-[7px] align-middle select-none' + 'sticky left-0 z-[6] border-[var(--border)] border-r border-b bg-[var(--bg)] px-1 py-[7px] align-middle select-none' const CELL_HEADER = 'border-[var(--border)] border-r border-b bg-[var(--bg)] px-2 py-[7px] text-left align-middle' const CELL_HEADER_CHECKBOX = - 'border-[var(--border)] border-r border-b bg-[var(--bg)] px-1 py-[7px] text-center align-middle' + 'sticky left-0 z-[12] border-[var(--border)] border-r border-b bg-[var(--bg)] px-1 py-[7px] text-center align-middle' +// Fixed height (not min-) so a Badge-rendered status pill doesn't make the row +// grow vs a plain-text neighbor. Sized to comfortably contain the badge; the +// flex centers plain text + badges on the same baseline. const CELL_CONTENT = - 'relative min-h-[20px] min-w-0 overflow-clip text-ellipsis whitespace-nowrap text-small' + 'relative flex h-[22px] min-w-0 items-center overflow-clip text-ellipsis whitespace-nowrap text-small' const SELECTION_OVERLAY = 'pointer-events-none absolute -top-px -right-px -bottom-px -left-px z-[5] border-[2px] border-[var(--selection)]' -interface TableProps { +/** + * Snapshot of grid selection state the wrapper needs to render ``. 
+ * Fired from a `useEffect` so the callback identity doesn't drive re-renders. + */ +export interface SelectionSnapshot { + /** Row ids in the action-bar selection (checkbox-row union with multi-row range). */ + actionBarRowIds: string[] + /** Total running/queued workflow runs across `actionBarRowIds`. */ + runningInActionBarSelection: number + /** Total running/queued workflow runs across ALL rows. Drives the page-header + * RunStatusControl ("N running, Stop all"). */ + totalRunning: number + /** Whether the table has any workflow-output columns (drives the Run/Stop visibility). */ + hasWorkflowColumns: boolean + /** Cells the Play / Refresh / Stop buttons act on. Null when the selection + * contains no workflow output cells. */ + selectedRunScope: { groupIds: string[]; rowIds: string[] } | null + /** Drives Play (`hasIncompleteOrFailed`) / Refresh (`hasCompleted`) / + * Stop (`hasInFlight`) visibility on the action bar. */ + selectionStats: ExecStatusMix + /** + * When the highlight resolves to exactly one workflow-group execution — + * same row, every highlighted column in the same workflow group — describe + * it so the action bar can offer "View execution". Covers both the 1×1 + * single-cell case and 1 row × N cols highlights within one group. `null` + * for multi-row, cross-group, or plain-column selections. + */ + singleWorkflowCell: { + rowId: string + groupId: string + executionId: string | null + /** True iff the exec is in a state that produced a server log + * (completed / error / running). Drives the View execution button. */ + canViewExecution: boolean + } | null +} + +interface TableGridProps { workspaceId?: string tableId?: string embedded?: boolean + /** + * Pixel width to reserve on the right of the table's scroll content for the + * currently-open slideout panel (column config, workflow config, or log + * details). Computed by the wrapper so it can subscribe to whichever panel + * width source is relevant. `0` when no panel is open. 
+ */ + sidebarReservedPx: number + /** + * Open requests fired by the grid (column header click, "+ New column" + * dropdown, context-menu items). The wrapper owns the actual panel state + * and enforces mutual-exclusion (only one slideout open at a time). + */ + onOpenColumnConfig: (cfg: ColumnConfig) => void + onOpenWorkflowConfig: (cfg: WorkflowConfig) => void + onOpenExecutionDetails: (executionId: string) => void + /** Open the row-edit modal for `row`. Wrapper renders the modal. */ + onOpenRowModal: (row: TableRowType) => void + /** Open the row-delete modal for `snapshots`. Wrapper renders the modal. */ + onRequestDeleteRows: (snapshots: DeletedRowSnapshot[]) => void + /** Open the delete-columns confirmation modal for `names`. Wrapper renders the modal. */ + onRequestDeleteColumns: (names: string[]) => void + /** Fire run for a single column (meta-cell Run menu). */ + onRunColumn: (groupId: string, runMode: RunMode, rowIds?: string[]) => void + /** Fire every runnable column on a single row (per-row gutter Play). */ + onRunRow: (rowId: string) => void + /** Fan out a run across every workflow group on `rowIds`. Used by context menu. */ + onRunRows: (rowIds: string[], runMode: RunMode) => void + /** Stop running workflows on `rowIds`. Per-row gutter Stop also funnels through here. */ + onStopRows: (rowIds: string[]) => void + /** Single-row stop for the per-row gutter button. */ + onStopRow: (rowId: string) => void + /** Wholesale cancel — page-header "Stop all". */ + onStopAll: () => void + /** Whether `useCancelTableRuns` is currently in flight. */ + cancelRunsPending: boolean + /** + * Fired whenever the action-bar selection or running-count derivations + * change. Wrapper uses this to render . + */ + onSelectionChange: (state: SelectionSnapshot) => void + /** Filter + sort. Lifted to wrapper so a single `useTable` call serves both. 
*/ + queryOptions: QueryOptions + /** + * Ref the grid populates with its `handleColumnRename` so the wrapper's + * sidebars can fire a column rename back into the grid (rewrites local + * `columnWidths` / `columnOrder` keys). The wrapper just forwards the call. + */ + columnRenameSinkRef: React.MutableRefObject<((oldName: string, newName: string) => void) | null> + /** + * Ref the grid populates with its post-row-delete cleanup (push undo, + * clear selection). The wrapper invokes after the row-delete modal's + * mutation succeeds. + */ + afterDeleteRowsSinkRef: React.MutableRefObject<((snapshots: DeletedRowSnapshot[]) => void) | null> + /** + * Ref the grid populates with its full delete-columns cascade (per-column + * mutation, undo push, columnOrder + columnWidths cleanup). The wrapper's + * delete-columns confirmation modal invokes this on confirm. + */ + confirmDeleteColumnsSinkRef: React.MutableRefObject<((names: string[]) => void) | null> + /** + * Ref the grid populates with its `pushUndo({ type: 'rename-table', ... })` + * call. The wrapper's table-rename `onSave` invokes this so the rename is + * undoable from anywhere in the grid. 
+ */ + pushTableRenameUndoSinkRef: React.MutableRefObject< + ((previousName: string, newName: string) => void) | null + > } -export function Table({ +export function TableGrid({ workspaceId: propWorkspaceId, tableId: propTableId, embedded, -}: TableProps = {}) { + sidebarReservedPx, + onOpenColumnConfig, + onOpenWorkflowConfig, + onOpenExecutionDetails, + onOpenRowModal, + onRequestDeleteRows, + onRequestDeleteColumns, + onRunColumn, + onRunRow, + onRunRows, + onStopRows, + onStopRow, + onStopAll, + cancelRunsPending, + onSelectionChange, + queryOptions, + columnRenameSinkRef, + afterDeleteRowsSinkRef, + confirmDeleteColumnsSinkRef, + pushTableRenameUndoSinkRef, +}: TableGridProps) { const params = useParams() - const router = useRouter() const workspaceId = propWorkspaceId || (params.workspaceId as string) const tableId = propTableId || (params.tableId as string) const posthog = usePostHog() @@ -163,12 +276,6 @@ export function Table({ captureEvent(posthog, 'table_opened', { table_id: tableId, workspace_id: workspaceId }) }, [tableId, workspaceId, posthog]) - const [queryOptions, setQueryOptions] = useState({ - filter: null, - sort: null, - }) - const [editingRow, setEditingRow] = useState(null) - const [deletingRows, setDeletingRows] = useState([]) const [editingCell, setEditingCell] = useState(null) const [initialCharacter, setInitialCharacter] = useState(null) const [expandedCell, setExpandedCell] = useState(null) @@ -178,10 +285,6 @@ export function Table({ const [isColumnSelection, setIsColumnSelection] = useState(false) const lastCheckboxRowRef = useRef(null) const isColumnSelectionRef = useRef(false) - const [showDeleteTableConfirm, setShowDeleteTableConfirm] = useState(false) - const [deletingColumns, setDeletingColumns] = useState(null) - const [isImportCsvOpen, setIsImportCsvOpen] = useState(false) - const [columnWidths, setColumnWidths] = useState>({}) const columnWidthsRef = useRef(columnWidths) columnWidthsRef.current = columnWidths @@ -217,7 +320,6 
@@ export function Table({ tableWorkflowGroups, workflowStates, columnSourceInfo, - workflowNameById, } = useTable({ workspaceId, tableId, queryOptions }) const fetchNextPageRef = useRef(fetchNextPage) @@ -231,6 +333,9 @@ export function Table({ const userPermissions = useUserPermissionsContext() const canEditRef = useRef(userPermissions.canEdit) canEditRef.current = userPermissions.canEdit + // Refs for callback props read inside effects with stable empty deps. + const onOpenRowModalRef = useRef(onOpenRowModal) + onOpenRowModalRef.current = onOpenRowModal const { contextMenu, @@ -238,7 +343,6 @@ export function Table({ closeContextMenu, } = useContextMenu() - const { runWorkflowGroup } = useRowExecution() const workflowsRef = useRef(workflows) workflowsRef.current = workflows @@ -250,18 +354,21 @@ export function Table({ const updateColumnMutation = useUpdateColumn({ workspaceId, tableId }) const deleteColumnMutation = useDeleteColumn({ workspaceId, tableId }) const updateMetadataMutation = useUpdateTableMetadata({ workspaceId, tableId }) - const cancelRunsMutation = useCancelTableRuns({ workspaceId, tableId }) - const runGroupMutation = useRunGroup({ workspaceId, tableId }) const deleteWorkflowGroupMutation = useDeleteWorkflowGroup({ workspaceId, tableId }) const updateWorkflowGroupMutation = useUpdateWorkflowGroup({ workspaceId, tableId }) - const handleRunGroup = useCallback( - (groupId: string, workflowId: string, runMode: 'all' | 'incomplete' = 'all') => { - runGroupMutation.mutate({ groupId, workflowId, runMode }) + const handleRunColumn = useCallback( + (groupId: string, runMode: RunMode = 'all', rowIds?: string[]) => { + onRunColumn(groupId, runMode, rowIds) }, - // mutate is stable; intentionally excluded from deps - // eslint-disable-next-line react-hooks/exhaustive-deps - [] + [onRunColumn] + ) + + const handleViewWorkflow = useCallback( + (workflowId: string) => { + window.open(`/workspace/${workspaceId}/w/${workflowId}`, '_blank', 
'noopener,noreferrer') + }, + [workspaceId] ) function handleColumnOrderChange(order: string[]) { @@ -296,6 +403,9 @@ export function Table({ ...(updatedOrder ? { columnOrder: updatedOrder } : {}), }) } + // Populate the wrapper's sink so its sidebars can fire renames back into + // the grid. Reads through refs, so identity stability isn't required. + columnRenameSinkRef.current = handleColumnRename function getColumnWidths() { return columnWidthsRef.current @@ -341,6 +451,24 @@ export function Table({ return expandToDisplayColumns(ordered, tableWorkflowGroups) }, [columns, columnOrder, tableWorkflowGroups]) + const hasWorkflowColumns = columns.some((c) => !!c.workflowGroupId) + /** + * The sticky left column hosts the row number / checkbox always, plus a + * per-row run button only when the table has workflow columns. Width is + * picked from the table's plan-derived `maxRows` so a free-tier table + * (≤9,999) gets the narrow gutter and an enterprise table (up to + * 9,999,999) gets the wide one. Bucketed, not continuous, so the gutter + * never reflows as rows are added. + */ + const isLargeRowCountTable = (tableData?.maxRows ?? 0) >= LARGE_ROW_NUMBER_THRESHOLD + const checkboxColWidth = isLargeRowCountTable + ? hasWorkflowColumns + ? CHECKBOX_COL_WIDTH_LARGE_WITH_RUN + : CHECKBOX_COL_WIDTH_LARGE_NUMBER_ONLY + : hasWorkflowColumns + ? CHECKBOX_COL_WIDTH_SMALL_WITH_RUN + : CHECKBOX_COL_WIDTH_SMALL_NUMBER_ONLY + const headerGroups = useMemo( () => buildHeaderGroups(displayColumns, tableWorkflowGroups), [displayColumns, tableWorkflowGroups] @@ -357,18 +485,18 @@ export function Table({ const colsWidth = isLoadingTable ? displayColCount * COL_WIDTH : displayColumns.reduce((sum, col) => sum + (columnWidths[col.key] ?? 
COL_WIDTH), 0) - return CHECKBOX_COL_WIDTH + colsWidth + ADD_COL_WIDTH - }, [isLoadingTable, displayColCount, displayColumns, columnWidths]) + return checkboxColWidth + colsWidth + ADD_COL_WIDTH + }, [isLoadingTable, displayColCount, displayColumns, columnWidths, checkboxColWidth]) const resizeIndicatorLeft = useMemo(() => { if (!resizingColumn) return 0 - let left = CHECKBOX_COL_WIDTH + let left = checkboxColWidth for (const col of displayColumns) { left += columnWidths[col.key] ?? COL_WIDTH if (col.key === resizingColumn) return left } return 0 - }, [resizingColumn, displayColumns, columnWidths]) + }, [resizingColumn, displayColumns, columnWidths, checkboxColWidth]) const dropColumnBounds = useMemo(() => { if (!dropTargetColumnName || !dragColumnName) return null @@ -389,7 +517,7 @@ export function Table({ (dropSide === 'left' && targetGroupStart === dragGroup + dragGroupSize) if (wouldBeNoOp) return null - let left = CHECKBOX_COL_WIDTH + let left = checkboxColWidth for (let i = 0; i < cols.length; i++) { const col = cols[i] const w = columnWidths[col.key] ?? 
COL_WIDTH @@ -408,7 +536,14 @@ export function Table({ left += w } return null - }, [dropTargetColumnName, dragColumnName, dropSide, displayColumns, columnWidths]) + }, [ + dropTargetColumnName, + dragColumnName, + dropSide, + displayColumns, + columnWidths, + checkboxColWidth, + ]) const isAllRowsSelected = useMemo( () => rowSelectionCoversAll(rowSelection, rows), @@ -438,23 +573,6 @@ export function Table({ selectionFocusRef.current = selectionFocus isColumnSelectionRef.current = isColumnSelection - const deleteTableMutation = useDeleteTable(workspaceId) - const renameTableMutation = useRenameTable(workspaceId) - - const tableHeaderRename = useInlineRename({ - onSave: (_id, name) => { - if (tableData) { - pushUndoRef.current({ - type: 'rename-table', - tableId, - previousName: tableData.name, - newName: name, - }) - } - renameTableMutation.mutate({ tableId, name }) - }, - }) - const columnRename = useInlineRename({ onSave: (columnName, newName) => { pushUndoRef.current({ type: 'rename-column', oldName: columnName, newName }) @@ -463,21 +581,6 @@ export function Table({ }, }) - const handleNavigateBack = useCallback(() => { - router.push(`/workspace/${workspaceId}/tables`) - }, [router, workspaceId]) - - const handleDeleteTable = useCallback(async () => { - try { - await deleteTableMutation.mutateAsync(tableId) - setShowDeleteTableConfirm(false) - router.push(`/workspace/${workspaceId}/tables`) - } catch { - setShowDeleteTableConfirm(false) - } - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [tableId, router, workspaceId]) - const toggleBooleanCell = useCallback( (rowId: string, columnName: string, currentValue: unknown) => { const newValue = !currentValue @@ -543,11 +646,11 @@ export function Table({ } if (snapshots.length > 0) { - setDeletingRows(snapshots) + onRequestDeleteRows(snapshots) } closeContextMenu() - }, [contextMenu.row, closeContextMenu]) + }, [contextMenu.row, closeContextMenu, onRequestDeleteRows]) const handleInsertRow = 
useCallback( (offset: 0 | 1) => { @@ -575,27 +678,38 @@ export function Table({ const contextMenuColumnInfo = useMemo<{ isWorkflowColumn: boolean executionId: string | null + hasStartedRun: boolean }>(() => { if (!contextMenu.row || !contextMenu.columnName) { - return { isWorkflowColumn: false, executionId: null } + return { isWorkflowColumn: false, executionId: null, hasStartedRun: false } } const column = columnsRef.current.find((c) => c.name === contextMenu.columnName) const groupId = column?.workflowGroupId if (!column || !groupId) { - return { isWorkflowColumn: false, executionId: null } + return { isWorkflowColumn: false, executionId: null, hasStartedRun: false } } const exec = contextMenu.row.executions?.[groupId] - return { isWorkflowColumn: true, executionId: exec?.executionId ?? null } + // Only `completed` / `error` / `running` cells are guaranteed to have a + // server-side execution log. `queued` / `pending` haven't started yet; + // `cancelled` may have been cancelled before the worker ever picked the + // job up, so its executionId can't be relied on either. + const hasStartedRun = + exec?.status === 'completed' || exec?.status === 'error' || exec?.status === 'running' + return { + isWorkflowColumn: true, + executionId: exec?.executionId ?? 
null, + hasStartedRun, + } }, [contextMenu.row, contextMenu.columnName]) const contextMenuExecutionId = contextMenuColumnInfo.executionId const contextMenuIsWorkflowColumn = contextMenuColumnInfo.isWorkflowColumn + const contextMenuHasStartedRun = contextMenuColumnInfo.hasStartedRun const handleViewExecution = useCallback(() => { if (!contextMenuExecutionId) return - setConfigState(null) - setExecutionDetailsId(contextMenuExecutionId) + onOpenExecutionDetails(contextMenuExecutionId) closeContextMenu() - }, [contextMenuExecutionId, closeContextMenu]) + }, [contextMenuExecutionId, onOpenExecutionDetails, closeContextMenu]) const handleDuplicateRow = useCallback(() => { const contextRow = contextMenu.row @@ -764,6 +878,20 @@ export function Table({ lastCheckboxRowRef.current = null }, []) + // Populate the wrapper's after-delete sink so the row-delete modal can run + // grid cleanup (push undo + clear selection) once its mutation succeeds. + afterDeleteRowsSinkRef.current = (snapshots: DeletedRowSnapshot[]) => { + pushUndoRef.current({ type: 'delete-rows', rows: snapshots }) + handleClearSelection() + } + + // Populate the wrapper's table-rename undo sink. The wrapper's + // breadcrumb rename calls back here so the rename is part of the grid's undo + // stack (Cmd-Z restores the previous name). 
+ pushTableRenameUndoSinkRef.current = (previousName: string, newName: string) => { + pushUndoRef.current({ type: 'rename-table', tableId, previousName, newName }) + } + const handleColumnSelect = useCallback((colIndex: number, shiftKey: boolean) => { const lastRow = rowsRef.current.length - 1 if (lastRow < 0) return @@ -900,19 +1028,29 @@ export function Table({ }, []) const handleColumnDragOver = useCallback((columnName: string, side: 'left' | 'right') => { + const dragged = dragColumnNameRef.current + const cols = schemaColumnsRef.current + const targetCol = cols.find((c) => c.name === columnName) + const targetGid = targetCol?.workflowGroupId + // Suppress drop targeting while hovering siblings of the dragged column's // own group: reordering inside a group is meaningless (the group renders // as a unit) and the chasing indicator just flickers. - const dragged = dragColumnNameRef.current if (dragged) { - const cols = schemaColumnsRef.current const draggedGid = cols.find((c) => c.name === dragged)?.workflowGroupId - const targetGid = cols.find((c) => c.name === columnName)?.workflowGroupId if (draggedGid && draggedGid === targetGid) { if (dropTargetColumnNameRef.current !== null) setDropTargetColumnName(null) return } } + + // Workflow groups: skip per-`
` trigger. Same dropdown content either way. */ + trigger: 'header' | 'inline-header' + disabled: boolean + onPickType: (type: ColumnDefinition['type']) => void + onPickWorkflow: () => void +} + +/** + * "+ New column" dropdown — the single entry point for creating a column. + * Lists every column type plus "Workflow"; picking a type opens the right + * sidebar pre-seeded. + */ +export function NewColumnDropdown({ + trigger, + disabled, + onPickType, + onPickWorkflow, +}: NewColumnDropdownProps) { + const menu = ( + + + {trigger === 'header' ? ( + + ) : ( + + )} + + + {COLUMN_TYPE_OPTIONS.map((option) => { + const Icon = option.icon + const onSelect = + option.type === 'workflow' + ? onPickWorkflow + : () => onPickType(option.type as ColumnDefinition['type']) + return ( + + + {option.label} + + ) + })} + + + ) + + // The in-table trigger lives inside a `
`. The + // header trigger lives in the page header so it sits inline. + return trigger === 'inline-header' ? {menu}
{name} - {onRunGroup && ( + {onRunColumn && (
` writes and let `handleScrollDragOver` + // do the bookkeeping. The scroll handler computes side from the group's + // full bounds, so it stays stable across sibling cursor moves; the per-th + // events would otherwise oscillate name + side as the cursor crosses each + // sibling's midpoint. + if (targetGid) return + if (columnName === dropTargetColumnNameRef.current && side === dropSideRef.current) return setDropTargetColumnName(columnName) setDropSide(side) @@ -931,7 +1069,15 @@ export function Table({ const side = dropSideRef.current if (target && dragged !== target) { const schemaCols = schemaColumnsRef.current - const currentOrder = columnOrderRef.current ?? schemaCols.map((c) => c.name) + // `columnOrder` is the user-edited persisted order. Tables created + // before the server kept it in sync with `addColumn` may have entries + // missing — append any unknown schema names so the dragged column is + // always indexable. The next reorder write persists the reconciled + // list, healing the table going forward. + const persisted = columnOrderRef.current ?? schemaCols.map((c) => c.name) + const known = new Set(persisted) + const missing = schemaCols.map((c) => c.name).filter((n) => !known.has(n)) + const currentOrder = missing.length > 0 ? 
[...persisted, ...missing] : persisted // Group-aware reorder: a workflow group's outputs must stay contiguous in // the persisted column order (`workflow-columns.ts` validates this on @@ -1055,7 +1201,7 @@ export function Table({ const cols = columnsRef.current const draggedGid = cols.find((c) => c.name === dragColumnNameRef.current)?.workflowGroupId - let left = CHECKBOX_COL_WIDTH + let left = checkboxColWidth let i = 0 while (i < cols.length) { const col = cols[i] @@ -1115,7 +1261,7 @@ export function Table({ }, [tableData?.id]) useEffect(() => { - if (!tableData?.metadata || metadataSeededRef.current) return + if (!tableData?.metadata) return if (!tableData.metadata.columnWidths && !tableData.metadata.columnOrder) return // First load: seed both from the server and remember we've seeded. if (!metadataSeededRef.current) { @@ -1165,6 +1311,87 @@ export function Table({ return () => document.removeEventListener('mouseup', handleMouseUp) }, []) + /** + * Auto-scroll the table while a cell-drag selection is in progress and the + * cursor enters a "hot zone" near the top or bottom of the scroll + * container. Scroll velocity ramps with proximity to the edge (max ~14px / + * frame at the very edge). The horizontal axis is intentionally left out: + * the fixed sticky checkbox column makes left-edge hot zones awkward and + * the table is rarely wider than the viewport in practice. + */ + useEffect(() => { + const HOT_ZONE_PX = 48 + const MAX_VELOCITY_PX = 14 + let pointerX: number | null = null + let pointerY: number | null = null + let rafId: number | null = null + + /** + * After auto-scroll moves the table under the cursor, no `mouseenter` + * fires on newly-revealed cells, so the selection focus would stay stuck + * on whatever cell was under the cursor when the cursor stopped moving. + * Manually re-pick the cell under the (unchanged) cursor coords and feed + * its row/col into the selection so the highlight expands as we scroll. 
+ */ + const updateFocusUnderCursor = () => { + if (pointerX === null || pointerY === null) return + const target = document.elementFromPoint(pointerX, pointerY) + if (!target) return + const td = (target as HTMLElement).closest('td[data-row][data-col]') as HTMLElement | null + if (!td) return + const rowIndex = Number.parseInt(td.getAttribute('data-row') ?? '', 10) + const colIndex = Number.parseInt(td.getAttribute('data-col') ?? '', 10) + if (Number.isNaN(rowIndex) || Number.isNaN(colIndex)) return + setSelectionFocus({ rowIndex, colIndex }) + } + + const tick = () => { + rafId = null + const el = scrollRef.current + if (!isDraggingRef.current || !el || pointerY === null) return + const rect = el.getBoundingClientRect() + const distFromTop = pointerY - rect.top + const distFromBottom = rect.bottom - pointerY + let dy = 0 + if (distFromTop < HOT_ZONE_PX) { + const intensity = 1 - Math.max(0, distFromTop) / HOT_ZONE_PX + dy = -Math.ceil(intensity * MAX_VELOCITY_PX) + } else if (distFromBottom < HOT_ZONE_PX) { + const intensity = 1 - Math.max(0, distFromBottom) / HOT_ZONE_PX + dy = Math.ceil(intensity * MAX_VELOCITY_PX) + } + if (dy !== 0) { + el.scrollTop += dy + updateFocusUnderCursor() + rafId = requestAnimationFrame(tick) + } + } + + const handleMove = (e: MouseEvent) => { + if (!isDraggingRef.current) return + pointerX = e.clientX + pointerY = e.clientY + if (rafId === null) rafId = requestAnimationFrame(tick) + } + + const handleStop = () => { + pointerX = null + pointerY = null + if (rafId !== null) { + cancelAnimationFrame(rafId) + rafId = null + } + } + + document.addEventListener('mousemove', handleMove) + document.addEventListener('mouseup', handleStop) + return () => { + document.removeEventListener('mousemove', handleMove) + document.removeEventListener('mouseup', handleStop) + handleStop() + } + }, []) + useEffect(() => { anchorRowIdRef.current = selectionAnchor ? (rowsRef.current[selectionAnchor.rowIndex]?.id ?? 
null) @@ -1270,6 +1497,19 @@ export function Table({ const rowArrayIndex = rowsRef.current.findIndex((r) => r.id === rowId) const row = rowArrayIndex !== -1 ? rowsRef.current[rowArrayIndex] : null + + // Workflow-output cell with no value (status pill showing) → enter edit + // mode with a blank input so the user can write a value over the status. + // Escape cancels without persisting. + if (column?.workflowGroupId && row && canEditRef.current) { + const cellValue = row.data[columnName] + if (cellValue === null || cellValue === undefined || cellValue === '') { + setEditingCell({ rowId, columnName }) + setInitialCharacter('') + return + } + } + const colIndex = columnsRef.current.findIndex((c) => c.key === columnKey) let overflows = true if (row && colIndex !== -1) { @@ -1497,7 +1737,7 @@ export function Table({ e.preventDefault() const row = currentRows[anchor.rowIndex] if (row) { - setEditingRow(row) + onOpenRowModalRef.current(row) } return } @@ -1992,25 +2232,10 @@ export function Table({ scrollRef.current?.focus({ preventScroll: true }) }, []) - const generateColumnName = useCallback(() => { - const existing = schemaColumnsRef.current.map((c) => c.name.toLowerCase()) - let name = 'untitled' - let i = 2 - while (existing.includes(name.toLowerCase())) { - name = `untitled_${i}` - i++ - } - return name - }, []) - - const handleAddColumn = useCallback(() => { - // Open the sidebar in `'create'` mode — nothing is persisted until the - // user fills in name/type and hits Save. The sidebar's save flow handles - // both scalar (`addColumn`) and workflow-group (`addWorkflowGroup`) paths. 
- const name = generateColumnName() - setExecutionDetailsId(null) - setConfigState({ mode: 'create', columnName: name, proposedName: name }) - }, [generateColumnName]) + const generateColumnName = useCallback( + () => sharedGenerateColumnName(schemaColumnsRef.current), + [] + ) const handleChangeType = useCallback((columnName: string, newType: ColumnDefinition['type']) => { const column = columnsRef.current.find((c) => c.name === columnName) @@ -2090,33 +2315,40 @@ export function Table({ ) /** - * Config state for the side panel: - * - `null` → closed. - * - `{ mode: 'edit' }` → configuring an existing column (any type). - * - `{ mode: 'new' }` → user changed an existing column to workflow; not persisted until Save. - * - `{ mode: 'create' }` → user picked a workflow from "Add column"; column doesn't exist yet, - * created on Save in a single POST. - */ - const [configState, setConfigState] = useState(null) - /** Execution id whose run details are open in the slideout. */ - const [executionDetailsId, setExecutionDetailsId] = useState(null) - /** - * Right padding added to the table's scroll content while a slideout panel - * is open, equal to the panel's width. Without it, the rightmost columns are - * clipped under the panel and there's no way to scroll them into view. - * The two panels are mutually exclusive (each opener closes the other). + * Open the column-config sidebar pre-seeded with the chosen scalar type. + * Nothing is persisted until the user fills in the name and hits Save. */ - const logPanelWidth = useLogDetailsUIStore((state) => state.panelWidth) - const sidebarReservedWidth = configState - ? COLUMN_SIDEBAR_WIDTH_CSS - : executionDetailsId - ? 
`${logPanelWidth}px` - : '0px' - - const handleConfigureColumn = useCallback((columnName: string) => { - setExecutionDetailsId(null) - setConfigState({ mode: 'edit', columnName }) - }, []) + const handleAddColumnOfType = useCallback( + (type: ColumnDefinition['type']) => { + onOpenColumnConfig({ mode: 'create', proposedName: generateColumnName(), type }) + }, + [generateColumnName, onOpenColumnConfig] + ) + + /** Open the workflow-config sidebar to spawn a brand-new workflow group. */ + const handleAddWorkflowColumn = useCallback(() => { + onOpenWorkflowConfig({ mode: 'create', proposedName: generateColumnName() }) + }, [generateColumnName, onOpenWorkflowConfig]) + + const handleConfigureColumn = useCallback( + (columnName: string) => { + const column = columnsRef.current.find((c) => c.name === columnName) + if (column?.workflowGroupId) { + // Workflow-output column header → single-output sub-mode. + onOpenWorkflowConfig({ mode: 'edit-output', columnName }) + } else { + onOpenColumnConfig({ mode: 'edit', columnName }) + } + }, + [onOpenColumnConfig, onOpenWorkflowConfig] + ) + + const handleConfigureWorkflowGroup = useCallback( + (groupId: string) => { + onOpenWorkflowConfig({ mode: 'edit-group', groupId }) + }, + [onOpenWorkflowConfig] + ) const handleDeleteWorkflowGroup = useCallback( (groupId: string) => { @@ -2194,15 +2426,17 @@ export function Table({ // group with ≥1 output, hide them directly — no destructive-confirm // modal, since the workflow can re-produce the value any time. 
if (hideWorkflowOutputColumns(names)) return - setDeletingColumns(names) + onRequestDeleteColumns(names) }, - [resolveDeletionNames, hideWorkflowOutputColumns] + [resolveDeletionNames, hideWorkflowOutputColumns, onRequestDeleteColumns] ) - const handleDeleteColumnConfirm = useCallback(() => { - if (!deletingColumns || deletingColumns.length === 0) return - const columnsToDelete = [...deletingColumns] - setDeletingColumns(null) + // Populated as a sink so the wrapper's delete-columns modal can run the + // full cascade (per-column mutation + undo + columnOrder/columnWidths + // cleanup) without lifting any of that grid-internal state. + confirmDeleteColumnsSinkRef.current = (names: string[]) => { + if (!names || names.length === 0) return + const columnsToDelete = [...names] let currentOrder = columnOrderRef.current ? [...columnOrderRef.current] : null const cols = schemaColumnsRef.current @@ -2294,190 +2528,62 @@ export function Table({ setSelectionFocus(null) setIsColumnSelection(false) deleteNext(0) - }, [deletingColumns]) - - const handleSortChange = useCallback((column: string, direction: SortDirection) => { - setQueryOptions((prev) => ({ ...prev, sort: { [column]: direction } })) - }, []) - - const handleSortClear = useCallback(() => { - setQueryOptions((prev) => ({ ...prev, sort: null })) - }, []) - - const handleFilterApply = useCallback((filter: Filter | null) => { - setQueryOptions((prev) => ({ ...prev, filter })) - }, []) - - const [filterOpen, setFilterOpen] = useState(false) - - const handleFilterToggle = useCallback(() => { - setFilterOpen((prev) => !prev) - }, []) - - const handleFilterClose = useCallback(() => { - setFilterOpen(false) - }, []) - - const columnOptions = useMemo( - () => - displayColumns.map((col) => ({ - id: col.name, - label: col.name, - type: col.type, - icon: COLUMN_TYPE_ICONS[col.type], - })), - [displayColumns] - ) - - const tableDataRef = useRef(tableData) - tableDataRef.current = tableData - - const handleStartTableRename = 
useCallback(() => { - const data = tableDataRef.current - if (data) tableHeaderRename.startRename(tableId, data.name) - }, [tableHeaderRename.startRename, tableId]) - - const handleShowDeleteTableConfirm = useCallback(() => { - setShowDeleteTableConfirm(true) - }, []) - - const hasTableData = !!tableData - - const breadcrumbs = useMemo( - () => [ - { label: 'Tables', onClick: handleNavigateBack }, - { - label: tableData?.name ?? '', - editing: tableHeaderRename.editingId - ? { - isEditing: true, - value: tableHeaderRename.editValue, - onChange: tableHeaderRename.setEditValue, - onSubmit: tableHeaderRename.submitRename, - onCancel: tableHeaderRename.cancelRename, - } - : undefined, - dropdownItems: [ - { - label: 'Rename', - icon: Pencil, - disabled: !hasTableData, - onClick: handleStartTableRename, - }, - { - label: 'Delete', - icon: Trash, - disabled: !hasTableData, - onClick: handleShowDeleteTableConfirm, - }, - ], - }, - ], - [ - handleNavigateBack, - tableData?.name, - tableHeaderRename.editingId, - tableHeaderRename.editValue, - tableHeaderRename.setEditValue, - tableHeaderRename.submitRename, - tableHeaderRename.cancelRename, - hasTableData, - handleStartTableRename, - handleShowDeleteTableConfirm, - ] - ) - - const createTrigger = useMemo( - () => - userPermissions.canEdit ? ( - - ) : null, - [handleAddColumn, addColumnMutation.isPending, userPermissions.canEdit] - ) - - const handleExportCsv = useCallback(async () => { - if (!tableData) return - try { - await downloadTableExport(tableData.id, tableData.name) - } catch (err) { - logger.error('Failed to export table:', err) - toast.error('Failed to export table') - } - }, [tableData]) - - const headerActions = useMemo( - () => - tableData - ? 
[ - { - label: 'Import CSV', - icon: Upload, - onClick: () => setIsImportCsvOpen(true), - disabled: userPermissions.canEdit !== true, - }, - { - label: 'Export CSV', - icon: Download, - onClick: () => void handleExportCsv(), - disabled: tableData.rowCount === 0, - }, - ] - : undefined, - [tableData, userPermissions.canEdit, handleExportCsv] - ) - - const activeSortState = useMemo(() => { - if (!queryOptions.sort) return null - const entries = Object.entries(queryOptions.sort) - if (entries.length === 0) return null - const [column, direction] = entries[0] - return { column, direction } - }, [queryOptions.sort]) - - const sortConfig = useMemo( - () => ({ - options: columnOptions, - active: activeSortState, - onSort: handleSortChange, - onClear: handleSortClear, - }), - [columnOptions, activeSortState, handleSortChange, handleSortClear] - ) - - const selectedRowCount = useMemo(() => { - const contextRow = contextMenu.isOpen ? contextMenu.row : null - if (!contextRow) return 1 - - if (rowSelection.kind === 'all') { - return rows.some((r) => r.id === contextRow.id) ? Math.max(rows.length, 1) : 1 - } + } - if (rowSelection.kind === 'some' && rowSelection.ids.has(contextRow.id)) { - let count = 0 + /** + * Row ids the context menu acts on. If the right-clicked row is part of the + * gutter row selection, the materialized selection; if it's inside the active + * range selection, the range; otherwise just the row itself. Used by both the + * count label and the multi-row "Run workflows" action. + */ + const contextMenuRowIds = useMemo(() => { + if (!contextMenu.isOpen || !contextMenu.row) return [] + if ( + !rowSelectionIsEmpty(rowSelection) && + rowSelectionIncludes(rowSelection, contextMenu.row.id) + ) { + const ids: string[] = [] for (const row of rows) { - if (rowSelection.ids.has(row.id)) count++ + if (rowSelectionIncludes(rowSelection, row.id)) ids.push(row.id) } - return Math.max(count, 1) + return ids.length > 0 ? 
ids : [contextMenu.row.id] } - const sel = normalizedSelection - if (!sel) return 1 - - const contextRowArrayIndex = rows.findIndex((r) => r.id === contextRow.id) - if (contextRowArrayIndex < sel.startRow || contextRowArrayIndex > sel.endRow) return 1 - - const start = Math.max(0, sel.startRow) - const end = Math.min(rows.length - 1, sel.endRow) - return Math.max(end - start + 1, 1) + if (sel) { + const contextRowArrayIndex = rows.findIndex((r) => r.id === contextMenu.row!.id) + const isInSelection = + contextRowArrayIndex >= sel.startRow && contextRowArrayIndex <= sel.endRow + if (isInSelection) { + const ids: string[] = [] + const start = Math.max(0, sel.startRow) + const end = Math.min(rows.length - 1, sel.endRow) + for (let r = start; r <= end; r++) { + const row = rows[r] + if (row) ids.push(row.id) + } + return ids.length > 0 ? ids : [contextMenu.row.id] + } + } + return [contextMenu.row.id] }, [contextMenu.isOpen, contextMenu.row, rowSelection, normalizedSelection, rows]) + const selectedRowCount = contextMenuRowIds.length || 1 + const pendingUpdate = updateRowMutation.isPending ? updateRowMutation.variables : null - const workflowColumnNames = useMemo( - () => columns.filter((c) => !!c.workflowGroupId).map((c) => c.name), - [columns] - ) - const hasWorkflowColumns = workflowColumnNames.length > 0 + /** + * Row ids for the current multi-row selection. Drives "Run N selected rows" + * in the workflow-group run menu — `null` when there's no multi-selection so + * the menu collapses to "Run all rows". + */ + const selectedRowIds = useMemo(() => { + if (rowSelectionIsEmpty(rowSelection)) return null + const ids: string[] = [] + for (const row of rows) { + if (rowSelectionIncludes(rowSelection, row.id)) ids.push(row.id) + } + return ids.length > 0 ? ids : null + }, [rowSelection, rows]) const { runningByRowId, totalRunning } = useMemo(() => { const byRow = new Map() @@ -2486,7 +2592,7 @@ export function Table({ let count = 0 const executions = row.executions ?? 
{} for (const gid in executions) { - if (executions[gid]?.status === 'running') count++ + if (isExecInFlight(executions[gid])) count++ } if (count > 0) { byRow.set(row.id, count) @@ -2496,42 +2602,216 @@ export function Table({ return { runningByRowId: byRow, totalRunning: total } }, [rows]) - const cancelRunsMutate = cancelRunsMutation.mutate + // Context-menu wrappers: act on `contextMenuRowIds`, then close the menu. + // Mirror the action bar's Play / Refresh split: Play fills empty/failed, + // Refresh re-runs everything (including completed cells). + const handleRunWorkflowsOnSelection = () => { + onRunRows(contextMenuRowIds, 'incomplete') + closeContextMenu() + } + const handleRefreshWorkflowsOnSelection = () => { + onRunRows(contextMenuRowIds, 'all') + closeContextMenu() + } + const handleStopWorkflowsOnSelection = () => { + onStopRows(contextMenuRowIds) + closeContextMenu() + } - const handleStopAll = useCallback(() => { - if (totalRunning === 0) return - cancelRunsMutate({ scope: 'all' }) - }, [totalRunning, cancelRunsMutate]) + // Total running/queued cells across the rows the context menu is acting on; + // drives the "Stop N running workflows" item, shown only when > 0. + const runningInContextSelection = contextMenuRowIds.reduce( + (total, rowId) => total + (runningByRowId.get(rowId) ?? 0), + 0 + ) - const handleStopRow = useCallback( - (rowId: string) => { - cancelRunsMutate({ scope: 'row', rowId }) - }, - [cancelRunsMutate] + // Action-bar selection covers both gutter row-selection AND multi-row + // range selection (clicking + dragging across rows), matching how the + // right-click context menu treats them. Single-row range doesn't trigger + // the bar — only multi-row, since the per-row gutter button already covers + // that case. Gutter selection wins when both exist. 
+ const actionBarRowIds = useMemo(() => { + if (!rowSelectionIsEmpty(rowSelection)) { + const ids: string[] = [] + for (const row of rows) { + if (rowSelectionIncludes(rowSelection, row.id)) ids.push(row.id) + } + return ids + } + const sel = normalizedSelection + if (sel && sel.endRow > sel.startRow) { + const ids: string[] = [] + const start = Math.max(0, sel.startRow) + const end = Math.min(rows.length - 1, sel.endRow) + for (let r = start; r <= end; r++) { + const row = rows[r] + if (row) ids.push(row.id) + } + return ids + } + return [] + }, [rowSelection, normalizedSelection, rows]) + const runningInActionBarSelection = actionBarRowIds.reduce( + (total, rowId) => total + (runningByRowId.get(rowId) ?? 0), + 0 + ) + + /** + * Selection that resolves to exactly one workflow-group execution — same + * row, every highlighted column belonging to the same workflow group. Drives + * the action bar's per-execution mode (View execution / Run cell / Stop + * cell). Includes the single-cell case (1×1) and the "highlight a row's + * workflow outputs" case (1 row × N cols, all in one group). Null for + * multi-row selections, plain columns, or no selection. + */ + const singleWorkflowCell = useMemo(() => { + const sel = normalizedSelection + if (!sel) return null + if (sel.startRow !== sel.endRow) return null + const row = rows[sel.startRow] + if (!row) return null + const firstCol = displayColumns[sel.startCol] + const groupId = firstCol?.workflowGroupId + if (!groupId) return null + // All columns in the highlight must be in the same workflow group, else + // we'd be straddling two executions. + for (let c = sel.startCol + 1; c <= sel.endCol; c++) { + if (displayColumns[c]?.workflowGroupId !== groupId) return null + } + const exec = row.executions?.[groupId] + const status = exec?.status + return { + rowId: row.id, + groupId, + executionId: exec?.executionId ?? 
null, + canViewExecution: status === 'completed' || status === 'error' || status === 'running', + } + }, [normalizedSelection, rows, displayColumns]) + + const tableWorkflowGroupIds = useMemo( + () => tableWorkflowGroups.map((g) => g.id), + [tableWorkflowGroups] ) + // Drives Run vs Refresh visibility on the context menu — same classifier + // the action bar uses, so both surfaces stay in sync. + const contextMenuStats = useMemo( + () => classifyExecStatusMix(rows, new Set(contextMenuRowIds), tableWorkflowGroupIds), + [contextMenuRowIds, rows, tableWorkflowGroupIds] + ) + + // Run scope is derived from one of two selection sources: + // - rowSelection (gutter whole-row selection) → those rows × every workflow group + // - normalizedSelection rectangle covering workflow-output columns → + // rows in the rectangle × distinct workflow groups inside it + const selectedRunScope = useMemo(() => { + if (tableWorkflowGroupIds.length === 0) return null + if (!rowSelectionIsEmpty(rowSelection)) { + const rowIds: string[] = [] + for (const row of rows) { + if (rowSelectionIncludes(rowSelection, row.id)) rowIds.push(row.id) + } + if (rowIds.length === 0) return null + return { groupIds: tableWorkflowGroupIds, rowIds } + } + const sel = normalizedSelection + if (!sel) return null + const groupIdsInRect = new Set() + for (let c = Math.max(0, sel.startCol); c <= sel.endCol; c++) { + const gid = displayColumns[c]?.workflowGroupId + if (gid) groupIdsInRect.add(gid) + } + if (groupIdsInRect.size === 0) return null + const rowIds: string[] = [] + const startRow = Math.max(0, sel.startRow) + const endRow = Math.min(rows.length - 1, sel.endRow) + for (let r = startRow; r <= endRow; r++) { + const row = rows[r] + if (row) rowIds.push(row.id) + } + if (rowIds.length === 0) return null + return { groupIds: [...groupIdsInRect], rowIds } + }, [rowSelection, normalizedSelection, rows, displayColumns, tableWorkflowGroupIds]) + + const selectionStats = useMemo(() => { + if (!selectedRunScope) 
{ + return { hasIncompleteOrFailed: false, hasCompleted: false, hasInFlight: false } + } + return classifyExecStatusMix(rows, new Set(selectedRunScope.rowIds), selectedRunScope.groupIds) + }, [selectedRunScope, rows]) + + // Emit selection snapshots so the wrapper can render . + // The grid can't fold this into individual event handlers (running counts + // come from React Query refetches, not user events) so it's intentionally + // an effect — but we content-compare against the last sent snapshot so a + // re-render where nothing actually changed doesn't churn the wrapper. + const onSelectionChangeRef = useRef(onSelectionChange) + onSelectionChangeRef.current = onSelectionChange + const lastSelectionSnapshotRef = useRef(null) + useEffect(() => { + const prev = lastSelectionSnapshotRef.current + const sameSingleCell = + (prev?.singleWorkflowCell ?? null) === null && singleWorkflowCell === null + ? true + : prev?.singleWorkflowCell && + singleWorkflowCell && + prev.singleWorkflowCell.rowId === singleWorkflowCell.rowId && + prev.singleWorkflowCell.groupId === singleWorkflowCell.groupId && + prev.singleWorkflowCell.executionId === singleWorkflowCell.executionId && + prev.singleWorkflowCell.canViewExecution === singleWorkflowCell.canViewExecution + const sameRunScope = + (prev?.selectedRunScope ?? null) === null && selectedRunScope === null + ? 
true + : prev?.selectedRunScope && + selectedRunScope && + prev.selectedRunScope.groupIds.length === selectedRunScope.groupIds.length && + prev.selectedRunScope.rowIds.length === selectedRunScope.rowIds.length && + prev.selectedRunScope.groupIds.every((id, i) => id === selectedRunScope.groupIds[i]) && + prev.selectedRunScope.rowIds.every((id, i) => id === selectedRunScope.rowIds[i]) + const sameStats = + prev?.selectionStats && + prev.selectionStats.hasIncompleteOrFailed === selectionStats.hasIncompleteOrFailed && + prev.selectionStats.hasCompleted === selectionStats.hasCompleted && + prev.selectionStats.hasInFlight === selectionStats.hasInFlight + if ( + prev && + sameSingleCell && + sameRunScope && + sameStats && + prev.runningInActionBarSelection === runningInActionBarSelection && + prev.totalRunning === totalRunning && + prev.hasWorkflowColumns === hasWorkflowColumns && + prev.actionBarRowIds.length === actionBarRowIds.length && + prev.actionBarRowIds.every((id, i) => id === actionBarRowIds[i]) + ) { + return + } + const next: SelectionSnapshot = { + actionBarRowIds, + runningInActionBarSelection, + totalRunning, + hasWorkflowColumns, + selectedRunScope, + selectionStats, + singleWorkflowCell, + } + lastSelectionSnapshotRef.current = next + onSelectionChangeRef.current(next) + }, [ + actionBarRowIds, + runningInActionBarSelection, + totalRunning, + hasWorkflowColumns, + selectedRunScope, + selectionStats, + singleWorkflowCell, + ]) + const handleRunRow = useCallback( (rowId: string) => { - if (tableWorkflowGroups.length === 0) return - const target = rowsRef.current.find((r) => r.id === rowId) - if (!target) return - // Only fire groups whose deps are already satisfied for THIS row. The - // cascade picks up downstream groups: when an upstream group completes, - // `scheduleWorkflowGroupRuns` evaluates eligibility and enqueues the - // newly-ready successors automatically. 
- for (const group of tableWorkflowGroups) { - if (!areRowDepsSatisfied(group, target)) continue - void runWorkflowGroup({ - tableId, - rowId, - workspaceId, - groupId: group.id, - workflowId: group.workflowId, - outputColumnNames: group.outputs.map((o) => o.columnName), - }) - } + onRunRow(rowId) }, - [runWorkflowGroup, tableId, workspaceId, tableWorkflowGroups] + [onRunRow] ) if (!isLoadingTable && !tableData) { @@ -2550,46 +2830,12 @@ export function Table({ return (
- {!embedded && ( - <> - 0 ? ( - - ) : null - } - /> - - - {filterOpen && ( - - )} - - )} - {embedded && totalRunning > 0 && (
)} @@ -2609,8 +2855,8 @@ export function Table({
{isLoadingTable ? ( - + {Array.from({ length: SKELETON_COL_COUNT }).map((_, i) => ( ))} ) : ( - + )} {isLoadingTable ? ( @@ -2655,7 +2905,7 @@ export function Table({ <> {hasWorkflowGroup && ( - @@ -2787,9 +3053,10 @@ export function Table({ onRowToggle={handleRowToggle} runningCount={runningByRowId.get(row.id) ?? 0} hasWorkflowColumns={hasWorkflowColumns} - onStopRow={handleStopRow} + isLargeRowCountTable={isLargeRowCountTable} + onStopRow={onStopRow} onRunRow={handleRunRow} - workflowNameById={workflowNameById} + workflowGroups={tableWorkflowGroups} /> ))} @@ -2822,55 +3089,8 @@ export function Table({ )} - - setConfigState(null)} - existingColumn={ - configState?.mode === 'edit' - ? (columns.find((c) => c.name === configState.columnName) ?? null) - : null - } - allColumns={columns} - workflowGroups={tableWorkflowGroups} - workflows={workflows} - workspaceId={workspaceId} - tableId={tableId} - /> - - setExecutionDetailsId(null)} - /> - {editingRow && tableData && ( - setEditingRow(null)} - table={tableData} - row={editingRow} - onSuccess={() => setEditingRow(null)} - /> - )} - - {deletingRows.length > 0 && tableData && ( - setDeletingRows([])} - table={tableData} - rowIds={deletingRows.map((r) => r.rowId)} - onSuccess={() => { - pushUndo({ type: 'delete-rows', rows: deletingRows }) - setDeletingRows([]) - handleClearSelection() - }} - /> - )} - - - {!embedded && ( - - - Delete Table - -

- Are you sure you want to delete{' '} - {tableData?.name}?{' '} - - All {tableData?.rowCount ?? 0} rows will be removed. - {' '} - You can restore it from Recently Deleted in Settings. -

-
- - - - -
-
- )} - - {tableData && ( - - )} - - { - if (!open) setDeletingColumns(null) - }} - > - - - {deletingColumns && deletingColumns.length > 1 - ? `Delete ${deletingColumns.length} Columns` - : 'Delete Column'} - - -

- {deletingColumns && deletingColumns.length > 1 ? ( - <> - Are you sure you want to delete{' '} - - {deletingColumns.length} columns - - ?{' '} - - ) : ( - <> - Are you sure you want to delete{' '} - - {deletingColumns?.[0]} - - ?{' '} - - )} - - This will remove all data in{' '} - {deletingColumns && deletingColumns.length > 1 ? 'these columns' : 'this column'}. - {' '} - You can undo this action. -

-
- - - - -
-
) } @@ -2996,13 +3139,15 @@ export function Table({ const TableColGroup = React.memo(function TableColGroup({ columns, columnWidths, + checkboxColWidth, }: { columns: DisplayColumn[] columnWidths: Record + checkboxColWidth: number }) { return (
- + {columns.map((col) => ( ))} @@ -3033,10 +3178,15 @@ interface DataRowProps { runningCount: number /** Whether the table has at least one workflow column — controls whether a run/stop icon is rendered. */ hasWorkflowColumns: boolean + /** True for tables sized for >9,999 rows; widens the row-number slot to fit 5–7 digit numbers. */ + isLargeRowCountTable: boolean onStopRow: (rowId: string) => void onRunRow: (rowId: string) => void - /** Lookup from workflow id → human-readable name, used to label running cells. */ - workflowNameById: Record + /** + * The table's workflow groups, used to compute per-row "Waiting on …" labels + * for empty workflow-output cells whose group has unmet dependencies. + */ + workflowGroups: WorkflowGroup[] } function cellRangeRowChanged( @@ -3089,9 +3239,10 @@ function dataRowPropsAreEqual(prev: DataRowProps, next: DataRowProps): boolean { prev.onRowToggle !== next.onRowToggle || prev.runningCount !== next.runningCount || prev.hasWorkflowColumns !== next.hasWorkflowColumns || + prev.isLargeRowCountTable !== next.isLargeRowCountTable || prev.onStopRow !== next.onStopRow || prev.onRunRow !== next.onRunRow || - prev.workflowNameById !== next.workflowNameById + prev.workflowGroups !== next.workflowGroups ) { return false } @@ -3130,28 +3281,52 @@ const DataRow = React.memo(function DataRow({ onRowToggle, runningCount, hasWorkflowColumns, + isLargeRowCountTable, onStopRow, onRunRow, - workflowNameById, + workflowGroups, }: DataRowProps) { const sel = normalizedSelection + /** + * Per-row "Waiting on …" labels keyed by group id. A group has labels iff + * at least one of its dependencies is unmet for this row — drives the + * "Waiting" pill rendered by `CellContent` for empty workflow-output cells. + * Computed once per render rather than per cell so all cells in a group + * share the same array reference. 
+ */ + const waitingByGroupId = React.useMemo(() => { + if (workflowGroups.length === 0) return null + const map = new Map() + for (const group of workflowGroups) { + // autoRun=false groups never fire from the scheduler — there's nothing + // to wait on. The cell stays empty until the user clicks Run manually. + if (group.autoRun === false) continue + const unmet = getUnmetGroupDeps(group, row) + if (unmet.columns.length === 0) continue + map.set(group.id, unmet.columns) + } + return map + }, [workflowGroups, row]) const isMultiCell = sel !== null && (sel.startRow !== sel.endRow || sel.startCol !== sel.endCol) const isRowSelected = isRowChecked return ( onContextMenu(e, row)}> - @@ -3256,7 +3433,11 @@ const DataRow = React.memo(function DataRow({ initialCharacter={isEditing ? initialCharacter : undefined} onSave={(value, reason) => onSave(row.id, column.name, value, reason)} onCancel={onCancel} - workflowNameById={workflowNameById} + waitingOnLabels={ + column.workflowGroupId + ? (waitingByGroupId?.get(column.workflowGroupId) ?? undefined) + : undefined + } /> @@ -3298,41 +3479,6 @@ const TableBodySkeleton = React.memo(function TableBodySkeleton({ ) }) -interface RunStatusControlProps { - running: number - onStopAll: () => void - isStopping: boolean -} - -/** - * Run-status + Stop-all control rendered in the header's trailing actions row. - * Matches the in-cell running indicator (`Loader` + tertiary text) for consistency. - */ -const RunStatusControl = React.memo(function RunStatusControl({ - running, - onStopAll, - isStopping, -}: RunStatusControlProps) { - return ( -
-
- - {running} - running -
- -
- ) -}) - const SelectAllCheckbox = React.memo(function SelectAllCheckbox({ checked, onCheckedChange, @@ -3363,44 +3509,6 @@ const SelectAllCheckbox = React.memo(function SelectAllCheckbox({ ) }) -const AddColumnButton = React.memo(function AddColumnButton({ - onClick, - disabled, -}: { - onClick: () => void - disabled: boolean -}) { - return ( - - ) -}) - -const HEADER_ADD_COLUMN_ICON = - -function HeaderAddColumnTrigger({ onClick, disabled }: { onClick: () => void; disabled: boolean }) { - return ( - - ) -} - const AddRowButton = React.memo(function AddRowButton({ onClick }: { onClick: () => void }) { return (
@@ -3415,21 +3523,3 @@ const AddRowButton = React.memo(function AddRowButton({ onClick }: { onClick: ()
) }) - -/** - * Reuses the logs page's `LogDetails` slideout inside the tables view so a user - * can inspect a workflow run for a cell without leaving the table. The query is - * keyed on `executionId` because that's what's stored on the cell. - */ -function ExecutionDetailsSidebar({ - workspaceId, - executionId, - onClose, -}: { - workspaceId: string - executionId: string | null - onClose: () => void -}) { - const { data: log } = useLogByExecutionId(workspaceId, executionId) - return -} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/types.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/types.ts similarity index 100% rename from apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/types.ts rename to apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/types.ts diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/utils.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/utils.ts similarity index 76% rename from apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/utils.ts rename to apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/utils.ts index 6721e6e60f9..73fa8db9c49 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/utils.ts +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/utils.ts @@ -123,24 +123,51 @@ export function readExecution( return row?.executions?.[groupId] } +export interface ExecStatusMix { + hasIncompleteOrFailed: boolean + hasCompleted: boolean + hasInFlight: boolean +} + /** - * Client-side mirror of the scheduler's deps predicate. Used to filter the - * row-run button so we don't fire downstream groups whose upstream isn't - * `completed` yet — the cascade handles those once the upstream finishes. 
+ * Walks `(rowIdSet × groupIds)` exec statuses on `rows` and reports which + * status buckets are present. Short-circuits once all three buckets are + * observed and once every selected row has been visited. Drives Play / + * Refresh / Stop visibility on the action bar and the context menu — both + * surfaces use the same shape so they stay in sync. */ -export function areRowDepsSatisfied( - group: WorkflowGroup, - row: { data: Record; executions?: RowExecutions } -): boolean { - const deps = group.dependencies ?? {} - for (const colName of deps.columns ?? []) { - const value = row.data[colName] - if (value === null || value === undefined || value === '') return false +export function classifyExecStatusMix( + rows: TableRowType[], + rowIdSet: ReadonlySet, + groupIds: readonly string[] +): ExecStatusMix { + const result: ExecStatusMix = { + hasIncompleteOrFailed: false, + hasCompleted: false, + hasInFlight: false, } - for (const gid of deps.workflowGroups ?? []) { - if (row.executions?.[gid]?.status !== 'completed') return false + if (rowIdSet.size === 0 || groupIds.length === 0) return result + const target = rowIdSet.size + let seen = 0 + for (const row of rows) { + if (!rowIdSet.has(row.id)) continue + seen++ + for (const groupId of groupIds) { + const status = readExecution(row, groupId)?.status + if (status === 'queued' || status === 'running' || status === 'pending') { + result.hasInFlight = true + } else if (status === 'completed') { + result.hasCompleted = true + } else { + result.hasIncompleteOrFailed = true + } + if (result.hasInFlight && result.hasCompleted && result.hasIncompleteOrFailed) { + return result + } + } + if (seen === target) break } - return true + return result } export function moveCell( diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/cells/cell-content.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/cells/cell-content.tsx deleted file mode 100644 index 
5224fd80a24..00000000000 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/cells/cell-content.tsx +++ /dev/null @@ -1,173 +0,0 @@ -'use client' - -import type React from 'react' -import { Circle } from 'lucide-react' -import { Checkbox } from '@/components/emcn' -import { Loader } from '@/components/emcn/icons/loader' -import { cn } from '@/lib/core/utils/cn' -import type { RowExecutionMetadata } from '@/lib/table' -import type { SaveReason } from '../../../types' -import { storageToDisplay } from '../../../utils' -import type { DisplayColumn } from '../types' -import { InlineEditor } from './inline-editors' - -interface CellContentProps { - value: unknown - exec?: RowExecutionMetadata - column: DisplayColumn - isEditing: boolean - initialCharacter?: string | null - onSave: (value: unknown, reason: SaveReason) => void - onCancel: () => void - workflowNameById?: Record -} - -/** - * Renders the visible content of a single cell. Workflow-output cells follow - * a status-state-machine (block error / value / running / waiting / cancelled - * / dash); plain cells render the typed value. When `isEditing` is true the - * `InlineEditor` overlay sits on top of the static content. - */ -export function CellContent({ - value, - exec, - column, - isEditing, - initialCharacter, - onSave, - onCancel, -}: CellContentProps) { - const isNull = value === null || value === undefined - - let displayContent: React.ReactNode = null - if (column.workflowGroupId) { - const blockId = column.outputBlockId - const blockError = blockId ? exec?.blockErrors?.[blockId] : undefined - const blockRunning = blockId ? (exec?.runningBlockIds?.includes(blockId) ?? false) : false - const hasValue = !isNull - const valueText = - typeof value === 'string' - ? value - : value === null || value === undefined - ? 
'' - : JSON.stringify(value) - - // Once any block in the group has reported an error, downstream cells - // that haven't started won't run on this attempt — collapse them to dash - // instead of leaving a stale "Waiting" spinner if the cell task didn't - // reach a clean terminal state. - const groupHasBlockErrors = !!(exec?.blockErrors && Object.keys(exec.blockErrors).length > 0) - if (blockError) { - displayContent = ( - - Error - - ) - } else if (hasValue) { - displayContent = ( - - {valueText} - - ) - } else if ( - (exec?.status === 'running' || exec?.status === 'pending') && - !(groupHasBlockErrors && !blockRunning) - ) { - // Motion only when this cell's own block is in flight. Pending and - // upstream-blocked Waiting render as static dots — the moving spinner - // is reserved for "right now, actually running". - if (blockRunning) { - displayContent = ( -
- - - Running - -
- ) - } else { - const label = exec.status === 'pending' ? 'Pending' : 'Waiting' - displayContent = ( -
- - - {label} - -
- ) - } - } else if (exec?.status === 'cancelled') { - displayContent = ( - - Cancelled - - ) - } else { - displayContent = - } - // Workflow-output cells are hand-editable: hide the status content under - // the InlineEditor when the user opts to edit, then fall through to the - // common return that renders the editor overlay. - if (isEditing) { - displayContent =
{displayContent}
- } - } else if (column.type === 'boolean') { - displayContent = ( -
- - - -
- ) - } else if (!isNull && column.type === 'json') { - displayContent = ( - - {JSON.stringify(value)} - - ) - } else if (!isNull && column.type === 'date') { - displayContent = ( - - {storageToDisplay(String(value))} - - ) - } else if (!isNull) { - displayContent = ( - - {String(value)} - - ) - } - - return ( - <> - {isEditing && ( -
- -
- )} - {displayContent} - - ) -} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/headers/column-type-icon.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/headers/column-type-icon.tsx deleted file mode 100644 index f0fc0d08be7..00000000000 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/headers/column-type-icon.tsx +++ /dev/null @@ -1,59 +0,0 @@ -'use client' - -import type React from 'react' -import { - Calendar as CalendarIcon, - TypeBoolean, - TypeJson, - TypeNumber, - TypeText, -} from '@/components/emcn/icons' -import type { BlockIconInfo } from '../types' - -export const COLUMN_TYPE_ICONS: Record = { - string: TypeText, - number: TypeNumber, - boolean: TypeBoolean, - date: CalendarIcon, - json: TypeJson, -} - -interface ColumnTypeIconProps { - type: string - workflowColor?: string - blockIconInfo?: BlockIconInfo -} - -/** - * Tiny icon shown next to a column header. For workflow-output columns: - * the producing block's icon (when known) or a colored swatch tinted with - * the workflow's color. For plain columns: the type icon. - */ -export function ColumnTypeIcon({ type, workflowColor, blockIconInfo }: ColumnTypeIconProps) { - if (workflowColor || blockIconInfo) { - if (blockIconInfo) { - const BlockIcon = blockIconInfo.icon - return ( - - - - ) - } - const color = workflowColor ?? 'var(--text-muted)' - return ( - - ) - } - const Icon = COLUMN_TYPE_ICONS[type] ?? 
TypeText - return -} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/index.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/index.ts deleted file mode 100644 index f75bc0849df..00000000000 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { Table } from './table' diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/workflow-sidebar/index.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/workflow-sidebar/index.ts new file mode 100644 index 00000000000..6d45862e281 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/workflow-sidebar/index.ts @@ -0,0 +1 @@ +export { type WorkflowConfig, WorkflowSidebar } from './workflow-sidebar' diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/workflow-sidebar/run-settings-section.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/workflow-sidebar/run-settings-section.tsx new file mode 100644 index 00000000000..320eda4bd1f --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/workflow-sidebar/run-settings-section.tsx @@ -0,0 +1,48 @@ +'use client' + +import { Combobox, Label } from '@/components/emcn' +import type { ColumnDefinition } from '@/lib/table' + +interface RunSettingsSectionProps { + /** All columns the group can depend on (left-of-current scalar + workflow + * output columns alike). */ + depOptions: ColumnDefinition[] + /** Column names this group waits on. */ + deps: string[] + onChangeDeps: (next: string[]) => void +} + +/** + * "Run after" picker: which upstream columns must be filled before this group + * fires. Workflow output columns count the same as plain columns — once a + * column is non-empty, the dep is satisfied. Empty selection = the group fires + * on any row change. 
+ */ +export function RunSettingsSection({ depOptions, deps, onChangeDeps }: RunSettingsSectionProps) { + const options = depOptions.map((c) => ({ label: c.name, value: c.name })) + + return ( +
+ + + {deps.length === 0 ? 'Any row change' : `${deps.length} selected`} + + } + /> +
+ ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/workflow-sidebar/workflow-sidebar.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/workflow-sidebar/workflow-sidebar.tsx new file mode 100644 index 00000000000..a1a4a490875 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/workflow-sidebar/workflow-sidebar.tsx @@ -0,0 +1,895 @@ +'use client' + +import type React from 'react' +import { useMemo, useState } from 'react' +import { toError } from '@sim/utils/errors' +import { generateId } from '@sim/utils/id' +import { useMutation, useQueryClient } from '@tanstack/react-query' +import { ExternalLink, RepeatIcon, SplitIcon, X } from 'lucide-react' +import { + Button, + Combobox, + type ComboboxOptionGroup, + FieldDivider, + Input, + Label, + Loader, + Switch, + Tooltip, + toast, +} from '@/components/emcn' +import { findValidationIssue, isValidationError } from '@/lib/api/client/errors' +import { requestJson } from '@/lib/api/client/request' +import type { + AddWorkflowGroupBodyInput, + UpdateWorkflowGroupBodyInput, +} from '@/lib/api/contracts/tables' +import { + putWorkflowNormalizedStateContract, + type WorkflowStateContractInput, +} from '@/lib/api/contracts/workflows' +import { cn } from '@/lib/core/utils/cn' +import type { + ColumnDefinition, + WorkflowGroup, + WorkflowGroupDependencies, + WorkflowGroupOutput, +} from '@/lib/table' +import { columnTypeForLeaf, deriveOutputColumnName } from '@/lib/table/column-naming' +import { + type FlattenOutputsBlockInput, + type FlattenOutputsEdgeInput, + flattenWorkflowOutputs, + getBlockExecutionOrder, +} from '@/lib/workflows/blocks/flatten-outputs' +import { normalizeInputFormatValue } from '@/lib/workflows/input-format' +import { TriggerUtils } from '@/lib/workflows/triggers/triggers' +import type { InputFormatField } from '@/lib/workflows/types' +import { PreviewWorkflow } from '@/app/workspace/[workspaceId]/w/components/preview' 
+import { getBlock } from '@/blocks' +import { + useAddWorkflowGroup, + useUpdateColumn, + useUpdateWorkflowGroup, +} from '@/hooks/queries/tables' +import { useWorkflowState, workflowKeys } from '@/hooks/queries/workflows' +import type { WorkflowMetadata } from '@/stores/workflows/registry/types' +import { RunSettingsSection } from './run-settings-section' + +/** + * Discriminates the three flows the workflow sidebar handles: + * - `create`: brand-new workflow group spawned from the "+ New column" dropdown's "Workflow" item. + * - `edit-group`: opened from the workflow-group meta header. Lets the user edit the whole group + * (workflow id, deps, output set, group name). + * - `edit-output`: opened from a single workflow-output column header. Focuses on this column's + * `(blockId, path)` mapping + column rename. Other group-wide controls remain visible but + * secondary. + */ +export type WorkflowConfig = + | { mode: 'create'; proposedName: string } + | { mode: 'edit-group'; groupId: string } + | { mode: 'edit-output'; columnName: string } + +interface WorkflowSidebarProps { + config: WorkflowConfig | null + onClose: () => void + /** All scalar + workflow-output columns on the table. Drives the deps picker + * options and the "missing inputs" prompt. */ + allColumns: ColumnDefinition[] + workflowGroups: WorkflowGroup[] + workflows: WorkflowMetadata[] | undefined + workspaceId: string + tableId: string + /** Notify parent of a per-output-column rename so it can rewrite local + * `columnOrder` / `columnWidths` keys. 
*/ + onColumnRename?: (oldName: string, newName: string) => void +} + +const OUTPUT_VALUE_SEPARATOR = '::' + +const encodeOutputValue = (blockId: string, path: string) => + `${blockId}${OUTPUT_VALUE_SEPARATOR}${path}` + +const decodeOutputValue = (value: string): { blockId: string; path: string } => { + const idx = value.indexOf(OUTPUT_VALUE_SEPARATOR) + if (idx === -1) return { blockId: '', path: value } + return { blockId: value.slice(0, idx), path: value.slice(idx + OUTPUT_VALUE_SEPARATOR.length) } +} + +interface BlockOutputGroup { + blockId: string + blockName: string + blockType: string + blockIcon: string | React.ComponentType<{ className?: string }> + blockColor: string + paths: string[] +} + +interface WorkflowStatePayload { + blocks: Record< + string, + { + type: string + subBlocks?: Record + } & Record + > + edges: unknown[] + loops: unknown + parallels: unknown + lastSaved?: number + isDeployed?: boolean +} + +function tableColumnTypeToInputType(colType: ColumnDefinition['type'] | undefined): string { + switch (colType) { + case 'number': + return 'number' + case 'boolean': + return 'boolean' + case 'json': + return 'object' + default: + return 'string' + } +} + +function RequiredLabel({ htmlFor, children }: { htmlFor?: string; children: React.ReactNode }) { + return ( + + ) +} + +function FieldError({ message }: { message: string }) { + return

{message}

+} + +const TagIcon: React.FC<{ + icon: string | React.ComponentType<{ className?: string }> + color: string +}> = ({ icon, color }) => ( +
+ {typeof icon === 'string' ? ( + {icon} + ) : ( + (() => { + const IconComponent = icon + return + })() + )} +
+) + +/** + * Right-edge sidebar for workflow group configuration. Three flows: + * - create a new group (workflow + outputs + deps), + * - edit an existing group (same fields, plus rename output-column option), + * - edit a single output column's mapping (swap which `(blockId, path)` it + * reads from, rename the column). + * + * All form state lives in ``, which the outer shell + * mounts with `key={configKey(config)}` so opening a different group/column + * remounts and re-seeds state from props (no `useEffect` mirror). + */ +export function WorkflowSidebar(props: WorkflowSidebarProps) { + const open = props.config !== null + return ( + + ) +} + +function configKey(config: WorkflowConfig): string { + switch (config.mode) { + case 'create': + return `create:${config.proposedName}` + case 'edit-group': + return `edit-group:${config.groupId}` + case 'edit-output': + return `edit-output:${config.columnName}` + } +} + +interface WorkflowSidebarBodyProps extends Omit { + config: WorkflowConfig +} + +function WorkflowSidebarBody({ + config, + onClose, + allColumns, + workflowGroups, + workflows, + workspaceId, + tableId, + onColumnRename, +}: WorkflowSidebarBodyProps) { + const updateColumn = useUpdateColumn({ workspaceId, tableId }) + const addWorkflowGroup = useAddWorkflowGroup({ workspaceId, tableId }) + const updateWorkflowGroup = useUpdateWorkflowGroup({ workspaceId, tableId }) + + // Resolve the existing group (if any) and the existing single-output column + // (if `mode === 'edit-output'`) from props. These are derivations — used + // only for seeding the form below and for save-time diffs. + const existingGroup: WorkflowGroup | undefined = (() => { + if (config.mode === 'edit-group') return workflowGroups.find((g) => g.id === config.groupId) + if (config.mode === 'edit-output') { + const col = allColumns.find((c) => c.name === config.columnName) + return col?.workflowGroupId + ? 
workflowGroups.find((g) => g.id === col.workflowGroupId) + : undefined + } + return undefined + })() + const existingColumn = + config.mode === 'edit-output' + ? (allColumns.find((c) => c.name === config.columnName) ?? null) + : null + + // Anchor column for "left of current" filtering. For create + edit-group we + // treat the anchor as missing (group config sits at the right edge of the + // group); for edit-output the anchor is the column being edited. + const anchorColumnName = config.mode === 'edit-output' ? config.columnName : null + + /** + * Columns "left of current" — these are the only valid trigger dependencies. + * For create + edit-group, every existing column qualifies. For edit-output, + * only columns physically before the anchor. + */ + const otherColumns = (() => { + if (anchorColumnName === null) return allColumns + const idx = allColumns.findIndex((c) => c.name === anchorColumnName) + if (idx === -1) return allColumns.filter((c) => c.name !== anchorColumnName) + return allColumns.slice(0, idx) + })() + + // Every left-of-current column is a valid dep — workflow output columns + // included. Exclude this group's own outputs (you can't depend on yourself). + const ownOutputNames = new Set(existingGroup?.outputs.map((o) => o.columnName) ?? []) + const depOptions = otherColumns.filter((c) => !ownOutputNames.has(c.name)) + + // Default deps for a brand-new group: tick every left-of-current column. + const defaultDeps = depOptions.map((c) => c.name) + + const [selectedWorkflowId, setSelectedWorkflowId] = useState( + () => existingGroup?.workflowId ?? '' + ) + // For existing groups, treat a missing `autoRun` field as `true` (pre-feature + // groups all ran automatically and shouldn't silently flip to manual when + // the user just opens the sidebar). For brand-new groups, default to `false` + // so the user opts in to auto-run explicitly. + const [autoRun, setAutoRun] = useState(() => + existingGroup ? 
existingGroup.autoRun !== false : false + ) + const [deps, setDeps] = useState( + () => existingGroup?.dependencies?.columns ?? defaultDeps + ) + // `selectedOutputs` is encoded `${blockId}::${path}`. Seeded once `blockOutputGroups` + // resolves (we may not have the workflow blocks loaded at first render); see the + // post-load reconciliation below. + const [selectedOutputs, setSelectedOutputs] = useState([]) + const [outputsHydrated, setOutputsHydrated] = useState(false) + const [columnNameInput, setColumnNameInput] = useState( + () => existingColumn?.name ?? (config.mode === 'create' ? config.proposedName : '') + ) + const [showValidation, setShowValidation] = useState(false) + const [nameError, setNameError] = useState(null) + + const workflowState = useWorkflowState(selectedWorkflowId || undefined) + + /** Resolves the unified Start block id and its current `inputFormat` field + * names. The "Add inputs" mutation only adds rows for table columns that + * aren't already represented in the start block. 
*/ + const startBlockInputs = useMemo<{ + blockId: string | null + existingNames: Set + existing: InputFormatField[] + }>(() => { + const blocks = (workflowState.data as { blocks?: Record } | null) + ?.blocks + if (!blocks) return { blockId: null, existingNames: new Set(), existing: [] } + const candidate = TriggerUtils.findStartBlock(blocks, 'manual') + if (!candidate) return { blockId: null, existingNames: new Set(), existing: [] } + const block = blocks[candidate.blockId] as + | { subBlocks?: Record } + | undefined + const existing = normalizeInputFormatValue(block?.subBlocks?.inputFormat?.value) + return { + blockId: candidate.blockId, + existingNames: new Set(existing.map((f) => f.name).filter((n): n is string => !!n)), + existing, + } + }, [workflowState.data]) + + const missingInputColumnNames = useMemo(() => { + if (!startBlockInputs.blockId) return [] + const anchor = anchorColumnName + return allColumns + .filter( + (c) => + c.name !== anchor && !c.workflowGroupId && !startBlockInputs.existingNames.has(c.name) + ) + .map((c) => c.name) + }, [allColumns, anchorColumnName, startBlockInputs]) + + const queryClient = useQueryClient() + const addInputsMutation = useMutation({ + mutationFn: async () => { + const wfId = selectedWorkflowId + const startBlockId = startBlockInputs.blockId + const state = workflowState.data as WorkflowStatePayload | null | undefined + if (!wfId || !startBlockId || !state || missingInputColumnNames.length === 0) { + throw new Error('Nothing to add') + } + const startBlock = state.blocks[startBlockId] + if (!startBlock) throw new Error('Start block missing from workflow') + + const newFields: InputFormatField[] = missingInputColumnNames.map((name) => { + const col = allColumns.find((c) => c.name === name) + return { + id: generateId(), + name, + type: tableColumnTypeToInputType(col?.type), + value: '', + collapsed: false, + } as InputFormatField & { id: string; collapsed: boolean } + }) + + const updatedSubBlock = { + 
...(startBlock.subBlocks?.inputFormat ?? { id: 'inputFormat', type: 'input-format' }), + value: [...startBlockInputs.existing, ...newFields], + } + const updatedBlocks = { + ...state.blocks, + [startBlockId]: { + ...startBlock, + subBlocks: { ...startBlock.subBlocks, inputFormat: updatedSubBlock }, + }, + } + + const rawBody = { + blocks: updatedBlocks, + edges: state.edges, + loops: state.loops, + parallels: state.parallels, + lastSaved: state.lastSaved ?? Date.now(), + isDeployed: state.isDeployed ?? false, + } + // double-cast-allowed: WorkflowStatePayload is the loose local view of + // useWorkflowState; round-trip back to the strict PUT body shape. + const body = rawBody as unknown as WorkflowStateContractInput + await requestJson(putWorkflowNormalizedStateContract, { params: { id: wfId }, body }) + }, + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: workflowKeys.state(selectedWorkflowId) }) + }, + onError: (err) => { + toast.error(toError(err).message) + }, + }) + + const blockOutputGroups = useMemo(() => { + const state = workflowState.data as + | { + blocks?: Record + edges?: FlattenOutputsEdgeInput[] + } + | null + | undefined + if (!state?.blocks) return [] + + const blocks = Object.values(state.blocks) + const edges = state.edges ?? 
[] + const flat = flattenWorkflowOutputs(blocks, edges) + if (flat.length === 0) return [] + + const groupsByBlockId = new Map() + for (const f of flat) { + let group = groupsByBlockId.get(f.blockId) + if (!group) { + const blockConfig = getBlock(f.blockType) + const blockColor = blockConfig?.bgColor || '#2F55FF' + let blockIcon: string | React.ComponentType<{ className?: string }> = f.blockName + .charAt(0) + .toUpperCase() + if (blockConfig?.icon) blockIcon = blockConfig.icon + else if (f.blockType === 'loop') blockIcon = RepeatIcon + else if (f.blockType === 'parallel') blockIcon = SplitIcon + group = { + blockId: f.blockId, + blockName: f.blockName, + blockType: f.blockType, + blockIcon, + blockColor, + paths: [], + } + groupsByBlockId.set(f.blockId, group) + } + group.paths.push(f.path) + } + const distances = getBlockExecutionOrder(blocks, edges) + return Array.from(groupsByBlockId.values()).sort((a, b) => { + const da = distances[a.blockId] + const db = distances[b.blockId] + const sa = da === undefined || da < 0 ? Number.POSITIVE_INFINITY : da + const sb = db === undefined || db < 0 ? Number.POSITIVE_INFINITY : db + return sa - sb + }) + }, [workflowState.data]) + + const outputGroupOptions = useMemo( + () => + blockOutputGroups.map((group) => ({ + section: group.blockName, + sectionElement: ( +
+ + + {group.blockName} + +
+ ), + items: group.paths.map((path) => ({ + label: path, + value: encodeOutputValue(group.blockId, path), + })), + })), + [blockOutputGroups] + ) + + // Once the workflow's blocks are loaded, re-encode persisted `{blockId, path}` + // entries into the picker's encoded form. Stale entries (block deleted or + // path removed) are dropped silently — the user can re-pick on save. + if (!outputsHydrated && existingGroup?.outputs.length && blockOutputGroups.length > 0) { + const encoded: string[] = [] + if (config.mode === 'edit-output' && existingColumn) { + // Single-output sub-mode: only seed the picker with this column's mapping. + const own = existingGroup.outputs.find((o) => o.columnName === existingColumn.name) + if (own) { + const match = blockOutputGroups.find( + (g) => g.blockId === own.blockId && g.paths.includes(own.path) + ) + if (match) encoded.push(encodeOutputValue(own.blockId, own.path)) + } + } else { + for (const entry of existingGroup.outputs) { + const match = blockOutputGroups.find( + (g) => g.blockId === entry.blockId && g.paths.includes(entry.path) + ) + if (match) encoded.push(encodeOutputValue(entry.blockId, entry.path)) + } + } + setSelectedOutputs(encoded) + setOutputsHydrated(true) + } + + /** + * Builds the ordered, deduplicated `(blockId, path)` list from the picker + * state, sorted by execution order. + */ + function buildOrderedPickedOutputs(): Array<{ + blockId: string + path: string + leafType?: string + }> { + const seen = new Set() + const outputs: Array<{ blockId: string; path: string; leafType?: string }> = [] + for (const encoded of selectedOutputs) { + if (seen.has(encoded)) continue + seen.add(encoded) + outputs.push(decodeOutputValue(encoded)) + } + const wfState = workflowState.data as + | { + blocks?: Record + edges?: FlattenOutputsEdgeInput[] + } + | null + | undefined + if (wfState?.blocks) { + const blocks = Object.values(wfState.blocks) + const edges = wfState.edges ?? 
[] + const distances = getBlockExecutionOrder(blocks, edges) + const flat = flattenWorkflowOutputs(blocks, edges) + const indexInFlat = new Map( + flat.map((f, i) => [`${f.blockId}${OUTPUT_VALUE_SEPARATOR}${f.path}`, i]) + ) + const leafTypeByKey = new Map( + flat.map((f) => [`${f.blockId}${OUTPUT_VALUE_SEPARATOR}${f.path}`, f.leafType]) + ) + for (const o of outputs) { + o.leafType = leafTypeByKey.get(`${o.blockId}${OUTPUT_VALUE_SEPARATOR}${o.path}`) + } + outputs.sort((a, b) => { + const da = distances[a.blockId] + const db = distances[b.blockId] + const sa = da === undefined || da < 0 ? Number.POSITIVE_INFINITY : da + const sb = db === undefined || db < 0 ? Number.POSITIVE_INFINITY : db + if (sa !== sb) return sa - sb + const ia = + indexInFlat.get(`${a.blockId}${OUTPUT_VALUE_SEPARATOR}${a.path}`) ?? + Number.POSITIVE_INFINITY + const ib = + indexInFlat.get(`${b.blockId}${OUTPUT_VALUE_SEPARATOR}${b.path}`) ?? + Number.POSITIVE_INFINITY + return ia - ib + }) + } + return outputs + } + + const isEditOutputMode = config.mode === 'edit-output' + + async function handleSave() { + const trimmedName = columnNameInput.trim() + + const missing: string[] = [] + if (!selectedWorkflowId) missing.push('a workflow') + if (selectedWorkflowId && selectedOutputs.length === 0) missing.push('at least one output') + if (isEditOutputMode && !trimmedName) missing.push('a column name') + if (missing.length > 0) { + setShowValidation(true) + return + } + + try { + const orderedOutputs = buildOrderedPickedOutputs() + const dependencies: WorkflowGroupDependencies = { columns: deps } + + if (existingGroup) { + // edit-output: swap one column's source mapping (and optionally rename + // the column itself). edit-group: full add/remove diff against the + // group's existing outputs. + if (isEditOutputMode && existingColumn) { + const renamedColumn = + trimmedName !== existingColumn.name + ? 
{ from: existingColumn.name, to: trimmedName } + : null + const newPick = orderedOutputs[0] + if (!newPick) throw new Error('Pick an output') + if (renamedColumn) { + await updateColumn.mutateAsync({ + columnName: renamedColumn.from, + updates: { name: renamedColumn.to }, + }) + onColumnRename?.(renamedColumn.from, renamedColumn.to) + } + // Reference the post-rename column name in mappingUpdates. The + // server applies the mapping swap and clears the column's row data + // so the next workflow run repopulates from the new source. + const targetColumnName = renamedColumn?.to ?? existingColumn.name + await updateWorkflowGroup.mutateAsync({ + groupId: existingGroup.id, + workflowId: selectedWorkflowId, + name: existingGroup.name, + dependencies, + mappingUpdates: [ + { columnName: targetColumnName, blockId: newPick.blockId, path: newPick.path }, + ], + }) + toast.success(`Saved "${targetColumnName}"`) + } else { + // edit-group: full output diff with new-column derivation. + const taken = new Set(allColumns.map((c) => c.name)) + const fullOutputs: WorkflowGroupOutput[] = [] + const newOutputColumns: NonNullable = [] + for (const o of orderedOutputs) { + const existingOut = existingGroup.outputs.find( + (e) => e.blockId === o.blockId && e.path === o.path + ) + if (existingOut) { + fullOutputs.push(existingOut) + } else { + const colName = deriveOutputColumnName(o.path, taken) + taken.add(colName) + fullOutputs.push({ blockId: o.blockId, path: o.path, columnName: colName }) + newOutputColumns.push({ + name: colName, + type: columnTypeForLeaf(o.leafType), + required: false, + unique: false, + workflowGroupId: existingGroup.id, + }) + } + } + await updateWorkflowGroup.mutateAsync({ + groupId: existingGroup.id, + workflowId: selectedWorkflowId, + name: existingGroup.name, + dependencies, + outputs: fullOutputs, + ...(newOutputColumns.length > 0 ? { newOutputColumns } : {}), + autoRun, + }) + toast.success(`Saved "${existingGroup.name ?? 
'Workflow'}"`) + } + } else { + // Create path: brand-new group with auto-derived output column names. + const groupId = generateId() + const taken = new Set(allColumns.map((c) => c.name)) + const newOutputColumns: AddWorkflowGroupBodyInput['outputColumns'] = [] + const groupOutputs: WorkflowGroupOutput[] = [] + for (const o of orderedOutputs) { + const colName = deriveOutputColumnName(o.path, taken) + taken.add(colName) + newOutputColumns.push({ + name: colName, + type: columnTypeForLeaf(o.leafType), + required: false, + unique: false, + workflowGroupId: groupId, + }) + groupOutputs.push({ blockId: o.blockId, path: o.path, columnName: colName }) + } + const workflowName = workflows?.find((w) => w.id === selectedWorkflowId)?.name ?? 'Workflow' + const group: WorkflowGroup = { + id: groupId, + workflowId: selectedWorkflowId, + name: workflowName, + dependencies, + outputs: groupOutputs, + autoRun, + } + await addWorkflowGroup.mutateAsync({ group, outputColumns: newOutputColumns }) + toast.success(`Added "${workflowName}"`) + } + onClose() + } catch (err) { + if (isValidationError(err)) { + const nameIssue = + findValidationIssue(err, ['updates', 'name']) ?? + findValidationIssue(err, ['name']) ?? + findValidationIssue(err, ['columnName']) + if (nameIssue) { + setNameError(nameIssue.message) + return + } + } + toast.error(toError(err).message) + } + } + + const saveDisabled = + addWorkflowGroup.isPending || updateWorkflowGroup.isPending || updateColumn.isPending + const titleByMode = { + create: 'Add workflow', + 'edit-group': 'Configure workflow', + 'edit-output': 'Configure output column', + } as const + + // edit-output mode is single-select on the output picker; everywhere else + // is multi-select. Same Combobox shape, different mode. + const outputPickerSingleSelect = isEditOutputMode + + return ( +
+
+

+ {titleByMode[config.mode]} +

+ +
+ +
+ {/* Single-output mode renames this column directly. */} + {isEditOutputMode && ( + <> +
+ Column name + { + setColumnNameInput(e.target.value) + if (nameError) setNameError(null) + }} + spellCheck={false} + autoComplete='off' + aria-invalid={ + (showValidation && !columnNameInput.trim()) || nameError ? true : undefined + } + /> + {showValidation && !columnNameInput.trim() && ( + + )} + {nameError && !(showValidation && !columnNameInput.trim()) && ( + + )} +
+ + + )} + + {selectedWorkflowId && ( + <> +
+
+ + {startBlockInputs.blockId && missingInputColumnNames.length > 0 && ( + + + + + + Adds {missingInputColumnNames.join(', ')} to the workflow's Start block + + + )} +
+
+ {workflowState.isLoading ? ( +
+ +
+ ) : workflowState.data ? ( + <> +
+ +
+ + + + + Open workflow + + + ) : ( +
+ + Unable to load preview + +
+ )} +
+
+ + + )} + +
+ Workflow + ({ label: wf.name, value: wf.id })) ?? []} + value={selectedWorkflowId} + onChange={(v) => setSelectedWorkflowId(v)} + placeholder='Select a workflow' + disabled={!workflows || workflows.length === 0 || isEditOutputMode} + emptyMessage='No manual triggers configured' + maxHeight={260} + searchable + searchPlaceholder='Search workflows...' + error={showValidation && !selectedWorkflowId ? 'Select a workflow' : null} + /> + {showValidation && !selectedWorkflowId && } +
+ + + +
+ {isEditOutputMode ? 'Output' : 'Output columns'} + setSelectedOutputs(v ? [v] : []), + } + : { + multiSelectValues: selectedOutputs, + onMultiSelectChange: setSelectedOutputs, + overlayContent: ( + + {selectedOutputs.length === 0 + ? 'Select outputs' + : `${selectedOutputs.length} selected`} + + ), + })} + /> + {showValidation && selectedWorkflowId && selectedOutputs.length === 0 && ( + + )} +
+ + {!isEditOutputMode && ( + <> + +
+ + setAutoRun(!!v)} + /> +
+ {autoRun && ( + <> + + + + )} + + )} +
+ +
+ + +
+
+ ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/index.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/index.ts index 762cc58de9d..d207594366d 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/index.ts +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/index.ts @@ -1,3 +1,2 @@ export * from './use-context-menu' -export * from './use-row-execution' export * from './use-table' diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-row-execution.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-row-execution.ts deleted file mode 100644 index 639680919db..00000000000 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-row-execution.ts +++ /dev/null @@ -1,95 +0,0 @@ -import { useCallback } from 'react' -import { createLogger } from '@sim/logger' -import { useMutation, useQueryClient } from '@tanstack/react-query' -import { toast } from '@/components/emcn' -import { requestJson } from '@/lib/api/client/request' -import { runRowWorkflowGroupContract } from '@/lib/api/contracts/tables' -import type { RowExecutionMetadata } from '@/lib/table' -import { - restoreCachedWorkflowCells, - snapshotAndMutateRows, - tableKeys, -} from '@/hooks/queries/tables' - -const logger = createLogger('useRowExecution') - -export interface RunWorkflowGroupParams { - tableId: string - rowId: string - workspaceId: string - groupId: string - /** Group's workflow id — used as the optimistic execution's `workflowId` - * when the row hasn't run this group before. */ - workflowId: string - /** Output column names produced by the group; cleared optimistically so - * stale values from the previous run don't linger in the UI before the - * server response writes the cleared row back. 
*/ - outputColumnNames: string[] -} - -interface UseRowExecutionReturn { - runWorkflowGroup: (params: RunWorkflowGroupParams) => Promise -} - -/** - * Single-row workflow-group runner. Optimistically flips - * `executions[groupId]` to `pending` for the targeted row before the network - * round-trip so the spinner appears instantly. Cache invalidation lives in - * `onSettled` so failed starts still refresh the rows query — otherwise a row - * stuck in stale state would remain in the UI until the next refetch. - */ -export function useRowExecution(): UseRowExecutionReturn { - const queryClient = useQueryClient() - - const mutation = useMutation({ - mutationFn: async (params: RunWorkflowGroupParams) => { - return requestJson(runRowWorkflowGroupContract, { - params: { tableId: params.tableId, rowId: params.rowId }, - body: { workspaceId: params.workspaceId, groupId: params.groupId }, - }) - }, - onMutate: async (params) => { - const snapshots = await snapshotAndMutateRows(queryClient, params.tableId, (r) => { - if (r.id !== params.rowId) return null - const exec = r.executions?.[params.groupId] as RowExecutionMetadata | undefined - const pending: RowExecutionMetadata = { - status: 'pending', - executionId: exec?.executionId ?? null, - jobId: null, - workflowId: exec?.workflowId ?? params.workflowId, - error: null, - } - const nextData = { ...r.data } - for (const colName of params.outputColumnNames) nextData[colName] = null - return { - ...r, - data: nextData, - executions: { ...(r.executions ?? {}), [params.groupId]: pending }, - } - }) - return { snapshots } - }, - onError: (err, _params, context) => { - if (context?.snapshots) restoreCachedWorkflowCells(queryClient, context.snapshots) - const message = err instanceof Error ? 
err.message : 'Unknown error' - logger.error('Run workflow group failed:', err) - toast.error(`Failed to run workflow: ${message}`) - }, - onSettled: (_data, _err, params) => { - queryClient.invalidateQueries({ queryKey: tableKeys.rowsRoot(params.tableId) }) - }, - }) - - const runWorkflowGroup = useCallback( - async (params: RunWorkflowGroupParams) => { - await mutation.mutateAsync(params).catch(() => { - // onError already toasted; swallow so callers can fire-and-forget. - }) - }, - // mutateAsync is stable in TanStack Query v5 - // eslint-disable-next-line react-hooks/exhaustive-deps - [] - ) - - return { runWorkflowGroup } -} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-table.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-table.ts index 258718d8310..b3d5b311ad4 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-table.ts +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-table.ts @@ -1,24 +1,15 @@ 'use client' -import { useCallback, useEffect, useMemo } from 'react' -import { useQueryClient } from '@tanstack/react-query' -import type { - ColumnDefinition, - RowData, - RowExecutions, - TableDefinition, - TableRow, - WorkflowGroup, -} from '@/lib/table' +import { useCallback, useMemo } from 'react' +import type { ColumnDefinition, TableDefinition, TableRow, WorkflowGroup } from '@/lib/table' import { TABLE_LIMITS } from '@/lib/table/constants' import type { FlattenOutputsBlockInput } from '@/lib/workflows/blocks/flatten-outputs' -import { useSocket } from '@/app/workspace/providers/socket-provider' import { getBlock } from '@/blocks' -import { tableKeys, useInfiniteTableRows, useTable as useTableQuery } from '@/hooks/queries/tables' +import { useInfiniteTableRows, useTable as useTableQuery } from '@/hooks/queries/tables' import { useWorkflowStates, useWorkflows } from '@/hooks/queries/workflows' import type { WorkflowMetadata } from 
'@/stores/workflows/registry/types' import type { WorkflowState } from '@/stores/workflows/workflow/types' -import type { BlockIconInfo, ColumnSourceInfo } from '../components/table/types' +import type { BlockIconInfo, ColumnSourceInfo } from '../components/table-grid/types' import type { QueryOptions } from '../types' const EMPTY_COLUMNS: ColumnDefinition[] = [] @@ -61,8 +52,6 @@ export interface UseTableReturn { /** Pre-resolved icon + block-name info per output column name. Headers read * from this map instead of each subscribing to its own workflow-state query. */ columnSourceInfo: Map - /** `workflowId → workflow.name` lookup for cell labels and execution-detail copy. */ - workflowNameById: Record } /** @@ -77,7 +66,6 @@ export interface UseTableReturn { * single hook return and re-render the world. */ export function useTable({ workspaceId, tableId, queryOptions }: UseTableParams): UseTableReturn { - const queryClient = useQueryClient() const { data: tableData, isLoading: isLoadingTable } = useTableQuery(workspaceId, tableId) const { @@ -113,110 +101,6 @@ export function useTable({ workspaceId, tableId, queryOptions }: UseTableParams) return { hasNextPage: Boolean(result.hasNextPage) } }, [fetchNextPage]) - // Realtime sync: merge `table-row-updated` / `table-row-deleted` socket - // events into the infinite-query cache so cell updates land without - // polling. While any mutation is in flight, defer to a stale-mark — the - // optimistic update guard wins until `onSettled` invalidates. 
- const { joinTable, leaveTable, onTableRowUpdated, onTableRowDeleted } = useSocket() - useEffect(() => { - if (!tableId) return - joinTable(tableId) - - type Page = { rows: TableRow[]; totalCount: number | null } - type InfiniteData = { pages: Page[]; pageParams: number[] } | undefined - - onTableRowUpdated((event) => { - if (event.tableId !== tableId) return - if (queryClient.isMutating() > 0) { - queryClient.invalidateQueries({ - queryKey: tableKeys.rowsRoot(tableId), - refetchType: 'none', - }) - return - } - queryClient.setQueriesData( - { queryKey: tableKeys.rowsRoot(tableId) }, - (current) => { - if (!current) return current - const incoming: TableRow = { - id: event.rowId, - data: event.data as RowData, - executions: (event.executions as RowExecutions) ?? {}, - position: event.position, - createdAt: '', - updatedAt: - typeof event.updatedAt === 'string' ? event.updatedAt : String(event.updatedAt), - } - let landed = false - const nextPages = current.pages.map((page) => { - const idx = page.rows.findIndex((r) => r.id === event.rowId) - if (idx === -1) return page - landed = true - const merged = { - ...page.rows[idx], - data: incoming.data, - executions: incoming.executions, - updatedAt: incoming.updatedAt, - } - const nextRows = [...page.rows] - nextRows[idx] = merged - return { ...page, rows: nextRows } - }) - if (landed) return { ...current, pages: nextPages } - // Row not in any cached page yet — append to the last page so it - // shows up immediately. The next refetch will reorder by position. - if (current.pages.length === 0) return current - const lastIdx = current.pages.length - 1 - const lastPage = current.pages[lastIdx] - const updatedLast: Page = { - ...lastPage, - rows: [...lastPage.rows, incoming], - totalCount: lastPage.totalCount === null ? 
null : lastPage.totalCount + 1, - } - const pagesWithAppend = [...current.pages] - pagesWithAppend[lastIdx] = updatedLast - return { ...current, pages: pagesWithAppend } - } - ) - }) - - onTableRowDeleted((event) => { - if (event.tableId !== tableId) return - if (queryClient.isMutating() > 0) { - queryClient.invalidateQueries({ - queryKey: tableKeys.rowsRoot(tableId), - refetchType: 'none', - }) - return - } - queryClient.setQueriesData( - { queryKey: tableKeys.rowsRoot(tableId) }, - (current) => { - if (!current) return current - let removed = false - const nextPages = current.pages.map((page) => { - const next = page.rows.filter((r) => r.id !== event.rowId) - if (next.length === page.rows.length) return page - removed = true - return { - ...page, - rows: next, - totalCount: page.totalCount === null ? null : Math.max(0, page.totalCount - 1), - } - }) - if (!removed) return current - return { ...current, pages: nextPages } - } - ) - }) - - return () => { - leaveTable() - } - // joinTable / leaveTable / on* are stable callbacks; tableId is the only real dep. - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [tableId]) - const { data: workflows } = useWorkflows(workspaceId) const columns = useMemo( @@ -251,14 +135,6 @@ export function useTable({ workspaceId, tableId, queryOptions }: UseTableParams) return map }, [tableWorkflowGroups, workflowStates]) - const workflowNameById = useMemo(() => { - const map: Record = {} - for (const wf of workflows ?? 
[]) { - map[wf.id] = wf.name - } - return map - }, [workflows]) - return { tableData, isLoadingTable, @@ -273,6 +149,5 @@ export function useTable({ workspaceId, tableId, queryOptions }: UseTableParams) tableWorkflowGroups, workflowStates, columnSourceInfo, - workflowNameById, } } diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/page.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/page.tsx index 8ea13d7f0c5..5ce3b7d9dd3 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/page.tsx +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/page.tsx @@ -1,5 +1,5 @@ import type { Metadata } from 'next' -import { Table } from './components' +import { Table } from './table' export const metadata: Metadata = { title: 'Table', diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/table.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/table.tsx new file mode 100644 index 00000000000..e9fea357e6b --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/table.tsx @@ -0,0 +1,685 @@ +'use client' + +import { useCallback, useMemo, useReducer, useRef, useState } from 'react' +import { createLogger } from '@sim/logger' +import { useParams, useRouter } from 'next/navigation' +import { + Button, + Modal, + ModalBody, + ModalContent, + ModalFooter, + ModalHeader, + toast, +} from '@/components/emcn' +import { Download, Pencil, Table as TableIcon, Trash, Upload } from '@/components/emcn/icons' +import type { RunMode } from '@/lib/api/contracts/tables' +import type { ColumnDefinition, Filter, TableRow as TableRowType } from '@/lib/table' +import { + type ColumnOption, + ResourceHeader, + ResourceOptionsBar, + type SortConfig, +} from '@/app/workspace/[workspaceId]/components' +import { LogDetails } from '@/app/workspace/[workspaceId]/logs/components' +import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider' +import { ImportCsvDialog } from 
'@/app/workspace/[workspaceId]/tables/components/import-csv-dialog' +import { useLogByExecutionId } from '@/hooks/queries/logs' +import { + downloadTableExport, + useCancelTableRuns, + useDeleteTable, + useRenameTable, + useRunColumn, +} from '@/hooks/queries/tables' +import { useInlineRename } from '@/hooks/use-inline-rename' +import { useLogDetailsUIStore } from '@/stores/logs/store' +import type { DeletedRowSnapshot } from '@/stores/table/types' +import { + type ColumnConfig, + ColumnConfigSidebar, + NewColumnDropdown, + RowModal, + RunStatusControl, + type SelectionSnapshot, + TableActionBar, + TableFilter, + TableGrid, + type WorkflowConfig, + WorkflowSidebar, +} from './components' +import { COLUMN_SIDEBAR_WIDTH } from './components/table-grid/constants' +import { COLUMN_TYPE_ICONS } from './components/table-grid/headers' +import { useTable } from './hooks' +import type { QueryOptions } from './types' +import { generateColumnName } from './utils' + +const logger = createLogger('Table') + +interface TableProps { + /** When set, the table renders without its page header / breadcrumbs / page-level + * options bar. Used by the mothership chat panel to embed a table inline. */ + embedded?: boolean + /** Identifiers — only set in embedded mode. Page mode reads from `useParams()`. */ + workspaceId?: string + tableId?: string +} + +/** + * Discriminated union encoding the at-most-one-open invariant for the three + * right-edge slideout panels. Driven by a `useReducer` so every transition + * goes through one place — opening a column config can't accidentally leave a + * workflow config open. 
+ */ +type SlideoutState = + | { kind: 'none' } + | { kind: 'column'; config: ColumnConfig } + | { kind: 'workflow'; config: WorkflowConfig } + | { kind: 'execution'; executionId: string } + +type SlideoutAction = + | { type: 'OPEN_COLUMN'; config: ColumnConfig } + | { type: 'OPEN_WORKFLOW'; config: WorkflowConfig } + | { type: 'OPEN_EXECUTION'; executionId: string } + | { type: 'CLOSE' } + +function slideoutReducer(_state: SlideoutState, action: SlideoutAction): SlideoutState { + switch (action.type) { + case 'OPEN_COLUMN': + return { kind: 'column', config: action.config } + case 'OPEN_WORKFLOW': + return { kind: 'workflow', config: action.config } + case 'OPEN_EXECUTION': + return { kind: 'execution', executionId: action.executionId } + case 'CLOSE': + return { kind: 'none' } + } +} + +/** + * Page-level wrapper for the table detail view. Mirrors the shape of + * `logs/logs.tsx`: a thin orchestrator that composes the data grid (``) + * and the page-level surface (sidebars, modals, action bar, breadcrumbs). + * + * Owns the at-most-one-open invariant for the three slideout panels (column + * config, workflow config, execution details) via a single reducer. The grid + * emits open requests via callbacks; the wrapper renders the panels. + * + * Embedded mode skips the page header but otherwise renders the same surface. 
+ */ +export function Table({ + embedded, + workspaceId: propWorkspaceId, + tableId: propTableId, +}: TableProps = {}) { + const params = useParams() + const router = useRouter() + const workspaceId = propWorkspaceId || (params.workspaceId as string) + const tableId = propTableId || (params.tableId as string) + + const [slideout, dispatch] = useReducer(slideoutReducer, { kind: 'none' }) + const [showDeleteTableConfirm, setShowDeleteTableConfirm] = useState(false) + const [isImportCsvOpen, setIsImportCsvOpen] = useState(false) + const [editingRow, setEditingRow] = useState(null) + const [deletingRows, setDeletingRows] = useState([]) + const [deletingColumns, setDeletingColumns] = useState(null) + const [selection, setSelection] = useState({ + actionBarRowIds: [], + runningInActionBarSelection: 0, + totalRunning: 0, + hasWorkflowColumns: false, + selectedRunScope: null, + selectionStats: { hasIncompleteOrFailed: false, hasCompleted: false, hasInFlight: false }, + singleWorkflowCell: null, + }) + const [queryOptions, setQueryOptions] = useState({ filter: null, sort: null }) + const [filterOpen, setFilterOpen] = useState(false) + + const userPermissions = useUserPermissionsContext() + + const onOpenColumnConfig = useCallback((config: ColumnConfig) => { + dispatch({ type: 'OPEN_COLUMN', config }) + }, []) + const onOpenWorkflowConfig = useCallback((config: WorkflowConfig) => { + dispatch({ type: 'OPEN_WORKFLOW', config }) + }, []) + const onOpenExecutionDetails = useCallback((executionId: string) => { + dispatch({ type: 'OPEN_EXECUTION', executionId }) + }, []) + const onCloseSlideout = () => dispatch({ type: 'CLOSE' }) + const onOpenRowModal = (row: TableRowType) => setEditingRow(row) + // useCallback because is memo-wrapped — these flow into + // the breadcrumbs / headerActions memos, whose identity drives that re-render. 
+ const onRequestDeleteTable = useCallback(() => setShowDeleteTableConfirm(true), []) + const onRequestImportCsv = useCallback(() => setIsImportCsvOpen(true), []) + // Used inside grid's `useCallback` deps — identity stability prevents the + // grid's `useCallback` from re-creating on every wrapper re-render. + const onRequestDeleteRows = useCallback((snapshots: DeletedRowSnapshot[]) => { + setDeletingRows(snapshots) + }, []) + const onRequestDeleteColumns = useCallback((names: string[]) => { + setDeletingColumns(names) + }, []) + + /** + * Sink populated by the grid: invoked from sidebar `onColumnRename` so the + * grid can rewrite its local `columnWidths` / `columnOrder` keys after a + * rename. The grid's render assigns to `current`; the wrapper forwards calls. + */ + const columnRenameSinkRef = useRef<((oldName: string, newName: string) => void) | null>(null) + const onColumnRename = (oldName: string, newName: string) => { + columnRenameSinkRef.current?.(oldName, newName) + } + + /** + * Sink the grid populates with its post-row-delete cleanup (push undo, + * clear selection). The wrapper invokes after the row-delete modal's + * mutation succeeds. + */ + const afterDeleteRowsSinkRef = useRef<((snapshots: DeletedRowSnapshot[]) => void) | null>(null) + + /** + * Sink the grid populates with its full delete-columns cascade (per-column + * mutation, undo push, columnOrder + columnWidths cleanup). The wrapper's + * delete-columns confirmation modal invokes this on confirm. + */ + const confirmDeleteColumnsSinkRef = useRef<((names: string[]) => void) | null>(null) + + /** + * Sink the grid populates with its `pushUndo({ type: 'rename-table', ... })` + * call so the wrapper's breadcrumb rename can register an undo entry on the + * grid's undo stack. 
+ */ + const pushTableRenameUndoSinkRef = useRef< + ((previousName: string, newName: string) => void) | null + >(null) + + // Single source of truth for `useTable` — drives both the grid render and + // the wrapper's slideouts/modals. The grid receives the bundle as props. + const { tableData, columns, tableWorkflowGroups, workflows } = useTable({ + workspaceId, + tableId, + queryOptions, + }) + + const runColumnMutation = useRunColumn({ workspaceId, tableId }) + const cancelRunsMutation = useCancelTableRuns({ workspaceId, tableId }) + const runColumnMutate = runColumnMutation.mutate + const cancelRunsMutate = cancelRunsMutation.mutate + + // Canonical run dispatcher. Every UI gesture (column-header menu, per-row + // gutter, action-bar Play/Refresh, right-click context menu) reduces to a + // (groupIds, rowIds?, runMode) triple. Empty groupIds = no-op. + const runScope = useCallback( + (args: { groupIds: string[]; rowIds?: string[]; runMode: RunMode }) => { + if (args.groupIds.length === 0) return + if (args.rowIds && args.rowIds.length === 0) return + runColumnMutate(args) + }, + [runColumnMutate] + ) + + const onRunColumn = useCallback( + (groupId: string, runMode: RunMode, rowIds?: string[]) => { + runScope({ groupIds: [groupId], rowIds, runMode }) + }, + [runScope] + ) + + const onRunRows = useCallback( + (rowIds: string[], runMode: RunMode) => { + runScope({ groupIds: tableWorkflowGroups.map((g) => g.id), rowIds, runMode }) + }, + [runScope, tableWorkflowGroups] + ) + + const onRunRow = useCallback( + (rowId: string) => { + runScope({ + groupIds: tableWorkflowGroups.map((g) => g.id), + rowIds: [rowId], + runMode: 'incomplete', + }) + }, + [runScope, tableWorkflowGroups] + ) + + // useCallback because is React.memo-wrapped — identity stability + // matters for per-row gutter Stop button. 
+ const onStopRow = useCallback( + (rowId: string) => { + cancelRunsMutate({ scope: 'row', rowId }) + }, + [cancelRunsMutate] + ) + + const onStopRows = (rowIds: string[]) => { + if (rowIds.length === 0) return + for (const rowId of rowIds) { + cancelRunsMutate({ scope: 'row', rowId }) + } + } + + // useCallback because is memo-wrapped. + const onStopAll = useCallback(() => { + cancelRunsMutate({ scope: 'all' }) + }, [cancelRunsMutate]) + + const onSelectionChange = (next: SelectionSnapshot) => { + setSelection(next) + } + + const renameTableMutation = useRenameTable(workspaceId) + const tableDataRef = useRef(tableData) + tableDataRef.current = tableData + const tableHeaderRename = useInlineRename({ + onSave: (_id, name) => { + const data = tableDataRef.current + if (data) pushTableRenameUndoSinkRef.current?.(data.name, name) + renameTableMutation.mutate({ tableId, name }) + }, + }) + + const handleNavigateBack = useCallback(() => { + router.push(`/workspace/${workspaceId}/tables`) + }, [router, workspaceId]) + + const handleStartTableRename = useCallback(() => { + const data = tableDataRef.current + if (data) tableHeaderRename.startRename(tableId, data.name) + }, [tableHeaderRename.startRename, tableId]) + + const handleAddColumnOfType = (type: ColumnDefinition['type']) => { + onOpenColumnConfig({ mode: 'create', proposedName: generateColumnName(columns), type }) + } + + const handleAddWorkflowColumn = () => { + onOpenWorkflowConfig({ mode: 'create', proposedName: generateColumnName(columns) }) + } + + const handleExportCsv = useCallback(async () => { + if (!tableData) return + try { + await downloadTableExport(tableData.id, tableData.name) + } catch (err) { + logger.error('Failed to export table:', err) + toast.error('Failed to export table') + } + }, [tableData]) + + const columnOptions = useMemo( + () => + columns.map((col) => ({ + id: col.name, + label: col.name, + type: col.type, + icon: COLUMN_TYPE_ICONS[col.type], + })), + [columns] + ) + + const sortConfig 
= useMemo(() => { + let active: SortConfig['active'] = null + if (queryOptions.sort) { + const entries = Object.entries(queryOptions.sort) + if (entries.length > 0) { + const [column, direction] = entries[0] + active = { column, direction } + } + } + return { + options: columnOptions, + active, + onSort: (column, direction) => + setQueryOptions((prev) => ({ ...prev, sort: { [column]: direction } })), + onClear: () => setQueryOptions((prev) => ({ ...prev, sort: null })), + } + }, [columnOptions, queryOptions.sort]) + + const handleFilterApply = (filter: Filter | null) => { + setQueryOptions((prev) => ({ ...prev, filter })) + } + + const breadcrumbs = useMemo( + () => [ + { label: 'Tables', onClick: handleNavigateBack }, + { + label: tableData?.name ?? '', + editing: tableHeaderRename.editingId + ? { + isEditing: true, + value: tableHeaderRename.editValue, + onChange: tableHeaderRename.setEditValue, + onSubmit: tableHeaderRename.submitRename, + onCancel: tableHeaderRename.cancelRename, + } + : undefined, + dropdownItems: [ + { + label: 'Rename', + icon: Pencil, + disabled: !tableData, + onClick: handleStartTableRename, + }, + { + label: 'Delete', + icon: Trash, + disabled: !tableData, + onClick: onRequestDeleteTable, + }, + ], + }, + ], + [ + handleNavigateBack, + tableData, + tableHeaderRename.editingId, + tableHeaderRename.editValue, + tableHeaderRename.setEditValue, + tableHeaderRename.submitRename, + tableHeaderRename.cancelRename, + handleStartTableRename, + onRequestDeleteTable, + ] + ) + + const headerActions = useMemo( + () => + tableData + ? [ + { + label: 'Import CSV', + icon: Upload, + onClick: onRequestImportCsv, + disabled: userPermissions.canEdit !== true, + }, + { + label: 'Export CSV', + icon: Download, + onClick: () => void handleExportCsv(), + disabled: tableData.rowCount === 0, + }, + ] + : undefined, + [tableData, userPermissions.canEdit, handleExportCsv, onRequestImportCsv] + ) + + const createTrigger = userPermissions.canEdit ? 
( + + ) : null + + const logPanelWidth = useLogDetailsUIStore((state) => state.panelWidth) + const sidebarReservedPx = + slideout.kind === 'column' || slideout.kind === 'workflow' + ? COLUMN_SIDEBAR_WIDTH + : slideout.kind === 'execution' + ? logPanelWidth + : 0 + + const deleteTableMutation = useDeleteTable(workspaceId) + const handleDeleteTable = async () => { + try { + await deleteTableMutation.mutateAsync(tableId) + setShowDeleteTableConfirm(false) + router.push(`/workspace/${workspaceId}/tables`) + } catch { + setShowDeleteTableConfirm(false) + } + } + + const columnConfig = slideout.kind === 'column' ? slideout.config : null + const workflowConfig = slideout.kind === 'workflow' ? slideout.config : null + const executionId = slideout.kind === 'execution' ? slideout.executionId : null + // Fetch the workflow log when the execution-details slideout is open. Reuses + // the logs page's directly — no intermediate wrapper needed for + // a one-line query forward. + const { data: executionLog } = useLogByExecutionId(workspaceId, executionId) + + return ( +
+ {!embedded && ( + <> + 0 ? ( + + ) : null + } + /> + setFilterOpen((prev) => !prev)} + filterActive={filterOpen || !!queryOptions.filter} + /> + {filterOpen && ( + setFilterOpen(false)} + /> + )} + + )} + + {userPermissions.canEdit && ( + + selection.selectedRunScope && + runScope({ ...selection.selectedRunScope, runMode: 'incomplete' }) + } + onRefresh={() => + selection.selectedRunScope && + runScope({ ...selection.selectedRunScope, runMode: 'all' }) + } + onStopWorkflows={() => + selection.selectedRunScope && onStopRows(selection.selectedRunScope.rowIds) + } + onViewExecution={ + selection.singleWorkflowCell?.canViewExecution && + selection.singleWorkflowCell.executionId + ? () => { + const id = selection.singleWorkflowCell?.executionId + if (id) onOpenExecutionDetails(id) + } + : undefined + } + /> + )} + c.name === columnConfig.columnName) ?? null) + : null + } + workspaceId={workspaceId} + tableId={tableId} + onColumnRename={onColumnRename} + /> + + + {tableData && ( + + )} + {editingRow && tableData && ( + setEditingRow(null)} + table={tableData} + row={editingRow} + onSuccess={() => setEditingRow(null)} + /> + )} + {deletingRows.length > 0 && tableData && ( + setDeletingRows([])} + table={tableData} + rowIds={deletingRows.map((r) => r.rowId)} + onSuccess={() => { + afterDeleteRowsSinkRef.current?.(deletingRows) + setDeletingRows([]) + }} + /> + )} + { + if (!open) setDeletingColumns(null) + }} + > + + + {deletingColumns && deletingColumns.length > 1 + ? `Delete ${deletingColumns.length} Columns` + : 'Delete Column'} + + +

+ {deletingColumns && deletingColumns.length > 1 ? ( + <> + Are you sure you want to delete{' '} + + {deletingColumns.length} columns + + ?{' '} + + ) : ( + <> + Are you sure you want to delete{' '} + + {deletingColumns?.[0]} + + ?{' '} + + )} + + This will remove all data in{' '} + {deletingColumns && deletingColumns.length > 1 ? 'these columns' : 'this column'}. + {' '} + You can undo this action. +

+
+ + + + +
+
+ {!embedded && ( + + + Delete Table + +

+ Are you sure you want to delete{' '} + {tableData?.name}?{' '} + + All {tableData?.rowCount ?? 0} rows will be removed. + {' '} + You can restore it from Recently Deleted in Settings. +

+
+ + + + +
+
+ )} +
+ ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/utils.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/utils.ts index ce561a013b8..75c57b61999 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/utils.ts +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/utils.ts @@ -2,6 +2,21 @@ import type { ColumnDefinition } from '@/lib/table' type BadgeVariant = 'green' | 'blue' | 'purple' | 'orange' | 'teal' | 'gray' +/** + * Pick a fresh "untitled[_N]" name not already taken by `columns`. Used by + * both the page-header and inline-header "New column" dropdowns. + */ +export function generateColumnName(columns: ReadonlyArray<{ name: string }>): string { + const existing = new Set(columns.map((c) => c.name.toLowerCase())) + let name = 'untitled' + let i = 2 + while (existing.has(name.toLowerCase())) { + name = `untitled_${i}` + i++ + } + return name +} + /** * Returns the appropriate badge color variant for a column type */ @@ -51,7 +66,11 @@ export function cleanCellValue(value: unknown, column: ColumnDefinition): unknow } /** - * Format a stored value for display in an input field. + * Format a stored value for display in an input field. Defensive against + * shape drift: a column whose declared type lags its actual data (e.g. a + * workflow column mid-remap, where the schema cache hasn't refetched but + * row data already has the new mapping's value) would otherwise render + * `[object Object]` via `String(value)`. 
*/ export function formatValueForInput(value: unknown, type: string): string { if (value === null || value === undefined) return '' @@ -69,6 +88,7 @@ export function formatValueForInput(value: unknown, type: string): string { return str } } + if (typeof value === 'object') return JSON.stringify(value) return String(value) } diff --git a/apps/sim/app/workspace/[workspaceId]/tables/components/import-csv-dialog/import-csv-dialog.tsx b/apps/sim/app/workspace/[workspaceId]/tables/components/import-csv-dialog/import-csv-dialog.tsx index 9da13375a22..166f7dc06c9 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/components/import-csv-dialog/import-csv-dialog.tsx +++ b/apps/sim/app/workspace/[workspaceId]/tables/components/import-csv-dialog/import-csv-dialog.tsx @@ -46,8 +46,8 @@ const CREATE_VALUE = '__ create __' /** * Converts the verbose backend error messages into a short, human-friendly * summary suitable for the modal footer. Specifically collapses repeated - * `Row N: Column "X" must be unique. Value "Y" already exists in row row_abc` - * segments into a single concise summary without internal row IDs. + * `Row N: Column "X" must be unique. Value "Y" already exists in row M` + * segments into a single concise summary. 
*/ function summarizeImportError(message: string): string { const uniqueMatches = [ @@ -75,9 +75,9 @@ function summarizeImportError(message: string): string { return rowLimitMatch[0].trim() } - const stripped = message.replace(/\s+in row\s+row_[a-f0-9]+/gi, '').trim() - if (stripped.length > 180) return `${stripped.slice(0, 177)}...` - return stripped + const trimmed = message.trim() + if (trimmed.length > 180) return `${trimmed.slice(0, 177)}...` + return trimmed } interface ImportCsvDialogProps { diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/mcp-dynamic-args/mcp-dynamic-args.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/mcp-dynamic-args/mcp-dynamic-args.tsx index d197ba041da..97d286ac375 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/mcp-dynamic-args/mcp-dynamic-args.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/mcp-dynamic-args/mcp-dynamic-args.tsx @@ -1,7 +1,7 @@ import { useCallback } from 'react' import { createLogger } from '@sim/logger' import { useParams } from 'next/navigation' -import { Combobox, Label, Slider, Switch } from '@/components/emcn/components' +import { Combobox, FieldDivider, Label, Slider, Switch } from '@/components/emcn/components' import { LongInput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/long-input/long-input' import { ShortInput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/short-input/short-input' import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value' 
@@ -357,17 +357,7 @@ export function McpDynamicArgs({ )} {renderParameterInput(paramName, paramSchema as any)} - {showDivider && ( -
-
-
- )} + {showDivider && }
) })} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/editor.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/editor.tsx index 1753bc2da46..de88bd656b5 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/editor.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/editor.tsx @@ -16,7 +16,7 @@ import { useParams } from 'next/navigation' import { usePostHog } from 'posthog-js/react' import { useShallow } from 'zustand/react/shallow' import { useStoreWithEqualityFn } from 'zustand/traditional' -import { Button, Loader, Tooltip } from '@/components/emcn' +import { Button, FieldDivider, Loader, Tooltip } from '@/components/emcn' import { captureEvent } from '@/lib/posthog/client' import { buildCanonicalIndex, @@ -542,9 +542,7 @@ export function Editor() { )} -
-
-
+ )} {subBlocks.length === 0 && !isWorkflowBlock ? ( @@ -605,11 +603,7 @@ export function Editor() { : undefined } /> - {showDivider && ( -
-
-
- )} + {showDivider && }
) })} @@ -660,9 +654,7 @@ export function Editor() { allowExpandInPreview={false} /> {index < advancedOnlySubBlocks.length - 1 && ( -
-
-
+ )}
) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel.tsx index de69bbd886c..fb40c2496b1 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel.tsx @@ -4,7 +4,7 @@ import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react' import { createLogger } from '@sim/logger' import { toError } from '@sim/utils/errors' import { useQueryClient } from '@tanstack/react-query' -import { History, Plus, Square } from 'lucide-react' +import { History, Plus } from 'lucide-react' import { useParams, useRouter } from 'next/navigation' import { usePostHog } from 'posthog-js/react' import { useShallow } from 'zustand/react/shallow' @@ -33,7 +33,7 @@ import { PopoverTrigger, Trash, } from '@/components/emcn' -import { Lock, Unlock, Upload } from '@/components/emcn/icons' +import { Lock, Square, Unlock, Upload } from '@/components/emcn/icons' import { VariableIcon } from '@/components/icons' import { requestJson } from '@/lib/api/client/request' import { diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/preview/components/preview-editor/preview-editor.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/preview/components/preview-editor/preview-editor.tsx index 254642e50ba..5975d80012e 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/preview/components/preview-editor/preview-editor.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/preview/components/preview-editor/preview-editor.tsx @@ -24,6 +24,7 @@ import { ChevronDown, Code, Combobox, + FieldDivider, Input, Label, Tooltip, @@ -1442,15 +1443,7 @@ function PreviewEditorContent({ )}
-
-
-
+
)} @@ -1472,17 +1465,7 @@ function PreviewEditorContent({ subBlockValues={subBlockValues} disabled={true} /> - {index < visibleSubBlocks.length - 1 && ( -
-
-
- )} + {index < visibleSubBlocks.length - 1 && }
))} diff --git a/apps/sim/app/workspace/providers/socket-provider.tsx b/apps/sim/app/workspace/providers/socket-provider.tsx index 4faa626b78e..fa43d4cb3eb 100644 --- a/apps/sim/app/workspace/providers/socket-provider.tsx +++ b/apps/sim/app/workspace/providers/socket-provider.tsx @@ -56,26 +56,6 @@ interface PresenceUser { selection?: { type: 'block' | 'edge' | 'none'; id?: string } } -interface TableRowUpdatedEvent { - tableId: string - rowId: string - data: Record - /** Per-group execution state. Keyed by `WorkflowGroup.id`. */ - executions?: Record - position: number - updatedAt: string | number -} - -interface TableRowDeletedEvent { - tableId: string - rowId: string -} - -interface TableDeletedEvent { - tableId: string - timestamp: number -} - interface SocketContextType { socket: Socket | null isConnected: boolean @@ -84,13 +64,10 @@ interface SocketContextType { isRetryingWorkflowJoin: boolean authFailed: boolean currentWorkflowId: string | null - currentTableId: string | null currentSocketId: string | null presenceUsers: PresenceUser[] joinWorkflow: (workflowId: string) => void leaveWorkflow: () => void - joinTable: (tableId: string) => void - leaveTable: () => void retryConnection: () => void emitWorkflowOperation: ( workflowId: string, @@ -128,9 +105,6 @@ interface SocketContextType { onWorkflowDeployed: (handler: (data: any) => void) => void onOperationConfirmed: (handler: (data: any) => void) => void onOperationFailed: (handler: (data: any) => void) => void - onTableRowUpdated: (handler: (data: TableRowUpdatedEvent) => void) => void - onTableRowDeleted: (handler: (data: TableRowDeletedEvent) => void) => void - onTableDeleted: (handler: (data: TableDeletedEvent) => void) => void } const SocketContext = createContext({ @@ -141,13 +115,10 @@ const SocketContext = createContext({ isRetryingWorkflowJoin: false, authFailed: false, currentWorkflowId: null, - currentTableId: null, currentSocketId: null, presenceUsers: [], joinWorkflow: () => {}, leaveWorkflow: 
() => {}, - joinTable: () => {}, - leaveTable: () => {}, retryConnection: () => {}, emitWorkflowOperation: () => {}, emitSubblockUpdate: () => {}, @@ -165,9 +136,6 @@ const SocketContext = createContext({ onWorkflowDeployed: () => {}, onOperationConfirmed: () => {}, onOperationFailed: () => {}, - onTableRowUpdated: () => {}, - onTableRowDeleted: () => {}, - onTableDeleted: () => {}, }) export const useSocket = () => useContext(SocketContext) @@ -201,10 +169,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) { urlWorkflowIdRef.current = urlWorkflowId explicitWorkflowIdRef.current = explicitWorkflowId - const [currentTableId, setCurrentTableId] = useState(null) - const currentTableIdRef = useRef(null) - currentTableIdRef.current = currentTableId - const eventHandlers = useRef<{ workflowOperation?: (data: any) => void subblockUpdate?: (data: any) => void @@ -217,9 +181,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) { workflowDeployed?: (data: any) => void operationConfirmed?: (data: any) => void operationFailed?: (data: any) => void - tableRowUpdated?: (data: TableRowUpdatedEvent) => void - tableRowDeleted?: (data: TableRowDeletedEvent) => void - tableDeleted?: (data: TableDeletedEvent) => void }>({}) const positionUpdateTimeouts = useRef>(new Map()) @@ -422,10 +383,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) { transport: socketInstance.io.engine?.transport?.name, }) executeJoinCommands(joinControllerRef.current.setConnected(true)) - // Re-join the table room after (re)connect so missed events resume. 
- if (currentTableIdRef.current) { - socketInstance.emit('join-table', { tableId: currentTableIdRef.current }) - } }) socketInstance.on('disconnect', (reason) => { @@ -604,34 +561,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) { eventHandlers.current.workflowDeployed?.(data) }) - socketInstance.on('join-table-success', ({ tableId }) => { - if (currentTableIdRef.current !== tableId) { - currentTableIdRef.current = tableId - setCurrentTableId(tableId) - } - logger.debug(`Joined table room ${tableId}`) - }) - - socketInstance.on('join-table-error', ({ tableId, error, code }) => { - logger.warn('join-table-error', { tableId, error, code }) - }) - - socketInstance.on('table-row-updated', (data: TableRowUpdatedEvent) => { - eventHandlers.current.tableRowUpdated?.(data) - }) - - socketInstance.on('table-row-deleted', (data: TableRowDeletedEvent) => { - eventHandlers.current.tableRowDeleted?.(data) - }) - - socketInstance.on('table-deleted', (data: TableDeletedEvent) => { - if (currentTableIdRef.current === data.tableId) { - currentTableIdRef.current = null - setCurrentTableId(null) - } - eventHandlers.current.tableDeleted?.(data) - }) - const rehydrateWorkflowStores = async (workflowId: string, workflowState: any) => { const [ { useOperationQueueStore }, @@ -840,33 +769,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) { setExplicitWorkflowId(null) }, []) - const joinTable = useCallback((tableId: string) => { - const s = socketRef.current - if (!s) { - // Defer: when socket connects, the requestedTableId effect below re-emits join-table. - currentTableIdRef.current = tableId - setCurrentTableId(tableId) - return - } - // Idempotent: if we're already in this room, no-op. - if (currentTableIdRef.current === tableId && s.connected) return - // Switching tables: leave the previous room first. 
- if (currentTableIdRef.current && currentTableIdRef.current !== tableId) { - s.emit('leave-table', { tableId: currentTableIdRef.current }) - } - currentTableIdRef.current = tableId - setCurrentTableId(tableId) - s.emit('join-table', { tableId }) - }, []) - - const leaveTable = useCallback(() => { - const s = socketRef.current - const tableId = currentTableIdRef.current - currentTableIdRef.current = null - setCurrentTableId(null) - if (s && tableId) s.emit('leave-table', { tableId }) - }, []) - /** * Retry socket connection after auth failure. * Call this when user has re-authenticated (e.g., after login redirect). @@ -1115,18 +1017,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) { eventHandlers.current.operationFailed = handler }, []) - const onTableRowUpdated = useCallback((handler: (data: TableRowUpdatedEvent) => void) => { - eventHandlers.current.tableRowUpdated = handler - }, []) - - const onTableRowDeleted = useCallback((handler: (data: TableRowDeletedEvent) => void) => { - eventHandlers.current.tableRowDeleted = handler - }, []) - - const onTableDeleted = useCallback((handler: (data: TableDeletedEvent) => void) => { - eventHandlers.current.tableDeleted = handler - }, []) - const contextValue = useMemo( () => ({ socket, @@ -1136,13 +1026,10 @@ export function SocketProvider({ children, user }: SocketProviderProps) { isRetryingWorkflowJoin, authFailed, currentWorkflowId, - currentTableId, currentSocketId, presenceUsers, joinWorkflow, leaveWorkflow, - joinTable, - leaveTable, retryConnection, emitWorkflowOperation, emitSubblockUpdate, @@ -1160,9 +1047,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) { onWorkflowDeployed, onOperationConfirmed, onOperationFailed, - onTableRowUpdated, - onTableRowDeleted, - onTableDeleted, }), [ socket, @@ -1172,13 +1056,10 @@ export function SocketProvider({ children, user }: SocketProviderProps) { isRetryingWorkflowJoin, authFailed, currentWorkflowId, - currentTableId, 
currentSocketId, presenceUsers, joinWorkflow, leaveWorkflow, - joinTable, - leaveTable, retryConnection, emitWorkflowOperation, emitSubblockUpdate, @@ -1196,9 +1077,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) { onWorkflowDeployed, onOperationConfirmed, onOperationFailed, - onTableRowUpdated, - onTableRowDeleted, - onTableDeleted, ] ) diff --git a/apps/sim/background/resume-execution.ts b/apps/sim/background/resume-execution.ts index 9b04463aa4f..f7bd79d2a37 100644 --- a/apps/sim/background/resume-execution.ts +++ b/apps/sim/background/resume-execution.ts @@ -1,6 +1,7 @@ import { createLogger } from '@sim/logger' import { toError } from '@sim/utils/errors' import { task } from '@trigger.dev/sdk' +import type { RowData, RowExecutionMetadata } from '@/lib/table/types' import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager' const logger = createLogger('TriggerResumeExecution') @@ -33,6 +34,135 @@ export async function executeResumeJob(payload: ResumeExecutionPayload) { throw new Error(`Paused execution not found: ${pausedExecutionId}`) } + // If this paused execution belongs to a table cell, rehydrate the cell + // context so post-resume block outputs land on the same row + group as + // the original cell task. Without this, blocks that run after the human + // approves write nothing back to the table — the row silently truncates + // at the pause boundary. The original `parentExecutionId` is preserved + // on the cell's `executions[gid]` so it stays one logical execution + // across the pause/resume boundary. 
+ const { findCellContextByExecutionId } = await import('@/lib/table/workflow-columns') + const cellContext = await findCellContextByExecutionId(parentExecutionId) + + let cellOnBlockComplete: ((blockId: string, output: unknown) => Promise) | undefined + let writeCellTerminal: + | ((status: 'completed' | 'error' | 'paused', error: string | null) => Promise) + | undefined + + if (cellContext) { + const { getTableById } = await import('@/lib/table/service') + const { writeWorkflowGroupState, buildOutputsByBlockId } = await import( + '@/lib/table/cell-write' + ) + const { pluckByPath } = await import('@/lib/table/pluck') + + const table = await getTableById(cellContext.tableId) + const group = table?.schema.workflowGroups?.find((g) => g.id === cellContext.groupId) + if (group) { + const outputsByBlockId = buildOutputsByBlockId(group) + const accumulatedData: RowData = {} + const blockErrors: Record = {} + const writeCtx = { + tableId: cellContext.tableId, + rowId: cellContext.rowId, + workspaceId: cellContext.workspaceId, + groupId: cellContext.groupId, + executionId: parentExecutionId, + requestId: `wfgrp-resume-${parentExecutionId}`, + } + let writeChain: Promise = Promise.resolve() + let terminalWritten = false + + cellOnBlockComplete = async (blockId, output) => { + const outputs = outputsByBlockId.get(blockId) + if (!outputs) return + const blockResult = + output && typeof output === 'object' && 'output' in (output as object) + ? (output as { output: unknown }).output + : output + const errorMessage = + blockResult && + typeof blockResult === 'object' && + typeof (blockResult as { error?: unknown }).error === 'string' + ? 
(blockResult as { error: string }).error + : null + if (errorMessage) { + blockErrors[blockId] = errorMessage + } else { + for (const out of outputs) { + const plucked = pluckByPath(blockResult, out.path) + if (plucked === undefined) continue + accumulatedData[out.columnName] = plucked as RowData[string] + } + } + const dataSnapshot: RowData = { ...accumulatedData } + const blockErrorsSnapshot = { ...blockErrors } + writeChain = writeChain + .then(async () => { + if (terminalWritten) return + const partial: RowExecutionMetadata = { + status: 'running', + executionId: parentExecutionId, + jobId: null, + workflowId: cellContext.workflowId, + error: null, + blockErrors: blockErrorsSnapshot, + } + await writeWorkflowGroupState(writeCtx, { + executionState: partial, + dataPatch: dataSnapshot, + }) + }) + .catch((err) => { + logger.warn( + `Resume per-block partial write failed (table=${cellContext.tableId} row=${cellContext.rowId} group=${cellContext.groupId}):`, + err + ) + }) + } + + writeCellTerminal = async (status, error) => { + terminalWritten = true + await writeChain.catch(() => {}) + // Paused → keep `pending` + sentinel jobId so eligibility predicates + // continue treating the row as in-flight while we wait on another + // pause. Mirrors the initial cell-task pause branch. + const terminal: RowExecutionMetadata = + status === 'paused' + ? 
{ + status: 'pending', + executionId: parentExecutionId, + jobId: `paused-${parentExecutionId}`, + workflowId: cellContext.workflowId, + error: null, + blockErrors, + } + : { + status, + executionId: parentExecutionId, + jobId: null, + workflowId: cellContext.workflowId, + error, + runningBlockIds: [], + blockErrors, + } + await writeWorkflowGroupState(writeCtx, { + executionState: terminal, + dataPatch: accumulatedData, + }) + } + } else { + logger.warn( + 'Cell context found but table or group missing — falling back to plain resume', + { + parentExecutionId, + tableId: cellContext.tableId, + groupId: cellContext.groupId, + } + ) + } + } + const result = await PauseResumeManager.startResumeExecution({ resumeEntryId: payload.resumeEntryId, resumeExecutionId: payload.resumeExecutionId, @@ -40,8 +170,19 @@ export async function executeResumeJob(payload: ResumeExecutionPayload) { contextId: payload.contextId, resumeInput: payload.resumeInput, userId: payload.userId, + ...(cellOnBlockComplete ? { onBlockComplete: cellOnBlockComplete } : {}), }) + if (writeCellTerminal) { + if (result.status === 'paused') { + await writeCellTerminal('paused', null) + } else if (result.success) { + await writeCellTerminal('completed', null) + } else { + await writeCellTerminal('error', result.error ?? 
'Workflow execution failed') + } + } + logger.info('Background resume execution completed', { resumeExecutionId, workflowId, diff --git a/apps/sim/background/workflow-column-execution.ts b/apps/sim/background/workflow-column-execution.ts index ba0b03519e3..7c4b977d537 100644 --- a/apps/sim/background/workflow-column-execution.ts +++ b/apps/sim/background/workflow-column-execution.ts @@ -36,9 +36,9 @@ export async function executeWorkflowGroupCellJob( const { getTableById, getRowById, updateRow } = await import('@/lib/table/service') const { executeWorkflow } = await import('@/lib/workflows/executor/execute-workflow') const { loadWorkflowFromNormalizedTables } = await import('@/lib/workflows/persistence/utils') - const { writeWorkflowGroupState, buildOutputsByBlockId } = await import( - '@/lib/table/cell-write' - ) + const { writeWorkflowGroupState, markWorkflowGroupPickedUp, buildOutputsByBlockId } = + await import('@/lib/table/cell-write') + const { stashCellContextForResume } = await import('@/lib/table/workflow-columns') const cellCtx = { tableId, rowId, workspaceId, groupId, executionId, requestId } const writeState = (executionState: RowExecutionMetadata, dataPatch?: RowData) => @@ -112,6 +112,16 @@ export async function executeWorkflowGroupCellJob( return } + // Flip `queued` → `running` to signal the worker has actually started. + // Bail out if the cancel-sticky guard rejects the write (a stop click + // landed between enqueue and pickup). + const queuedExec = row.executions?.[groupId] as RowExecutionMetadata | undefined + const pickedUp = await markWorkflowGroupPickedUp(cellCtx, { + workflowId, + jobId: queuedExec?.jobId ?? null, + }) + if (pickedUp === 'skipped') return + // Output columns produced by THIS group are skipped on input — they're // populated by the run we're starting. 
Other group's outputs ARE // included (they're plain primitives in `row.data` thanks to the @@ -267,6 +277,36 @@ export async function executeWorkflowGroupCellJob( terminalWritten = true await writeChain.catch(() => {}) + if (result.status === 'paused') { + // HITL pause: keep the row in `pending` so the renderer surfaces it + // the same way logs do, but stamp a sentinel jobId so the scheduler's + // eligibility predicate keeps treating the row as in-flight (no + // re-enqueue while we wait on a human). Resume worker rewrites this + // back to `completed`/`error` once the pause clears. + await writeState( + { + status: 'pending', + executionId, + jobId: `paused-${executionId}`, + workflowId, + error: null, + runningBlockIds: [], + blockErrors, + }, + accumulatedData + ) + await stashCellContextForResume({ + executionId, + tableId, + tableName, + rowId, + groupId, + workflowId, + workspaceId, + }) + return + } + await writeState( { status: result.success ? 'completed' : 'error', @@ -313,10 +353,10 @@ export const workflowGroupCellTask = task({ machine: 'medium-1x', retry: { maxAttempts: 1 }, // Combined with `concurrencyKey: tableId`, caps each table's sub-queue to - // 10 in-flight cell jobs while letting different tables run in parallel. + // 20 in-flight cell jobs while letting different tables run in parallel. 
queue: { name: 'workflow-group-cell', - concurrencyLimit: 10, + concurrencyLimit: 20, }, run: (payload: WorkflowGroupCellPayload, { signal }) => executeWorkflowGroupCellJob(payload, signal), diff --git a/apps/sim/components/emcn/components/field-divider/field-divider.tsx b/apps/sim/components/emcn/components/field-divider/field-divider.tsx new file mode 100644 index 00000000000..8d1f6b10212 --- /dev/null +++ b/apps/sim/components/emcn/components/field-divider/field-divider.tsx @@ -0,0 +1,42 @@ +import { cn } from '@/lib/core/utils/cn' + +const DASHED_DIVIDER_STYLE = { + backgroundImage: + 'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)', +} as const + +export interface FieldDividerProps extends React.HTMLAttributes { + /** + * Adds the `subblock-divider` marker class so the workflow editor's CSS + * (`globals.css` `:has()` rule) can hide the divider when adjacent subblocks + * render empty content. Default `false` — only the workflow editor needs it. + */ + subblockMarker?: boolean +} + +/** + * Dashed horizontal divider used between fields in form-style panels (the + * workflow editor's subblock list, the table column/workflow sidebars). Same + * visual as the existing `subblock-divider` pattern in `editor.tsx`, + * promoted here so consumers don't keep redefining the gradient style. + * + * @example + * ```tsx + * ... + * + * ... + * ``` + */ +function FieldDivider({ className, subblockMarker = false, ...props }: FieldDividerProps) { + return ( +
+
+
+ ) +} + +export { FieldDivider } diff --git a/apps/sim/components/emcn/components/index.ts b/apps/sim/components/emcn/components/index.ts index 0f30eeb09ac..56f564bbe21 100644 --- a/apps/sim/components/emcn/components/index.ts +++ b/apps/sim/components/emcn/components/index.ts @@ -59,6 +59,7 @@ export { DropdownMenuTrigger, } from './dropdown-menu/dropdown-menu' export { Expandable, ExpandableContent } from './expandable/expandable' +export { FieldDivider, type FieldDividerProps } from './field-divider/field-divider' export { FormField, type FormFieldProps } from './form-field/form-field' export { Input, type InputProps, inputVariants } from './input/input' export { InputOTP, InputOTPGroup, InputOTPSeparator, InputOTPSlot } from './input-otp/input-otp' diff --git a/apps/sim/components/emcn/icons/index.ts b/apps/sim/components/emcn/icons/index.ts index 9def17d5665..781a1a1b09a 100644 --- a/apps/sim/components/emcn/icons/index.ts +++ b/apps/sim/components/emcn/icons/index.ts @@ -72,6 +72,7 @@ export { Server } from './server' export { Settings } from './settings' export { ShieldCheck } from './shield-check' export { Sim } from './sim' +export { Square } from './square' export { SquareArrowUpRight } from './square-arrow-up-right' export { Table } from './table' export { TableX } from './table-x' diff --git a/apps/sim/components/emcn/icons/square.tsx b/apps/sim/components/emcn/icons/square.tsx new file mode 100644 index 00000000000..66bea3a4a09 --- /dev/null +++ b/apps/sim/components/emcn/icons/square.tsx @@ -0,0 +1,27 @@ +import type { SVGProps } from 'react' + +/** + * Square icon (stroke/outline version) — used as the "stop" glyph in + * media-control-style buttons (per-row run/stop, table action bar, context + * menus). Same visual as `lucide-react`'s `Square` so existing call sites + * migrate without any visual change. 
+ */ +export function Square(props: SVGProps) { + return ( + + ) +} diff --git a/apps/sim/hooks/queries/tables.ts b/apps/sim/hooks/queries/tables.ts index 34055ea83ee..5314220f842 100644 --- a/apps/sim/hooks/queries/tables.ts +++ b/apps/sim/hooks/queries/tables.ts @@ -4,7 +4,6 @@ * React Query hooks for managing user-defined tables. */ -import { useEffect } from 'react' import { createLogger } from '@sim/logger' import { type InfiniteData, @@ -15,6 +14,7 @@ import { useQueryClient, } from '@tanstack/react-query' import { toast } from '@/components/emcn' +import { isValidationError } from '@/lib/api/client/errors' import { requestJson } from '@/lib/api/client/request' import type { ContractJsonResponse } from '@/lib/api/contracts' import { @@ -39,9 +39,10 @@ import { type InsertTableRowBodyInput, listTableRowsContract, listTablesContract, + type RunMode, renameTableContract, restoreTableContract, - runWorkflowGroupContract, + runColumnContract, type TableIdParamsInput, type TableRowParamsInput, type TableRowsQueryInput, @@ -67,7 +68,7 @@ import type { WorkflowGroupDependencies, WorkflowGroupOutput, } from '@/lib/table' -import { useSocket } from '@/app/workspace/providers/socket-provider' +import { optimisticallyScheduleNewlyEligibleGroups } from '@/lib/table/deps' /** Short poll to surface running → completed transitions from the server without a dedicated realtime channel. */ const ROWS_POLL_INTERVAL_WHILE_RUNNING_MS = 1500 @@ -77,13 +78,20 @@ function hasRunningGroupExecution(rows: TableRow[] | undefined): boolean { for (const row of rows) { const executions = row.executions ?? 
{} for (const key in executions) { - const exec = executions[key] - if (exec?.status === 'running' || exec?.status === 'pending') return true + if (isOptimisticInFlight(executions[key])) return true } } return false } +function hasRunningGroupExecutionInPages(pages: TableRowsResponse[] | undefined): boolean { + if (!pages) return false + for (const page of pages) { + if (hasRunningGroupExecution(page.rows)) return true + } + return false +} + const logger = createLogger('TableQueries') type TableQueryScope = 'active' | 'archived' | 'all' @@ -224,12 +232,9 @@ interface InfiniteTableRowsParams { } /** - * Fetch a single page of rows for a table with pagination/filter/sort. - * - * Subscribes to the realtime `table-row-updated` / `table-row-deleted` socket - * events for this `tableId`; on receipt, merges the delta into every cached - * rows query for the table via `setQueriesData`. Polling stays as a fallback - * gated on `!isConnected` so a brief disconnect window doesn't go stale. + * Fetch a single page of rows for a table with pagination/filter/sort. Polls + * while any cell is in flight so cells reach their terminal state without a + * manual refresh. */ export function useTableRows({ workspaceId, @@ -249,97 +254,6 @@ export function useTableRows({ sort: sort ?? null, includeTotal, }) - const { - isConnected: socketConnected, - joinTable, - leaveTable, - onTableRowUpdated, - onTableRowDeleted, - } = useSocket() - - useEffect(() => { - if (!tableId) return - joinTable(tableId) - - onTableRowUpdated((event) => { - if (event.tableId !== tableId) return - // While an optimistic mutation is in flight, applying the socket delta - // could clobber the optimistic state — defer to onSettled invalidate. - // Mark stale without triggering a refetch (refetchType: 'none') so the - // refetch races neither the in-flight optimistic update nor any - // server-side post-response work the mutation is awaiting (e.g. backfill). 
- if (queryClient.isMutating() > 0) { - queryClient.invalidateQueries({ - queryKey: tableKeys.rowsRoot(tableId), - refetchType: 'none', - }) - return - } - queryClient.setQueriesData( - { queryKey: tableKeys.rowsRoot(tableId) }, - (current) => { - if (!current) return current - const incoming: TableRow = { - id: event.rowId, - data: event.data as RowData, - executions: (event.executions as RowExecutions) ?? {}, - position: event.position, - createdAt: '', - updatedAt: - typeof event.updatedAt === 'string' ? event.updatedAt : String(event.updatedAt), - } - const idx = current.rows.findIndex((r) => r.id === event.rowId) - if (idx === -1) { - const next = [...current.rows, incoming].sort((a, b) => a.position - b.position) - return { - ...current, - rows: next, - totalCount: current.totalCount === null ? null : current.totalCount + 1, - } - } - const merged = { - ...current.rows[idx], - data: incoming.data, - executions: incoming.executions, - updatedAt: incoming.updatedAt, - } - const next = [...current.rows] - next[idx] = merged - return { ...current, rows: next } - } - ) - }) - - onTableRowDeleted((event) => { - if (event.tableId !== tableId) return - if (queryClient.isMutating() > 0) { - queryClient.invalidateQueries({ - queryKey: tableKeys.rowsRoot(tableId), - refetchType: 'none', - }) - return - } - queryClient.setQueriesData( - { queryKey: tableKeys.rowsRoot(tableId) }, - (current) => { - if (!current) return current - const next = current.rows.filter((r) => r.id !== event.rowId) - if (next.length === current.rows.length) return current - return { - ...current, - rows: next, - totalCount: current.totalCount === null ? null : Math.max(0, current.totalCount - 1), - } - } - ) - }) - - return () => { - leaveTable() - } - // joinTable / leaveTable / on* are stable callbacks; tableId is the only real dep. 
- // eslint-disable-next-line react-hooks/exhaustive-deps - }, [tableId]) return useQuery({ queryKey: [...tableKeys.rowsRoot(tableId), paramsKey] as const, @@ -348,13 +262,8 @@ export function useTableRows({ enabled: Boolean(workspaceId && tableId) && enabled, staleTime: 30 * 1000, placeholderData: keepPreviousData, - // Polling is the fallback for when the socket isn't carrying updates. - // - Pause while any mutation is in flight (optimistic-update guard). - // - Skip while connected (sockets push every cell write). - // - Otherwise poll only while a cell is in `running` state, the original cadence. refetchInterval: (query) => { if (queryClient.isMutating() > 0) return false - if (socketConnected) return false return hasRunningGroupExecution(query.state.data?.rows) ? ROWS_POLL_INTERVAL_WHILE_RUNNING_MS : false @@ -378,6 +287,7 @@ export function useInfiniteTableRows({ sort, enabled = true, }: InfiniteTableRowsParams) { + const queryClient = useQueryClient() const paramsKey = JSON.stringify({ pageSize, filter: filter ?? null, @@ -404,6 +314,22 @@ export function useInfiniteTableRows({ }, enabled: Boolean(workspaceId && tableId) && enabled, staleTime: 30 * 1000, + /** + * Poll while any row has a `pending` or `running` group execution. + * Realtime sockets push every cell write, but cross-network paths + * (trigger.dev workers → realtime ECS, client through CloudFront/proxy) + * occasionally drop events. Polling at the running cadence is the + * safety net so cells reach their terminal state without a refresh. + * No polling when nothing is running and no polling while a mutation + * is in flight (optimistic-update guard). + */ + refetchInterval: (query) => { + if (queryClient.isMutating() > 0) return false + return hasRunningGroupExecutionInPages(query.state.data?.pages) + ? 
ROWS_POLL_INTERVAL_WHILE_RUNNING_MS + : false + }, + refetchIntervalInBackground: false, }) } @@ -458,6 +384,7 @@ export function useRenameTable(workspaceId: string) { }) }, onError: (error) => { + if (isValidationError(error)) return toast.error(error.message, { duration: 5000 }) }, onSettled: (_data, _error, variables) => { @@ -513,6 +440,11 @@ export function useCreateTableRow({ workspaceId, tableId }: RowMutationContext) reconcileCreatedRow(queryClient, tableId, row) }, + onError: (error) => { + // Validation errors are surfaced inline by the caller (see useUpdateColumn). + if (isValidationError(error)) return + toast.error(error.message, { duration: 5000 }) + }, onSettled: () => { invalidateRowCount(queryClient, tableId) }, @@ -624,6 +556,10 @@ export function useBatchCreateTableRows({ workspaceId, tableId }: RowMutationCon }, }) }, + onError: (error) => { + if (isValidationError(error)) return + toast.error(error.message, { duration: 5000 }) + }, onSettled: () => { invalidateRowCount(queryClient, tableId) }, @@ -651,18 +587,31 @@ export function useUpdateTableRow({ workspaceId, tableId }: RowMutationContext) queryKey: tableKeys.rowsRoot(tableId), }) - patchCachedRows(queryClient, tableId, (row) => - row.id === rowId ? { ...row, data: { ...row.data, ...data } as RowData } : row - ) + const groups = + queryClient.getQueryData(tableKeys.detail(tableId))?.schema + .workflowGroups ?? [] + + patchCachedRows(queryClient, tableId, (row) => { + if (row.id !== rowId) return row + const patch = data as Partial + const nextExecutions = optimisticallyScheduleNewlyEligibleGroups(groups, row, patch) + return { + ...row, + data: { ...row.data, ...patch } as RowData, + ...(nextExecutions ? 
{ executions: nextExecutions } : {}), + } + }) return { previousQueries } }, - onError: (_err, _vars, context) => { + onError: (error, _vars, context) => { if (context?.previousQueries) { for (const [queryKey, data] of context.previousQueries) { queryClient.setQueryData(queryKey, data) } } + if (isValidationError(error)) return + toast.error(error.message, { duration: 5000 }) }, onSettled: () => { invalidateRowData(queryClient, tableId) @@ -698,21 +647,32 @@ export function useBatchUpdateTableRows({ workspaceId, tableId }: RowMutationCon }) const updateMap = new Map(updates.map((u) => [u.rowId, u.data])) + const groups = + queryClient.getQueryData(tableKeys.detail(tableId))?.schema + .workflowGroups ?? [] patchCachedRows(queryClient, tableId, (row) => { - const patch = updateMap.get(row.id) - if (!patch) return row - return { ...row, data: { ...row.data, ...patch } as RowData } + const raw = updateMap.get(row.id) + if (!raw) return row + const patch = raw as Partial + const nextExecutions = optimisticallyScheduleNewlyEligibleGroups(groups, row, patch) + return { + ...row, + data: { ...row.data, ...patch } as RowData, + ...(nextExecutions ? { executions: nextExecutions } : {}), + } }) return { previousQueries } }, - onError: (_err, _vars, context) => { + onError: (error, _vars, context) => { if (context?.previousQueries) { for (const [queryKey, data] of context.previousQueries) { queryClient.setQueryData(queryKey, data) } } + if (isValidationError(error)) return + toast.error(error.message, { duration: 5000 }) }, onSettled: () => { invalidateRowData(queryClient, tableId) @@ -792,6 +752,8 @@ export function useUpdateColumn({ workspaceId, tableId }: RowMutationContext) { }) }, onError: (error) => { + // Validation errors are surfaced as inline FieldErrors by the caller. 
+ if (isValidationError(error)) return toast.error(error.message, { duration: 5000 }) }, onSettled: () => { @@ -870,7 +832,7 @@ export function useCancelTableRuns({ workspaceId, tableId }: RowMutationContext) const nextExecutions: RowExecutions = { ...executions } for (const gid in executions) { const exec = executions[gid] - if (exec.status !== 'running' && exec.status !== 'pending') continue + if (!isOptimisticInFlight(exec)) continue // Preserve blockErrors so cells that already errored keep their // Error rendering after the stop — only cells without a value or // error should flip to "Cancelled". @@ -1075,17 +1037,13 @@ export function useDeleteColumn({ workspaceId, tableId }: RowMutationContext) { }) } -interface RunGroupVariables { - groupId: string - /** Workflow id sourced from the group's config — used as a fallback for the - * optimistic execution `workflowId` field when the row hasn't run before. */ - workflowId: string - /** - * `all` — fire every dep-satisfied row (default). - * `incomplete` — only rows that have never run or whose last run ended in - * `failed`/`aborted`. Mirrored by the server-side filter. - */ - runMode?: 'all' | 'incomplete' +interface RunColumnVariables { + groupIds: string[] + /** `all` (default) fires every dep-satisfied row; `incomplete` skips rows + * whose last run completed successfully. */ + runMode?: RunMode + /** Restrict to these rows. Server applies the same eligibility predicate. */ + rowIds?: string[] } type InfiniteRowsCache = { pages: TableRowsResponse[]; pageParams: number[] } @@ -1169,47 +1127,82 @@ export function restoreCachedWorkflowCells( } /** - * Trigger a workflow-group run for every eligible row in the table. The server - * filters by deps; this hook optimistically flips each matching row's - * `executions[groupId]` to `pending` immediately so the UI doesn't lag the - * network round-trip. 
+ * Optimistic exec patch — flips every targeted (group, row) execution to + * `pending` so the UI doesn't lag the round-trip. Server eligibility may skip + * some; refetch on settle reconciles. + */ +function buildPendingExec( + prev: RowExecutionMetadata | undefined, + workflowIdFallback?: string +): RowExecutionMetadata { + return { + status: 'pending', + executionId: prev?.executionId ?? null, + jobId: null, + workflowId: prev?.workflowId ?? workflowIdFallback ?? '', + error: null, + } +} + +/** Broader sibling of `isExecInFlight` from `lib/table/deps`: treats any + * `pending` (with or without a jobId) as in-flight. The optimistic-patch + * context uses this to avoid re-marking a cell we just flipped optimistically. + * The eligibility predicate uses the stricter version. */ +function isOptimisticInFlight(exec: RowExecutionMetadata | undefined): boolean { + return exec?.status === 'running' || exec?.status === 'queued' || exec?.status === 'pending' +} + +/** + * The single canonical run mutation. Every UI gesture (single cell, per-row + * Play, action-bar Play/Refresh, column-header menu) maps to a `groupIds` + + * optional `rowIds` shape. Optimistic patch flips targeted (row, group) cells + * to `pending`; refetch on settle reconciles. */ -export function useRunGroup({ workspaceId, tableId }: RowMutationContext) { +export function useRunColumn({ workspaceId, tableId }: RowMutationContext) { const queryClient = useQueryClient() return useMutation({ - mutationFn: async ({ groupId, runMode = 'all' }: RunGroupVariables) => { - return requestJson(runWorkflowGroupContract, { - params: { tableId, groupId }, - body: { workspaceId, runMode }, + mutationFn: async ({ groupIds, runMode = 'all', rowIds }: RunColumnVariables) => { + return requestJson(runColumnContract, { + params: { tableId }, + body: { + workspaceId, + groupIds, + runMode, + ...(rowIds && rowIds.length > 0 ? 
{ rowIds } : {}), + }, }) }, - onMutate: async ({ groupId, workflowId, runMode = 'all' }) => { + onMutate: async ({ groupIds, runMode = 'all', rowIds }) => { + const targetRowIds = rowIds && rowIds.length > 0 ? new Set(rowIds) : null + const targetGroupIds = new Set(groupIds) const snapshots = await snapshotAndMutateRows(queryClient, tableId, (r) => { - const exec = r.executions?.[groupId] as RowExecutionMetadata | undefined - if (exec?.status === 'running' || exec?.status === 'pending') return null - // Mirror the server-side `incomplete` filter so the optimistic update - // doesn't flash `pending` on rows the server is going to skip. - if (runMode === 'incomplete' && exec?.status === 'completed') return null - const pending: RowExecutionMetadata = { - status: 'pending', - executionId: exec?.executionId ?? null, - jobId: null, - workflowId: exec?.workflowId ?? workflowId, - error: null, - } - return { - ...r, - executions: { ...(r.executions ?? {}), [groupId]: pending }, + if (targetRowIds && !targetRowIds.has(r.id)) return null + const executions = r.executions ?? 
{} + let changed = false + const next: RowExecutions = { ...executions } + for (const groupId of targetGroupIds) { + const exec = executions[groupId] as RowExecutionMetadata | undefined + if (isOptimisticInFlight(exec)) continue + if (runMode === 'incomplete' && exec?.status === 'completed') continue + next[groupId] = buildPendingExec(exec) + changed = true } + if (!changed) return null + return { ...r, executions: next } }) return { snapshots } }, onError: (_err, _variables, context) => { if (context?.snapshots) restoreCachedWorkflowCells(queryClient, context.snapshots) }, - onSettled: () => { - queryClient.invalidateQueries({ queryKey: tableKeys.rowsRoot(tableId) }) + onSettled: async () => { + // Cancel any in-flight poll first — without this, a poll started during + // the mutation but lands AFTER it resolves can clobber the optimistic + // patch with stale data, producing a queued → cancelled → queued flicker + // before the authoritative refetch arrives. + await queryClient.cancelQueries({ queryKey: tableKeys.rowsRoot(tableId) }) + await queryClient.invalidateQueries({ queryKey: tableKeys.rowsRoot(tableId) }) }, }) } @@ -1243,6 +1236,8 @@ interface UpdateWorkflowGroupVariables { dependencies?: WorkflowGroupDependencies outputs?: WorkflowGroupOutput[] newOutputColumns?: UpdateWorkflowGroupBodyInput['newOutputColumns'] + mappingUpdates?: UpdateWorkflowGroupBodyInput['mappingUpdates'] + autoRun?: boolean } export function useUpdateWorkflowGroup({ workspaceId, tableId }: RowMutationContext) { diff --git a/apps/sim/lib/api/client/errors.ts b/apps/sim/lib/api/client/errors.ts index 9fa4dc5b18b..ea61e501ee9 100644 --- a/apps/sim/lib/api/client/errors.ts +++ b/apps/sim/lib/api/client/errors.ts @@ -25,3 +25,83 @@ export class ApiClientError extends Error { export function isApiClientError(error: unknown): error is ApiClientError { return error instanceof ApiClientError } + +export interface ValidationIssue { + /** Path of the failing field, e.g. ['updates', 'name']. 
*/ + path: ReadonlyArray + /** Human-readable message — uses the schema's custom error string when set. */ + message: string +} + +interface UnknownIssue { + path?: unknown + message?: unknown +} + +function normalizeIssue(raw: unknown): ValidationIssue | null { + if (!raw || typeof raw !== 'object') return null + const { path, message } = raw as UnknownIssue + if (typeof message !== 'string' || message.length === 0) return null + if (!Array.isArray(path)) return null + const cleanPath = path.filter( + (segment): segment is string | number => + typeof segment === 'string' || typeof segment === 'number' + ) + return { path: cleanPath, message } +} + +/** + * Pull a list of validation issues out of an unknown error. Recognises both + * shapes the boundary produces: + * + * - Client-side contract validation: `requestJson` calls `schema.parse(input)` + * before fetch; failure throws a raw `ZodError` whose `.issues` is the array. + * - Server-side contract validation: route returns `{ error, details: [...] }`, + * which `requestJson` re-throws as `ApiClientError` carrying the body. + * + * Returns an empty array when the error isn't a recognised validation shape so + * callers can fall back to toast/log paths. + */ +export function extractValidationIssues(error: unknown): ValidationIssue[] { + if (!error || typeof error !== 'object') return [] + + if (isApiClientError(error)) { + const body = error.body + if (body && typeof body === 'object') { + const details = (body as { details?: unknown }).details + if (Array.isArray(details)) { + return details.map(normalizeIssue).filter((i): i is ValidationIssue => i !== null) + } + } + return [] + } + + const issues = (error as { issues?: unknown }).issues + if (Array.isArray(issues)) { + return issues.map(normalizeIssue).filter((i): i is ValidationIssue => i !== null) + } + return [] +} + +/** + * Match a single issue by suffix path. 
`pathSuffix` lets callers ignore the + * outer body wrapper — `findValidationIssue(err, ['name'])` matches both + * `path: ['name']` and `path: ['updates', 'name']`. + */ +export function findValidationIssue( + error: unknown, + pathSuffix: ReadonlyArray +): ValidationIssue | null { + const issues = extractValidationIssues(error) + for (const issue of issues) { + if (issue.path.length < pathSuffix.length) continue + const tail = issue.path.slice(issue.path.length - pathSuffix.length) + if (tail.every((segment, i) => segment === pathSuffix[i])) return issue + } + return null +} + +/** True when the error is a recognised validation failure (client or server). */ +export function isValidationError(error: unknown): boolean { + return extractValidationIssues(error).length > 0 +} diff --git a/apps/sim/lib/api/contracts/tables.ts b/apps/sim/lib/api/contracts/tables.ts index 05248757a68..40733c09a80 100644 --- a/apps/sim/lib/api/contracts/tables.ts +++ b/apps/sim/lib/api/contracts/tables.ts @@ -710,7 +710,7 @@ export const deleteTableRowsContract = defineRouteContract({ // ============================================================================ // Workflow group contracts (`/api/table/[tableId]/groups`, `/cancel-runs`, -// `/groups/[groupId]/run`, `/rows/[rowId]/run-workflow-group`) +// `/columns/run`, `/rows/run`, `/rows/[rowId]/cells/[groupId]/run`) // ============================================================================ const workflowGroupOutputSchema = z.object({ @@ -721,7 +721,6 @@ const workflowGroupOutputSchema = z.object({ const workflowGroupDependenciesSchema = z.object({ columns: z.array(z.string()).optional(), - workflowGroups: z.array(z.string()).optional(), }) const workflowGroupOutputColumnSchema = z.object({ @@ -744,6 +743,11 @@ export const addWorkflowGroupBodySchema = z.object({ name: z.string().optional(), dependencies: workflowGroupDependenciesSchema.optional(), outputs: z.array(workflowGroupOutputSchema).min(1), + /** When `false`, the group 
never auto-fires from the scheduler — it can + * only be triggered manually. Defaults to `true`. Persisted on the + * group; distinct from the top-level `autoRun` below which is a + * one-shot "schedule existing rows on creation" flag. */ + autoRun: z.boolean().optional(), }), outputColumns: z.array(workflowGroupOutputColumnSchema).min(1), /** When false, skip auto-scheduling existing rows after the group is added. @@ -752,6 +756,21 @@ export const addWorkflowGroupBodySchema = z.object({ autoRun: z.boolean().optional(), }) +/** + * Re-points an existing column to a different workflow output. Use when the + * user changes which `(blockId, path)` flows into a column they already have, + * without restructuring the rest of the group's outputs. Distinct from the + * `outputs` add/remove diff: the column keeps its identity, type, deps, and + * row position; only its source mapping changes. Existing row values for the + * column are backfilled from saved execution logs at the new `(blockId, path)` + * — rows whose log has no value for the new mapping end up empty. + */ +const workflowGroupMappingUpdateSchema = z.object({ + columnName: z.string().min(1), + blockId: z.string().min(1), + path: z.string().min(1), +}) + export const updateWorkflowGroupBodySchema = z.object({ workspaceId: z.string().min(1, 'Workspace ID is required'), groupId: z.string().min(1), @@ -760,6 +779,14 @@ export const updateWorkflowGroupBodySchema = z.object({ dependencies: workflowGroupDependenciesSchema.optional(), outputs: z.array(workflowGroupOutputSchema).optional(), newOutputColumns: z.array(workflowGroupOutputColumnSchema).optional(), + /** + * Per-column mapping swaps: keep the column, change the source `(blockId, + * path)`. Applied before the `outputs` add/remove diff. Each entry's + * `columnName` must already exist in the group's outputs. + */ + mappingUpdates: z.array(workflowGroupMappingUpdateSchema).optional(), + /** Toggle the group's persisted auto-run flag. Omit to leave unchanged. 
*/ + autoRun: z.boolean().optional(), }) export const deleteWorkflowGroupBodySchema = z.object({ @@ -840,7 +867,7 @@ export const cancelTableRunsContract = defineRouteContract({ }) /** - * Run modes for `POST /api/table/[tableId]/groups/[groupId]/run`: + * Run modes for `POST /api/table/[tableId]/columns/run`: * - `all` — every dep-satisfied row not already running/pending * - `incomplete` — same, but additionally restricted to rows whose group has * never run, or whose last run ended in `failed`/`aborted` @@ -848,46 +875,35 @@ export const cancelTableRunsContract = defineRouteContract({ * Field is named `runMode` (not `mode`) to disambiguate from the table-import * `mode` arg (`append` / `replace`) which lives on a different op. */ -export const runWorkflowGroupBodySchema = z.object({ +/** + * Run a set of workflow groups across the table or a row subset. The single + * canonical user-driven run op — every UI gesture (single cell, per-row Play, + * action-bar Play/Refresh, column-header menu) reduces to a `groupIds` + + * optional `rowIds` shape. AI uses the `run_column` tool op. + */ +export const runColumnBodySchema = z.object({ workspaceId: z.string().min(1, 'Workspace ID is required'), + groupIds: z.array(z.string().min(1)).min(1), runMode: z.enum(['all', 'incomplete']).default('all'), - /** Optional row scope. When provided, only these rows are candidates — the - * same eligibility predicate (deps satisfied, not in-flight, runMode filter) - * still applies, so a passed-in row that's mid-run or has unmet deps is - * silently skipped. Omit to run across the entire table. 
*/ rowIds: z.array(z.string().min(1)).min(1).optional(), }) -export const runWorkflowGroupContract = defineRouteContract({ +export const runColumnContract = defineRouteContract({ method: 'POST', - path: '/api/table/[tableId]/groups/[groupId]/run', - params: groupIdParamsSchema, - body: runWorkflowGroupBodySchema, + path: '/api/table/[tableId]/columns/run', + params: tableIdParamsSchema, + body: runColumnBodySchema, response: { mode: 'json', schema: successResponseSchema(z.object({ triggered: z.number() })), }, }) -export const runRowWorkflowGroupBodySchema = z.object({ - workspaceId: z.string().min(1, 'Workspace ID is required'), - groupId: z.string().min(1, 'Group ID is required'), -}) - -export const runRowWorkflowGroupContract = defineRouteContract({ - method: 'POST', - path: '/api/table/[tableId]/rows/[rowId]/run-workflow-group', - params: tableRowParamsSchema, - body: runRowWorkflowGroupBodySchema, - response: { - mode: 'json', - schema: successResponseSchema(z.object({ executionId: z.string() })), - }, -}) - export type AddWorkflowGroupBodyInput = z.input export type UpdateWorkflowGroupBodyInput = z.input export type DeleteWorkflowGroupBodyInput = z.input export type CancelTableRunsBodyInput = z.input -export type RunWorkflowGroupBodyInput = z.input -export type RunRowWorkflowGroupBodyInput = z.input +export type RunColumnBodyInput = z.input +/** Shared `runMode` union — used by every UI / hook / Mothership site that + * builds a run-column payload. Single source of truth for the literal pair. 
*/ +export type RunMode = NonNullable diff --git a/apps/sim/lib/billing/cleanup-dispatcher.ts b/apps/sim/lib/billing/cleanup-dispatcher.ts index b752e725515..e1dea22e95e 100644 --- a/apps/sim/lib/billing/cleanup-dispatcher.ts +++ b/apps/sim/lib/billing/cleanup-dispatcher.ts @@ -1,13 +1,13 @@ import { db } from '@sim/db' import { organization, subscription, workspace } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { toError } from '@sim/utils/errors' import { tasks } from '@trigger.dev/sdk' import { and, eq, inArray, isNotNull, isNull, sql } from 'drizzle-orm' import { type PlanCategory, sqlIsPaid, sqlIsPro, sqlIsTeam } from '@/lib/billing/plan-helpers' import { ENTITLED_SUBSCRIPTION_STATUSES } from '@/lib/billing/subscriptions/utils' import { getJobQueue } from '@/lib/core/async-jobs' import { shouldExecuteInline } from '@/lib/core/async-jobs/config' +import type { EnqueueOptions } from '@/lib/core/async-jobs/types' import { isTriggerAvailable } from '@/lib/knowledge/documents/service' const logger = createLogger('RetentionDispatcher') @@ -143,53 +143,18 @@ export async function resolveCleanupScope( } } -type RunnerFn = (payload: CleanupJobPayload) => Promise - -async function getInlineRunner(jobType: CleanupJobType): Promise { - switch (jobType) { - case 'cleanup-logs': { - const { runCleanupLogs } = await import('@/background/cleanup-logs') - return runCleanupLogs - } - case 'cleanup-soft-deletes': { - const { runCleanupSoftDeletes } = await import('@/background/cleanup-soft-deletes') - return runCleanupSoftDeletes - } - case 'cleanup-tasks': { - const { runCleanupTasks } = await import('@/background/cleanup-tasks') - return runCleanupTasks - } - } -} - -/** - * When the job queue backend is "database" (no Trigger.dev, no BullMQ), the - * enqueued rows just sit in async_jobs forever. Run them inline as fire-and-forget - * promises, following the same pattern as the workflow execution API route. 
- */ -async function runInlineIfNeeded( - jobQueue: Awaited>, - jobType: CleanupJobType, - jobId: string, - payload: CleanupJobPayload -): Promise { - if (!shouldExecuteInline()) return - const runner = await getInlineRunner(jobType) - void (async () => { - try { - await jobQueue.startJob(jobId) - await runner(payload) - await jobQueue.completeJob(jobId, null) - } catch (error) { - const errorMessage = toError(error).message - logger.error(`[${jobType}] Inline job ${jobId} failed`, { error: errorMessage }) - try { - await jobQueue.markJobFailed(jobId, errorMessage) - } catch (markErr) { - logger.error(`[${jobType}] Failed to mark job ${jobId} as failed`, { markErr }) - } +async function buildCleanupRunner(jobType: CleanupJobType): Promise { + const cleanupRunner = await (async () => { + switch (jobType) { + case 'cleanup-logs': + return (await import('@/background/cleanup-logs')).runCleanupLogs + case 'cleanup-soft-deletes': + return (await import('@/background/cleanup-soft-deletes')).runCleanupSoftDeletes + case 'cleanup-tasks': + return (await import('@/background/cleanup-tasks')).runCleanupTasks } })() + return ((payload) => cleanupRunner(payload as CleanupJobPayload)) as EnqueueOptions['runner'] } /** @@ -214,9 +179,10 @@ export async function dispatchCleanupJobs( for (const plan of plansWithDefaults) { const payload: CleanupJobPayload = { plan } - const jobId = await jobQueue.enqueue(jobType, payload) + const jobId = await jobQueue.enqueue(jobType, payload, { + runner: shouldExecuteInline() ? await buildCleanupRunner(jobType) : undefined, + }) jobIds.push(jobId) - await runInlineIfNeeded(jobQueue, jobType, jobId, payload) } // Enterprise: workspaces whose owning org is on an active enterprise sub and @@ -270,12 +236,11 @@ export async function dispatchCleanupJobs( } } else { // Fallback: parallel enqueue via abstraction + const inlineRunner = shouldExecuteInline() ? 
await buildCleanupRunner(jobType) : undefined const results = await Promise.allSettled( enterpriseRows.map(async (row) => { const payload: CleanupJobPayload = { plan: 'enterprise', workspaceId: row.id } - const jobId = await jobQueue.enqueue(jobType, payload) - await runInlineIfNeeded(jobQueue, jobType, jobId, payload) - return jobId + return jobQueue.enqueue(jobType, payload, { runner: inlineRunner }) }) ) diff --git a/apps/sim/lib/copilot/generated/tool-catalog-v1.ts b/apps/sim/lib/copilot/generated/tool-catalog-v1.ts index 5083432bd87..616244da295 100644 --- a/apps/sim/lib/copilot/generated/tool-catalog-v1.ts +++ b/apps/sim/lib/copilot/generated/tool-catalog-v1.ts @@ -2792,6 +2792,15 @@ export const UserTable: ToolCatalogEntry = { type: 'object', description: 'Arguments for the operation', properties: { + autoRun: { + type: 'boolean', + description: + "Optional flag for add_workflow_group and update_workflow_group. On add: when true, existing rows whose dependencies are already filled run immediately; default false stages the group silently — call run_column when ready to fire rows. On update: toggle a group's auto-fire behavior on an existing group — false stages it (no auto-runs on dep satisfaction; only manual run_column fires rows), true re-enables auto-fire (rows whose deps fill will be scheduled). Set true on add only if the user explicitly asked to start runs immediately.", + }, + blockId: { + type: 'string', + description: 'Source block ID inside the workflow. Used by add_workflow_group_output.', + }, column: { type: 'object', description: 'Column definition for add_column: { name, type, unique?, position? }', @@ -2799,7 +2808,7 @@ export const UserTable: ToolCatalogEntry = { columnName: { type: 'string', description: - 'Column name (required for rename_column, update_column; use columnNames array for batch delete_column)', + 'Column name. Required for rename_column, update_column, and delete_workflow_group_output (the bound column to drop). 
Optional for add_workflow_group_output (auto-derived from path when omitted). Use columnNames array for batch delete_column.', }, columnNames: { type: 'array', @@ -2810,6 +2819,19 @@ export const UserTable: ToolCatalogEntry = { type: 'object', description: 'Row data as key-value pairs (required for insert_row, update_row)', }, + dependencies: { + type: 'object', + description: + "Dependencies the workflow group requires before running a row. { columns?: string[] } lists input column names that must be filled. Workflow output columns count too — depend on the column produced by an upstream group, not the group itself. The dep graph is column-induced. A group can't depend on its own output columns. Used by add_workflow_group and update_workflow_group.", + properties: { + columns: { + type: 'array', + description: + 'Input column names that must be filled before the group runs. Plain columns and upstream-group output columns are both valid here.', + items: { type: 'string' }, + }, + }, + }, description: { type: 'string', description: "Table description (optional for 'create')" }, fileId: { type: 'string', @@ -2826,6 +2848,17 @@ export const UserTable: ToolCatalogEntry = { description: 'MongoDB-style filter for query_rows, update_rows_by_filter, delete_rows_by_filter', }, + groupId: { + type: 'string', + description: + 'Workflow group ID. Required for update_workflow_group, delete_workflow_group, add_workflow_group_output, delete_workflow_group_output.', + }, + groupIds: { + type: 'array', + description: + 'Array of workflow group IDs. Required for run_column — non-empty list of columns to run.', + items: { type: 'string' }, + }, limit: { type: 'number', description: 'Maximum rows to return or affect (optional, default 100)', @@ -2833,11 +2866,29 @@ export const UserTable: ToolCatalogEntry = { mapping: { type: 'object', description: - 'Optional explicit CSV-header → table-column mapping for import_file, as { "csvHeader": "columnName" | null }. 
When omitted, headers are auto-matched by sanitized name (case-insensitive fallback). Use null to skip a CSV column.', + 'Optional explicit CSV-header → table-column mapping for import_file, as { "csvHeader": "columnName" | null }. A string maps the CSV header to that table column; null skips that CSV header (it won\'t be imported); omit a header entirely to fall back to auto-mapping by sanitized name (case-insensitive).', additionalProperties: { - type: 'string', + type: ['string', 'null'], description: - 'Target column name on the table. Use null to skip this CSV header instead of a column name.', + "Target column name on the table. null skips that CSV header (it won't be imported); omit it entirely to fall back to auto-mapping.", + }, + }, + mappingUpdates: { + type: 'array', + description: + "Surgical per-output remap for update_workflow_group. Each entry repoints ONE existing output column to a new (blockId, path) without touching the rest of the group. Use this when the user wants to swap which block output flows into a column (e.g. 'point the score column at the new agent block') — the bound column stays, only its source pair changes. Stale row data for remapped columns is cleared and backfilled from saved execution logs where possible (no re-run needed). Use this INSTEAD of resending the full outputs array when the change is scoped to a few columns; use outputs only when the whole group's output set is being restructured. Discover valid (blockId, path) pairs via list_workflow_outputs first.", + items: { + type: 'object', + properties: { + blockId: { type: 'string', description: 'New source block ID for this column.' }, + columnName: { + type: 'string', + description: + 'The existing output column to remap. Must already be bound to this group.', + }, + path: { type: 'string', description: 'New dotted output path on the new block.' 
}, + }, + required: ['columnName', 'blockId', 'path'], }, }, mode: { @@ -2868,6 +2919,33 @@ export const UserTable: ToolCatalogEntry = { description: 'Pipe query_rows results directly to a NEW workspace file. The format is auto-inferred from the file extension: .csv → CSV, .json → JSON, .md → Markdown, etc. Use .csv for tabular exports. Use a flat path like "files/export.csv" — nested paths are not supported.', }, + outputs: { + type: 'array', + description: + "Outputs to surface as columns. Each entry maps a workflow block output to a table column: { blockId, path, columnName?, columnType? }. blockId is the source block; path is the dotted output path; columnName auto-derives from the path when omitted; columnType defaults from the leaf type when omitted. Used by add_workflow_group for the full output set. For update_workflow_group, prefer add_workflow_group_output / delete_workflow_group_output for individual outputs and mappingUpdates for surgical remap; only pass outputs here when restructuring the whole group's output set in one shot. If unsure about valid (blockId, path) pairs, call list_workflow_outputs first — paths are validated against the live workflow and invalid picks return an error with the valid options. For Agent blocks with structured outputs, the structured fields appear as top-level paths (e.g. summary, industry); there is NO response.content path on a structured agent.", + items: { + type: 'object', + properties: { + blockId: { type: 'string', description: 'Source block ID inside the workflow.' }, + columnName: { + type: 'string', + description: + 'Optional target column name. Auto-derived from the path when omitted.', + }, + columnType: { + type: 'string', + description: 'Optional column type. Defaults from the leaf type when omitted.', + enum: ['string', 'number', 'boolean', 'date', 'json'], + }, + path: { type: 'string', description: 'Dotted output path on the block.' 
}, + }, + required: ['blockId', 'path'], + }, + }, + path: { + type: 'string', + description: 'Dotted output path on the block. Used by add_workflow_group_output.', + }, position: { type: 'integer', description: @@ -2881,21 +2959,36 @@ export const UserTable: ToolCatalogEntry = { }, rowId: { type: 'string', - description: 'Row ID (required for get_row, update_row, delete_row)', + description: + "Row ID. Required for get_row, update_row, delete_row, and for cancel_table_runs when scope:'row'.", }, rowIds: { type: 'array', - description: 'Array of row IDs to delete (for batch_delete_rows)', + description: + 'Array of row IDs. Used by batch_delete_rows (rows to delete) and run_column (optional row scope — when omitted, runs across the whole table; when provided, only these rows are candidates and the server eligibility predicate still applies).', + items: { type: 'string' }, }, rows: { type: 'array', description: 'Array of row data objects (required for batch_insert_rows)', }, + runMode: { + type: 'string', + description: + "Run mode for run_column. 'incomplete' (default) re-runs only rows that never produced output or last failed; 'all' re-runs every dep-satisfied row.", + enum: ['incomplete', 'all'], + }, schema: { type: 'object', description: "Table schema with columns array (required for 'create'). Each column: { name, type, unique? }", }, + scope: { + type: 'string', + description: + "Cancellation scope for cancel_table_runs. 
'all' cancels in-flight runs across the whole table; 'row' cancels only the row identified by rowId.", + enum: ['all', 'row'], + }, sort: { type: 'object', description: @@ -2925,6 +3018,11 @@ export const UserTable: ToolCatalogEntry = { description: 'Map of rowId to value for single-column batch update: { "rowId1": val1, "rowId2": val2 } (for batch_update_rows with columnName)', }, + workflowId: { + type: 'string', + description: + 'ID of the workflow (required for add_workflow_group and list_workflow_outputs).', + }, }, }, operation: { @@ -2951,6 +3049,14 @@ export const UserTable: ToolCatalogEntry = { 'rename_column', 'delete_column', 'update_column', + 'add_workflow_group', + 'update_workflow_group', + 'delete_workflow_group', + 'add_workflow_group_output', + 'delete_workflow_group_output', + 'run_column', + 'cancel_table_runs', + 'list_workflow_outputs', ], }, }, @@ -3289,6 +3395,14 @@ export const UserTableOperation = { renameColumn: 'rename_column', deleteColumn: 'delete_column', updateColumn: 'update_column', + addWorkflowGroup: 'add_workflow_group', + updateWorkflowGroup: 'update_workflow_group', + deleteWorkflowGroup: 'delete_workflow_group', + addWorkflowGroupOutput: 'add_workflow_group_output', + deleteWorkflowGroupOutput: 'delete_workflow_group_output', + runColumn: 'run_column', + cancelTableRuns: 'cancel_table_runs', + listWorkflowOutputs: 'list_workflow_outputs', } as const export type UserTableOperation = (typeof UserTableOperation)[keyof typeof UserTableOperation] @@ -3314,6 +3428,14 @@ export const UserTableOperationValues = [ UserTableOperation.renameColumn, UserTableOperation.deleteColumn, UserTableOperation.updateColumn, + UserTableOperation.addWorkflowGroup, + UserTableOperation.updateWorkflowGroup, + UserTableOperation.deleteWorkflowGroup, + UserTableOperation.addWorkflowGroupOutput, + UserTableOperation.deleteWorkflowGroupOutput, + UserTableOperation.runColumn, + UserTableOperation.cancelTableRuns, + UserTableOperation.listWorkflowOutputs, ] 
as const export const WorkspaceFileOperation = { diff --git a/apps/sim/lib/copilot/generated/tool-schemas-v1.ts b/apps/sim/lib/copilot/generated/tool-schemas-v1.ts index 5a2e2a196d8..8a7aebc3e7a 100644 --- a/apps/sim/lib/copilot/generated/tool-schemas-v1.ts +++ b/apps/sim/lib/copilot/generated/tool-schemas-v1.ts @@ -2594,6 +2594,16 @@ export const TOOL_RUNTIME_SCHEMAS: Record = { type: 'object', description: 'Arguments for the operation', properties: { + autoRun: { + type: 'boolean', + description: + "Optional flag for add_workflow_group and update_workflow_group. On add: when true, existing rows whose dependencies are already filled run immediately; default false stages the group silently — call run_column when ready to fire rows. On update: toggle a group's auto-fire behavior on an existing group — false stages it (no auto-runs on dep satisfaction; only manual run_column fires rows), true re-enables auto-fire (rows whose deps fill will be scheduled). Set true on add only if the user explicitly asked to start runs immediately.", + }, + blockId: { + type: 'string', + description: + 'Source block ID inside the workflow. Used by add_workflow_group_output.', + }, column: { type: 'object', description: 'Column definition for add_column: { name, type, unique?, position? }', @@ -2601,7 +2611,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record = { columnName: { type: 'string', description: - 'Column name (required for rename_column, update_column; use columnNames array for batch delete_column)', + 'Column name. Required for rename_column, update_column, and delete_workflow_group_output (the bound column to drop). Optional for add_workflow_group_output (auto-derived from path when omitted). 
Use columnNames array for batch delete_column.', }, columnNames: { type: 'array', @@ -2612,6 +2622,21 @@ export const TOOL_RUNTIME_SCHEMAS: Record = { type: 'object', description: 'Row data as key-value pairs (required for insert_row, update_row)', }, + dependencies: { + type: 'object', + description: + "Dependencies the workflow group requires before running a row. { columns?: string[] } lists input column names that must be filled. Workflow output columns count too — depend on the column produced by an upstream group, not the group itself. The dep graph is column-induced. A group can't depend on its own output columns. Used by add_workflow_group and update_workflow_group.", + properties: { + columns: { + type: 'array', + description: + 'Input column names that must be filled before the group runs. Plain columns and upstream-group output columns are both valid here.', + items: { + type: 'string', + }, + }, + }, + }, description: { type: 'string', description: "Table description (optional for 'create')", @@ -2631,6 +2656,19 @@ export const TOOL_RUNTIME_SCHEMAS: Record = { description: 'MongoDB-style filter for query_rows, update_rows_by_filter, delete_rows_by_filter', }, + groupId: { + type: 'string', + description: + 'Workflow group ID. Required for update_workflow_group, delete_workflow_group, add_workflow_group_output, delete_workflow_group_output.', + }, + groupIds: { + type: 'array', + description: + 'Array of workflow group IDs. Required for run_column — non-empty list of columns to run.', + items: { + type: 'string', + }, + }, limit: { type: 'number', description: 'Maximum rows to return or affect (optional, default 100)', @@ -2638,11 +2676,35 @@ export const TOOL_RUNTIME_SCHEMAS: Record = { mapping: { type: 'object', description: - 'Optional explicit CSV-header → table-column mapping for import_file, as { "csvHeader": "columnName" | null }. When omitted, headers are auto-matched by sanitized name (case-insensitive fallback). 
Use null to skip a CSV column.', + 'Optional explicit CSV-header → table-column mapping for import_file, as { "csvHeader": "columnName" | null }. A string maps the CSV header to that table column; null skips that CSV header (it won\'t be imported); omit a header entirely to fall back to auto-mapping by sanitized name (case-insensitive).', additionalProperties: { - type: 'string', + type: ['string', 'null'], description: - 'Target column name on the table. Use null to skip this CSV header instead of a column name.', + "Target column name on the table. null skips that CSV header (it won't be imported); omit it entirely to fall back to auto-mapping.", + }, + }, + mappingUpdates: { + type: 'array', + description: + "Surgical per-output remap for update_workflow_group. Each entry repoints ONE existing output column to a new (blockId, path) without touching the rest of the group. Use this when the user wants to swap which block output flows into a column (e.g. 'point the score column at the new agent block') — the bound column stays, only its source pair changes. Stale row data for remapped columns is cleared and backfilled from saved execution logs where possible (no re-run needed). Use this INSTEAD of resending the full outputs array when the change is scoped to a few columns; use outputs only when the whole group's output set is being restructured. Discover valid (blockId, path) pairs via list_workflow_outputs first.", + items: { + type: 'object', + properties: { + blockId: { + type: 'string', + description: 'New source block ID for this column.', + }, + columnName: { + type: 'string', + description: + 'The existing output column to remap. 
Must already be bound to this group.', + }, + path: { + type: 'string', + description: 'New dotted output path on the new block.', + }, + }, + required: ['columnName', 'blockId', 'path'], }, }, mode: { @@ -2679,6 +2741,39 @@ export const TOOL_RUNTIME_SCHEMAS: Record = { description: 'Pipe query_rows results directly to a NEW workspace file. The format is auto-inferred from the file extension: .csv → CSV, .json → JSON, .md → Markdown, etc. Use .csv for tabular exports. Use a flat path like "files/export.csv" — nested paths are not supported.', }, + outputs: { + type: 'array', + description: + "Outputs to surface as columns. Each entry maps a workflow block output to a table column: { blockId, path, columnName?, columnType? }. blockId is the source block; path is the dotted output path; columnName auto-derives from the path when omitted; columnType defaults from the leaf type when omitted. Used by add_workflow_group for the full output set. For update_workflow_group, prefer add_workflow_group_output / delete_workflow_group_output for individual outputs and mappingUpdates for surgical remap; only pass outputs here when restructuring the whole group's output set in one shot. If unsure about valid (blockId, path) pairs, call list_workflow_outputs first — paths are validated against the live workflow and invalid picks return an error with the valid options. For Agent blocks with structured outputs, the structured fields appear as top-level paths (e.g. summary, industry); there is NO response.content path on a structured agent.", + items: { + type: 'object', + properties: { + blockId: { + type: 'string', + description: 'Source block ID inside the workflow.', + }, + columnName: { + type: 'string', + description: + 'Optional target column name. Auto-derived from the path when omitted.', + }, + columnType: { + type: 'string', + description: 'Optional column type. 
Defaults from the leaf type when omitted.', + enum: ['string', 'number', 'boolean', 'date', 'json'], + }, + path: { + type: 'string', + description: 'Dotted output path on the block.', + }, + }, + required: ['blockId', 'path'], + }, + }, + path: { + type: 'string', + description: 'Dotted output path on the block. Used by add_workflow_group_output.', + }, position: { type: 'integer', description: @@ -2694,21 +2789,38 @@ export const TOOL_RUNTIME_SCHEMAS: Record = { }, rowId: { type: 'string', - description: 'Row ID (required for get_row, update_row, delete_row)', + description: + "Row ID. Required for get_row, update_row, delete_row, and for cancel_table_runs when scope:'row'.", }, rowIds: { type: 'array', - description: 'Array of row IDs to delete (for batch_delete_rows)', + description: + 'Array of row IDs. Used by batch_delete_rows (rows to delete) and run_column (optional row scope — when omitted, runs across the whole table; when provided, only these rows are candidates and the server eligibility predicate still applies).', + items: { + type: 'string', + }, }, rows: { type: 'array', description: 'Array of row data objects (required for batch_insert_rows)', }, + runMode: { + type: 'string', + description: + "Run mode for run_column. 'incomplete' (default) re-runs only rows that never produced output or last failed; 'all' re-runs every dep-satisfied row.", + enum: ['incomplete', 'all'], + }, schema: { type: 'object', description: "Table schema with columns array (required for 'create'). Each column: { name, type, unique? }", }, + scope: { + type: 'string', + description: + "Cancellation scope for cancel_table_runs. 
'all' cancels in-flight runs across the whole table; 'row' cancels only the row identified by rowId.", + enum: ['all', 'row'], + }, sort: { type: 'object', description: @@ -2740,6 +2852,11 @@ export const TOOL_RUNTIME_SCHEMAS: Record = { description: 'Map of rowId to value for single-column batch update: { "rowId1": val1, "rowId2": val2 } (for batch_update_rows with columnName)', }, + workflowId: { + type: 'string', + description: + 'ID of the workflow (required for add_workflow_group and list_workflow_outputs).', + }, }, }, operation: { @@ -2766,6 +2883,14 @@ export const TOOL_RUNTIME_SCHEMAS: Record = { 'rename_column', 'delete_column', 'update_column', + 'add_workflow_group', + 'update_workflow_group', + 'delete_workflow_group', + 'add_workflow_group_output', + 'delete_workflow_group_output', + 'run_column', + 'cancel_table_runs', + 'list_workflow_outputs', ], }, }, diff --git a/apps/sim/lib/copilot/tools/server/table/user-table.ts b/apps/sim/lib/copilot/tools/server/table/user-table.ts index 22c807dfdba..d3b768d699d 100644 --- a/apps/sim/lib/copilot/tools/server/table/user-table.ts +++ b/apps/sim/lib/copilot/tools/server/table/user-table.ts @@ -56,7 +56,7 @@ import type { WorkflowGroupDependencies, WorkflowGroupOutput, } from '@/lib/table/types' -import { cancelWorkflowGroupRuns, triggerWorkflowGroupRun } from '@/lib/table/workflow-columns' +import { cancelWorkflowGroupRuns, runWorkflowColumn } from '@/lib/table/workflow-columns' import { fetchWorkspaceFileBuffer, resolveWorkspaceFileReference, @@ -1283,6 +1283,10 @@ export const userTableServerTool: BaseServerTool dependencies: args.dependencies as WorkflowGroupDependencies | undefined, outputs: updateOutputs, newOutputColumns: args.newOutputColumns as ColumnDefinition[] | undefined, + mappingUpdates: args.mappingUpdates as + | Array<{ columnName: string; blockId: string; path: string }> + | undefined, + autoRun: typeof args.autoRun === 'boolean' ? 
args.autoRun : undefined, }, requestId ) @@ -1372,13 +1376,21 @@ export const userTableServerTool: BaseServerTool } } - case 'run_workflow_group': { + case 'run_column': { if (!args.tableId) return { success: false, message: 'Table ID is required' } if (!workspaceId) return { success: false, message: 'Workspace ID is required' } - const groupId = args.groupId as string | undefined - if (!groupId) { - return { success: false, message: 'groupId is required for run_workflow_group' } + const rawGroupIds = args.groupIds as unknown + if ( + !Array.isArray(rawGroupIds) || + rawGroupIds.length === 0 || + rawGroupIds.some((id) => typeof id !== 'string' || id.length === 0) + ) { + return { + success: false, + message: 'groupIds must be a non-empty array of group id strings', + } } + const groupIds = rawGroupIds as string[] const runMode = (args.runMode as 'all' | 'incomplete' | undefined) ?? 'incomplete' if (runMode !== 'all' && runMode !== 'incomplete') { return { @@ -1403,18 +1415,18 @@ export const userTableServerTool: BaseServerTool } const requestId = generateId().slice(0, 8) assertNotAborted() - const { triggered } = await triggerWorkflowGroupRun({ + const { triggered } = await runWorkflowColumn({ tableId: args.tableId, - groupId, workspaceId, + groupIds, mode: runMode, - requestId, rowIds, + requestId, }) const scopeLabel = rowIds ? 
`${rowIds.length} row(s) by id` : runMode return { success: true, - message: `Triggered ${triggered} row(s) for workflow group ${groupId} (${scopeLabel})`, + message: `Triggered ${triggered} row(s) across ${groupIds.length} column(s) (${scopeLabel})`, data: { triggered }, } } diff --git a/apps/sim/lib/core/async-jobs/backends/database.ts b/apps/sim/lib/core/async-jobs/backends/database.ts index b11ee70b148..4c96bb86e94 100644 --- a/apps/sim/lib/core/async-jobs/backends/database.ts +++ b/apps/sim/lib/core/async-jobs/backends/database.ts @@ -1,8 +1,8 @@ import { asyncJobs, db } from '@sim/db' import { createLogger } from '@sim/logger' +import { toError } from '@sim/utils/errors' import { generateId } from '@sim/utils/id' import { eq, sql } from 'drizzle-orm' -import { abortInlineJob } from '@/lib/core/async-jobs/inline-abort' import { type EnqueueOptions, JOB_STATUS, @@ -16,6 +16,7 @@ import { const logger = createLogger('DatabaseJobQueue') type AsyncJobRow = typeof asyncJobs.$inferSelect +type Runner = NonNullable function rowToJob(row: AsyncJobRow): Job { return { @@ -34,6 +35,38 @@ function rowToJob(row: AsyncJobRow): Job { } } +const inlineAbortControllers = new Map() + +interface Semaphore { + available: number + waiters: Array<() => void> +} +const semaphores = new Map() + +async function acquireSlot(key: string, limit: number): Promise { + let s = semaphores.get(key) + if (!s) { + s = { available: limit, waiters: [] } + semaphores.set(key, s) + } + if (s.available > 0) { + s.available -= 1 + return + } + await new Promise((resolve) => s.waiters.push(resolve)) +} + +function releaseSlot(key: string): void { + const s = semaphores.get(key) + if (!s) return + const next = s.waiters.shift() + if (next) { + next() + return + } + s.available += 1 +} + export class DatabaseJobQueue implements JobQueueBackend { async enqueue( type: JobType, @@ -56,9 +89,58 @@ export class DatabaseJobQueue implements JobQueueBackend { }) logger.debug('Enqueued job', { jobId, type }) + 
if (options?.runner) { + this.runInline( + type, + jobId, + payload, + options.runner, + options.concurrencyKey, + options.concurrencyLimit + ) + } return jobId } + async batchEnqueue( + type: JobType, + items: Array<{ payload: TPayload; options?: EnqueueOptions }> + ): Promise { + if (items.length === 0) return [] + const now = new Date() + const rows = items.map(({ payload, options }) => ({ + id: `run_${generateId().replace(/-/g, '').slice(0, 20)}`, + type, + payload: payload as Record, + status: JOB_STATUS.PENDING, + createdAt: now, + attempts: 0, + maxAttempts: options?.maxAttempts ?? 3, + metadata: (options?.metadata ?? {}) as Record, + updatedAt: now, + })) + + await db.insert(asyncJobs).values(rows) + + logger.debug('Batch-enqueued jobs', { count: rows.length, type }) + + for (let i = 0; i < items.length; i++) { + const { payload, options } = items[i] + if (options?.runner) { + this.runInline( + type, + rows[i].id, + payload, + options.runner, + options.concurrencyKey, + options.concurrencyLimit + ) + } + } + + return rows.map((r) => r.id) + } + async getJob(jobId: string): Promise { const [row] = await db.select().from(asyncJobs).where(eq(asyncJobs.id, jobId)).limit(1) @@ -116,9 +198,14 @@ export class DatabaseJobQueue implements JobQueueBackend { async cancelJob(jobId: string): Promise { // Abort any in-process inline execution first so the running workflow // observes the signal and stops mid-flight. Then mark the row failed so - // any future poller skips it. The DB queue is single-process / dev-only, - // so an in-memory registry is sufficient for cross-call abort. - const aborted = abortInlineJob(jobId) + // any future poller skips it. 
+ const controller = inlineAbortControllers.get(jobId) + let aborted = false + if (controller) { + controller.abort('Cancelled') + inlineAbortControllers.delete(jobId) + aborted = true + } const now = new Date() await db @@ -133,4 +220,45 @@ export class DatabaseJobQueue implements JobQueueBackend { logger.debug('Marked job as cancelled (DB queue)', { jobId, abortedInline: aborted }) } + + /** + * Fire-and-forget IIFE that owns the lifecycle for an inline job: registers + * the abort controller (so `cancelJob` can interrupt mid-flight), acquires + * a concurrency slot if `concurrencyKey` is set, drives + * `startJob → runner → completeJob | markJobFailed`. + */ + private runInline( + type: JobType, + jobId: string, + payload: TPayload, + runner: Runner, + concurrencyKey?: string, + concurrencyLimit?: number + ): void { + const abortController = new AbortController() + inlineAbortControllers.set(jobId, abortController) + void (async () => { + if (concurrencyKey && concurrencyLimit && concurrencyLimit > 0) { + await acquireSlot(concurrencyKey, concurrencyLimit) + } + try { + await this.startJob(jobId) + await runner(payload, abortController.signal) + await this.completeJob(jobId, null) + } catch (err) { + const message = toError(err).message + logger.error(`[${type}] Inline job ${jobId} failed`, { error: message }) + try { + await this.markJobFailed(jobId, message) + } catch (markErr) { + logger.error(`[${type}] Failed to mark job ${jobId} as failed`, { markErr }) + } + } finally { + inlineAbortControllers.delete(jobId) + if (concurrencyKey && concurrencyLimit && concurrencyLimit > 0) { + releaseSlot(concurrencyKey) + } + } + })() + } } diff --git a/apps/sim/lib/core/async-jobs/backends/trigger-dev.ts b/apps/sim/lib/core/async-jobs/backends/trigger-dev.ts index 7427108e141..e7d04bb7352 100644 --- a/apps/sim/lib/core/async-jobs/backends/trigger-dev.ts +++ b/apps/sim/lib/core/async-jobs/backends/trigger-dev.ts @@ -82,6 +82,23 @@ export class TriggerDevJobQueue 
implements JobQueueBackend { return handle.id } + async batchEnqueue( + type: JobType, + items: Array<{ payload: TPayload; options?: EnqueueOptions }> + ): Promise { + if (items.length === 0) return [] + // tasks.batchTrigger returns only a batchId, not per-item run IDs, so we + // can't use it when callers need to track individual runs (e.g. table cell + // tasks need per-row jobIds for cancellation). Sequential `tasks.trigger` + // gives us per-item IDs and naturally preserves input order in the queue. + const ids: string[] = [] + for (const { payload, options } of items) { + const id = await this.enqueue(type, payload, options) + ids.push(id) + } + return ids + } + async getJob(jobId: string): Promise { try { const run = await runs.retrieve(jobId) diff --git a/apps/sim/lib/core/async-jobs/inline-abort.ts b/apps/sim/lib/core/async-jobs/inline-abort.ts deleted file mode 100644 index af4179da39f..00000000000 --- a/apps/sim/lib/core/async-jobs/inline-abort.ts +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Process-local registry of `AbortController`s for jobs running inline - * (i.e. on the same Node process that enqueued them — the database-backed - * queue path). The trigger.dev backend does not use this: cancellation there - * is handled by `runs.cancel(jobId)` which interrupts the worker. - * - * Wiring: - * - `runWorkflowColumn` registers a controller after enqueue (keyed by the - * returned `jobId`) and passes its `signal` into the inline task body. - * - `DatabaseJobQueue.cancelJob` looks up the controller and aborts it so - * the running workflow execution can observe the signal mid-flight. - * - The IIFE that owns the controller unregisters in `finally`. 
- */ -const inlineAbortControllers = new Map() - -export function registerInlineAbort(jobId: string, controller: AbortController): void { - inlineAbortControllers.set(jobId, controller) -} - -export function unregisterInlineAbort(jobId: string): void { - inlineAbortControllers.delete(jobId) -} - -/** - * Aborts the in-process controller for `jobId` if one is registered. Safe to - * call from `cancelJob` regardless of whether the job ran inline. Returns - * true if a controller was found and aborted. - */ -export function abortInlineJob(jobId: string, reason = 'Cancelled'): boolean { - const controller = inlineAbortControllers.get(jobId) - if (!controller) return false - controller.abort(reason) - inlineAbortControllers.delete(jobId) - return true -} diff --git a/apps/sim/lib/core/async-jobs/types.ts b/apps/sim/lib/core/async-jobs/types.ts index 515fe784c21..e1d2c411313 100644 --- a/apps/sim/lib/core/async-jobs/types.ts +++ b/apps/sim/lib/core/async-jobs/types.ts @@ -78,10 +78,24 @@ export interface EnqueueOptions { delayMs?: number tags?: string[] /** - * Trigger.dev concurrency key. Combined with the task's `queue.concurrencyLimit`, - * limits parallel runs sharing this key. The database backend ignores it. + * Combined with the task's `queue.concurrencyLimit`, caps parallel runs + * sharing this key. Trigger.dev enforces server-side; the database backend + * enforces in-process via a FIFO semaphore. */ concurrencyKey?: string + /** + * Per-key concurrency cap. Database backend only — trigger.dev reads this + * from the task definition (`queue.concurrencyLimit`). + */ + concurrencyLimit?: number + /** + * Job body invoked when the queue backend lacks an external worker. + * Trigger.dev ignores this (its workers execute the task definition); + * the database backend kicks it off as a fire-and-forget IIFE so the + * row drives through `processing → completed | failed`. Receives the + * payload and an `AbortSignal` driven by `cancelJob`. 
+ */ + runner?: (payload: TPayload, signal: AbortSignal) => Promise } /** @@ -94,6 +108,16 @@ export interface JobQueueBackend { */ enqueue(type: JobType, payload: TPayload, options?: EnqueueOptions): Promise + /** + * Enqueue multiple jobs as a single batch. Returns one jobId per item, in + * input order. Backends preserve input order in queue dispatch (trigger.dev + * via tasks.batchTrigger, database via a single multi-row INSERT). + */ + batchEnqueue( + type: JobType, + items: Array<{ payload: TPayload; options?: EnqueueOptions }> + ): Promise + /** * Get a job by ID */ diff --git a/apps/sim/lib/table/cell-write.ts b/apps/sim/lib/table/cell-write.ts index 73f4fda251c..24826fe5d13 100644 --- a/apps/sim/lib/table/cell-write.ts +++ b/apps/sim/lib/table/cell-write.ts @@ -55,6 +55,25 @@ export async function writeWorkflowGroupState( return 'wrote' } const current = row.executions?.[groupId] as RowExecutionMetadata | undefined + // Stale-worker guard: only blocks writes FROM an old worker (status = + // running / completed / error / pending). A `queued` stamp is the scheduler + // claiming the cell for a brand-new run — the new executionId is supposed + // to overwrite whatever was there. Same for `cancelled` (authoritative). + // Without this carve-out, the new run's stamp gets rejected and the cell + // is stuck in its old state forever. 
+ const isAuthoritativeNewStamp = + payload.executionState.status === 'queued' || payload.executionState.status === 'cancelled' + if ( + !isAuthoritativeNewStamp && + current && + current.executionId && + current.executionId !== executionId + ) { + logger.info( + `Skipping group write — stale worker (table=${tableId} row=${rowId} group=${groupId} mine=${executionId} active=${current.executionId})` + ) + return 'skipped' + } if ( current?.status === 'cancelled' && current.executionId === executionId && @@ -66,11 +85,11 @@ export async function writeWorkflowGroupState( return 'skipped' } // Skip writing `cancelled` state with the guard — that's an authoritative - // write from `cancelWorkflowGroupRuns` and must always land. Cell-task - // writes (running/completed/error) get the SQL guard so an in-flight - // partial can't clobber a stop click that already committed. - const cancellationGuard = - payload.executionState.status === 'cancelled' ? undefined : { groupId, executionId } + // write from `cancelWorkflowGroupRuns` and must always land. New `queued` + // stamps from the scheduler also bypass — they ARE the new authority. Cell- + // task writes (running/completed/error) get the SQL guard so an in-flight + // partial can't clobber a stop click or a newer run that already committed. + const cancellationGuard = isAuthoritativeNewStamp ? undefined : { groupId, executionId } const result = await updateRow( { tableId, @@ -92,6 +111,28 @@ export async function writeWorkflowGroupState( return 'wrote' } +/** + * Flips `queued` → `running` to signal the cell task body has actually been + * picked up by a worker. The renderer uses the `queued` vs `running` distinction + * to label cells "Queued" vs "Waiting" (worker started, this block hasn't run + * yet) — without this marker we couldn't tell if a row was sitting in the + * trigger.dev queue or actively executing. 
+ */ +export async function markWorkflowGroupPickedUp( + ctx: WriteWorkflowGroupContext, + prev: Pick +): Promise<'wrote' | 'skipped'> { + return writeWorkflowGroupState(ctx, { + executionState: { + status: 'running', + executionId: ctx.executionId, + jobId: prev.jobId, + workflowId: prev.workflowId, + error: null, + }, + }) +} + /** Builds the canonical `cancelled` execution state used by every cancel path. * Preserves `blockErrors` from the prior state so errored cells keep * rendering Error after a stop click — only cells that hadn't yet produced diff --git a/apps/sim/lib/table/deps.ts b/apps/sim/lib/table/deps.ts new file mode 100644 index 00000000000..d9b33f59dbb --- /dev/null +++ b/apps/sim/lib/table/deps.ts @@ -0,0 +1,144 @@ +/** + * Pure dep-satisfaction helpers shared by the server-side scheduler and the + * client UI. Lives in its own file (not `workflow-columns.ts`) so the client + * can import it without pulling in `@sim/db` and other server-only deps. + */ + +import { createLogger } from '@sim/logger' +import type { RowData, RowExecutionMetadata, RowExecutions, TableRow, WorkflowGroup } from './types' + +const logger = createLogger('OptimisticCascade') + +/** + * True when the cell has a worker actively reserved — `queued` / `running`, + * or `pending` after the scheduler stamped a jobId. Single source of truth + * for the "is this exec in flight" classification across the eligibility + * predicate, optimistic patches, status counters, and renderer. `pending` + * without a jobId is the optimistic-flag-only state, not in-flight. + */ +export function isExecInFlight(exec: RowExecutionMetadata | undefined): boolean { + if (!exec) return false + const s = exec.status + if (s === 'queued' || s === 'running') return true + if (s === 'pending' && exec.jobId) return true + return false +} + +/** + * True when every output column the group writes still has a non-empty value + * on this row. 
The "completed" exec status is metadata, but the cells are the + * source of truth — if the user cleared an output cell, the row is effectively + * incomplete and should be re-run on dep-fill / manual incomplete-mode runs. + */ +export function areOutputsFilled(group: WorkflowGroup, row: TableRow): boolean { + if (group.outputs.length === 0) return true + for (const o of group.outputs) { + const v = row.data[o.columnName] + if (v === null || v === undefined || v === '') return false + } + return true +} + +/** + * Returns true when every column this group depends on is non-empty on this + * row. Workflow output columns count the same as plain columns — the model + * is uniform. + */ +export function areGroupDepsSatisfied(group: WorkflowGroup, row: TableRow): boolean { + const cols = group.dependencies?.columns ?? [] + for (const colName of cols) { + const value = row.data[colName] + if (value === null || value === undefined || value === '') return false + } + return true +} + +export interface UnmetDeps { + /** Column names whose value on this row is empty. */ + columns: string[] +} + +/** + * Like `areGroupDepsSatisfied` but returns *which* columns are unmet, so the + * UI can render "Waiting on column_a, column_b". + */ +export function getUnmetGroupDeps(group: WorkflowGroup, row: TableRow): UnmetDeps { + const cols = group.dependencies?.columns ?? [] + const columns: string[] = [] + for (const colName of cols) { + const value = row.data[colName] + if (value === null || value === undefined || value === '') columns.push(colName) + } + return { columns } +} + +/** + * Optimistic mirror of the server's row-update→scheduler cascade: for every + * workflow group whose deps were unmet *before* the patch and are satisfied + * *after*, return a new `executions` map with that group flipped to + * `pending`. 
The cell renderer treats `pending` as "Queued", which is what + * the user expects to see immediately after they fill in the missing input — + * not a flash of dash before the server's pending write arrives. + * + * Returns `null` when nothing changed, so callers can short-circuit. + */ +export function optimisticallyScheduleNewlyEligibleGroups( + groups: WorkflowGroup[], + beforeRow: TableRow, + patch: Partial +): RowExecutions | null { + if (groups.length === 0) return null + + const afterRow: TableRow = { + ...beforeRow, + data: { ...beforeRow.data, ...patch } as RowData, + } + + let next: RowExecutions | null = null + let flipped = 0 + let skipped = 0 + for (const group of groups) { + if (group.autoRun === false) { + skipped++ + continue + } + if (!areGroupDepsSatisfied(group, afterRow)) { + skipped++ + continue + } + + const exec = beforeRow.executions?.[group.id] + if (exec?.status === 'queued' || exec?.status === 'running') { + skipped++ + continue + } + if (exec?.status === 'pending' && exec.jobId) { + skipped++ + continue + } + + const isStaleCompleted = exec?.status === 'completed' && !areOutputsFilled(group, afterRow) + const wasSatisfied = areGroupDepsSatisfied(group, beforeRow) + const becameSatisfied = !wasSatisfied + const isRetryable = exec?.status === 'cancelled' || exec?.status === 'error' + if (!becameSatisfied && !isStaleCompleted && !isRetryable && exec) { + skipped++ + continue + } + + flipped++ + if (next === null) next = { ...(beforeRow.executions ?? {}) } + const pending: RowExecutionMetadata = { + status: 'pending', + executionId: exec?.executionId ?? null, + jobId: null, + workflowId: exec?.workflowId ?? 
group.workflowId, + error: null, + } + next[group.id] = pending + } + if (flipped > 0) { + logger.debug(`[OptimisticCascade] row=${beforeRow.id} flipped=${flipped} skipped=${skipped}`) + } + return next +} diff --git a/apps/sim/lib/table/service.ts b/apps/sim/lib/table/service.ts index 2d8b0a7c96e..cfdb544f7c5 100644 --- a/apps/sim/lib/table/service.ts +++ b/apps/sim/lib/table/service.ts @@ -12,10 +12,8 @@ import { userTableDefinitions, userTableRows, workflowExecutionLogs } from '@sim import { createLogger } from '@sim/logger' import { getPostgresErrorCode } from '@sim/utils/errors' import { generateId } from '@sim/utils/id' -import { and, count, eq, gt, gte, inArray, isNull, sql } from 'drizzle-orm' -import { env } from '@/lib/core/config/env' +import { and, count, eq, gt, gte, inArray, isNull, type SQL, sql } from 'drizzle-orm' import { generateRestoreName } from '@/lib/core/utils/restore-name' -import { getSocketServerUrl } from '@/lib/core/utils/urls' import { COLUMN_TYPES, NAME_PATTERN, TABLE_LIMITS, USER_TABLE_ROWS_SQL_NAME } from './constants' import { buildFilterClause, buildSortClause } from './sql' import { fireTableTrigger } from './trigger' @@ -63,63 +61,15 @@ import { validateTableName, validateTableSchema, } from './validation' -import { assertValidSchema, scheduleWorkflowGroupRuns } from './workflow-columns' +import { + assertValidSchema, + scheduleRunsForRows, + scheduleRunsForTable, + stripGroupDeps, +} from './workflow-columns' const logger = createLogger('TableService') -/** - * Fire-and-forget bridge to the realtime socket server. Mirrors the - * `notifyWorkflowArchived` pattern in `lib/workflows/lifecycle.ts:35`. - * Failures are logged but never thrown — sockets are best-effort, polling - * is the fallback. Each helper sends a single row delta so the realtime - * server can broadcast to subscribed clients in the table room. 
- */ -function notifyTableRowUpdated(tableId: string, row: TableRow): void { - void fetch(`${getSocketServerUrl()}/api/table-row-updated`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'x-api-key': env.INTERNAL_API_SECRET, - }, - body: JSON.stringify({ - tableId, - rowId: row.id, - data: row.data, - executions: row.executions, - position: row.position, - updatedAt: row.updatedAt instanceof Date ? row.updatedAt.toISOString() : row.updatedAt, - }), - }).catch((err) => { - logger.warn(`table-row-updated bridge failed for ${tableId}/${row.id}:`, err) - }) -} - -function notifyTableRowDeleted(tableId: string, rowId: string): void { - void fetch(`${getSocketServerUrl()}/api/table-row-deleted`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'x-api-key': env.INTERNAL_API_SECRET, - }, - body: JSON.stringify({ tableId, rowId }), - }).catch((err) => { - logger.warn(`table-row-deleted bridge failed for ${tableId}/${rowId}:`, err) - }) -} - -function notifyTableDeleted(tableId: string): void { - void fetch(`${getSocketServerUrl()}/api/table-deleted`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'x-api-key': env.INTERNAL_API_SECRET, - }, - body: JSON.stringify({ tableId }), - }).catch((err) => { - logger.warn(`table-deleted bridge failed for ${tableId}:`, err) - }) -} - export class TableConflictError extends Error { readonly code = 'TABLE_EXISTS' as const constructor(name: string) { @@ -523,13 +473,36 @@ export async function addTableColumn( } const updatedSchema: TableSchema = { ...schema, columns } - assertValidSchema(updatedSchema, table.metadata?.columnOrder) + + // Keep `metadata.columnOrder` in sync: when present, it must list every + // column in `schema.columns`. Splicing the new name in at the same index + // we used in `columns` keeps display ordering aligned with the user's + // intent for `position`-based inserts. 
+ const existingOrder = table.metadata?.columnOrder + let updatedMetadata = table.metadata + if (existingOrder && existingOrder.length > 0 && !existingOrder.includes(column.name)) { + let insertIdx = existingOrder.length + if (column.position !== undefined && column.position >= 0) { + // Anchor on the column previously at `position` — that column shifted + // right by one in `columns`, so the new name slots in at its old spot. + const anchor = schema.columns[column.position]?.name + if (anchor) { + const anchorIdx = existingOrder.indexOf(anchor) + if (anchorIdx !== -1) insertIdx = anchorIdx + } + } + const nextOrder = [...existingOrder] + nextOrder.splice(insertIdx, 0, column.name) + updatedMetadata = { ...table.metadata, columnOrder: nextOrder } + } + + assertValidSchema(updatedSchema, updatedMetadata?.columnOrder) const now = new Date() await db .update(userTableDefinitions) - .set({ schema: updatedSchema, updatedAt: now }) + .set({ schema: updatedSchema, metadata: updatedMetadata, updatedAt: now }) .where(eq(userTableDefinitions.id, tableId)) logger.info(`[${requestId}] Added column "${column.name}" to table ${tableId}`) @@ -537,6 +510,7 @@ export async function addTableColumn( return { ...table, schema: updatedSchema, + metadata: updatedMetadata, updatedAt: now, } } @@ -694,7 +668,6 @@ export async function deleteTable(tableId: string, requestId: string): Promise` and `data->>` accept the JSON key as a parameterized text value; + // no need for `sql.raw` interpolation. const matchFilter = typeof targetValue === 'string' - ? sql`${userTableRows.data}->>${sql.raw(`'${targetColumnName}'`)} = ${String(targetValue)}` - : sql`(${userTableRows.data}->${sql.raw(`'${targetColumnName}'`)})::jsonb = ${JSON.stringify(targetValue)}::jsonb` + ? 
sql`${userTableRows.data}->>${targetColumnName}::text = ${String(targetValue)}` + : sql`(${userTableRows.data}->${targetColumnName}::text)::jsonb = ${JSON.stringify(targetValue)}::jsonb` // Capacity enforcement for the insert path lives in the `increment_user_table_row_count` // trigger (migration 0198). The update path doesn't change row_count, so no check needed. @@ -1418,8 +1385,7 @@ export async function upsertRow( requestId ) } - notifyTableRowUpdated(data.tableId, result.row) - void scheduleWorkflowGroupRuns(table, [result.row]) + void scheduleRunsForRows(table, [result.row]) return result } @@ -1558,6 +1524,39 @@ export async function getRowById( } } +/** + * When a user edit clears a workflow output column to empty, also clear the + * exec record for that group. Without this, a `cancelled` (or `error`) exec + * sticks on the row even after the user wipes the output, blocking the + * auto-fire reactor (which respects terminal states). Treating the cleared + * cell as "user wants this re-armed" matches the rule that cells are the + * source of truth — we already do this for `completed` via + * `areOutputsFilled` in the eligibility predicate; this extends the same + * behavior to error/cancelled by making the data clear remove the exec. + * + * Returns a merged `executionsPatch` (caller's patch + null for groups whose + * outputs were cleared), or the caller's patch unchanged if nothing applies. 
+ */ +function deriveExecClearsForDataPatch( + dataPatch: RowData, + schema: TableSchema, + callerPatch: Record | undefined +): Record | undefined { + const groupsToClear = new Set() + for (const [columnName, value] of Object.entries(dataPatch)) { + const cleared = value === null || value === undefined || value === '' + if (!cleared) continue + const col = schema.columns.find((c) => c.name === columnName) + if (col?.workflowGroupId) groupsToClear.add(col.workflowGroupId) + } + if (groupsToClear.size === 0) return callerPatch + const merged: Record = { ...(callerPatch ?? {}) } + for (const gid of groupsToClear) { + if (!(gid in merged)) merged[gid] = null + } + return merged +} + /** Merges an `executionsPatch` into the row's existing executions blob. */ function applyExecutionsPatch( existing: RowExecutions, @@ -1575,6 +1574,53 @@ function applyExecutionsPatch( return next } +/** + * Builds a SQL expression that applies the given `executionsPatch` to the + * row's `executions` jsonb in-place — set keys for non-null values, delete + * keys for `null` values. Returns null when the patch is empty/missing. + * + * Why server-side: read-modify-write on the entire jsonb blob races between + * concurrent writers (e.g., a column edit and a manual-retry stamp), so the + * last writer wins for keys it didn't touch and clobbers other writers' + * exec updates. Patching keys at the SQL level keeps each writer's changes + * atomic per-key. 
+ */ +function buildExecutionsSqlPatch( + patch: Record | undefined +): SQL | null { + if (!patch) return null + const entries = Object.entries(patch) + if (entries.length === 0) return null + + let expr: SQL = sql`coalesce(${userTableRows.executions}, '{}'::jsonb)` + for (const [gid, value] of entries) { + if (value === null) { + expr = sql`(${expr}) - ${gid}::text` + } else { + expr = sql`(${expr}) || jsonb_build_object(${gid}::text, ${JSON.stringify(value)}::jsonb)` + } + } + return expr +} + +/** + * Strips the given workflow group ids from every row's `executions` jsonb on + * a table — used by the column / group delete paths so stale running/queued + * exec records don't linger and inflate counters after the group is gone. + * The caller wraps in their own transaction. + */ +async function stripGroupExecutions( + trx: Parameters[0]>[0], + tableId: string, + groupIds: Iterable +): Promise { + for (const gid of groupIds) { + await trx.execute( + sql`UPDATE user_table_rows SET executions = executions - ${gid}::text WHERE table_id = ${tableId} AND executions ? ${gid}::text` + ) + } +} + /** * Updates a single row. * @@ -1600,7 +1646,14 @@ export async function updateRow( ...(existingRow.data as RowData), ...data.data, } - const mergedExecutions = applyExecutionsPatch(existingRow.executions, data.executionsPatch) + // Auto-clear exec records for workflow output columns the user just wiped, + // so the auto-fire reactor sees no exec and re-arms the cell. 
+ const effectiveExecutionsPatch = deriveExecClearsForDataPatch( + data.data, + table.schema, + data.executionsPatch + ) + const mergedExecutions = applyExecutionsPatch(existingRow.executions, effectiveExecutionsPatch) // Validate size const sizeValidation = validateRowSize(mergedData) @@ -1631,26 +1684,46 @@ export async function updateRow( const now = new Date() // Cell-task partial writes pass `cancellationGuard` so the SQL update is a - // no-op when a stop click already wrote `cancelled` for this run between - // the in-process read and now. Without this, an in-flight `running` - // partial-write can land after `cancelled` and clobber it. + // no-op when (a) a stop click already wrote `cancelled` for this run, or + // (b) a newer run has taken over the cell with a different executionId. The + // worker is "this run's writes only land if this run is still the active + // run on the cell." Authoritative cancel writes from `cancelWorkflowGroupRuns` + // skip the guard entirely (they don't pass `cancellationGuard`). + // + // SQL-level for atomicity: an in-process read + update would race a + // concurrent stop or rerun. The two clauses are joined by AND because + // either failing means the worker is no longer authoritative. const guard = data.cancellationGuard - // The guard rejects writes only when the DB *already* shows - // `cancelled` + matching executionId. Wrap the JSON traversals in - // `IS DISTINCT FROM` so a missing `executions[groupId]` (NULL) cleanly - // evaluates as "different" — Postgres three-valued logic would otherwise - // make the whole expression NULL and the UPDATE would mistakenly become - // a no-op for any row that has no prior execution record. const whereClause = guard ? 
and( eq(userTableRows.id, data.rowId), - sql`(executions->${guard.groupId}->>'status' IS DISTINCT FROM 'cancelled' OR executions->${guard.groupId}->>'executionId' IS DISTINCT FROM ${guard.executionId})` + // Reject writes that would land on top of an already-`cancelled` state + // for this same run. Wrapped in IS DISTINCT FROM so a missing exec + // (NULL) cleanly evaluates as "different" rather than NULL-poisoning. + sql`(executions->${guard.groupId}->>'status' IS DISTINCT FROM 'cancelled' OR executions->${guard.groupId}->>'executionId' IS DISTINCT FROM ${guard.executionId})`, + // Reject writes from a stale worker — the cell's active run has moved + // on. `OR exec IS NULL` lets the worker land its first `running` + // stamp on a row that has no prior exec record (initial stamp from + // the scheduler may not have committed yet). + sql`(executions->${guard.groupId} IS NULL OR executions->${guard.groupId}->>'executionId' = ${guard.executionId})` ) : eq(userTableRows.id, data.rowId) + // Apply the executions patch at the SQL level — we never overwrite the full + // executions blob, only the keys the caller explicitly patched. Without + // this, concurrent updateRow calls (e.g., a column edit and a manual + // retry's stamp) would each compute `mergedExecutions` from their own + // in-memory snapshot and the last writer wins, clobbering the other's + // exec keys. The data field still does last-writer-wins because that's + // the user's edit, but exec records are independently keyed by groupId. + const executionsExpr = buildExecutionsSqlPatch(effectiveExecutionsPatch) const updated = await db .update(userTableRows) - .set({ data: mergedData, executions: mergedExecutions, updatedAt: now }) + .set({ + data: mergedData, + ...(executionsExpr ? 
{ executions: executionsExpr } : {}), + updatedAt: now, + }) .where(whereClause) .returning({ id: userTableRows.id }) @@ -1680,15 +1753,9 @@ export async function updateRow( table.schema, requestId ) - // Notify BEFORE the scheduler so this event (carrying the user's data - // update with pre-scheduler executions) reaches the client first. The - // scheduler then fires its own per-write notifications with `pending`/ - // `running` execution state — those land last, so the cached executions - // end on the live state instead of being clobbered by a stale envelope. - notifyTableRowUpdated(data.tableId, updatedRow) // Awaited (not `void`) so cell tasks dispatch their cascade before the // trigger.dev worker tears down on `run()` resolve. - await scheduleWorkflowGroupRuns(table, [updatedRow]) + if (!data.skipScheduler) await scheduleRunsForRows(table, [updatedRow]) return updatedRow } @@ -1730,7 +1797,6 @@ export async function deleteRow( }) logger.info(`[${requestId}] Deleted row ${rowId} from table ${tableId}`) - notifyTableRowDeleted(tableId, rowId) } /** @@ -1851,8 +1917,7 @@ export async function updateRowsByFilter( table.schema, requestId ) - for (const row of updatedRows) notifyTableRowUpdated(data.tableId, row) - void scheduleWorkflowGroupRuns(table, updatedRows) + void scheduleRunsForRows(table, updatedRows) return { affectedCount: matchingRows.length, @@ -1906,11 +1971,19 @@ export async function batchUpdateRows( rowId: string mergedData: RowData mergedExecutions: RowExecutions + executionsPatch?: Record }> = [] for (const update of data.updates) { const existing = existingMap.get(update.rowId)! const merged = { ...existing.data, ...update.data } - const mergedExecutions = applyExecutionsPatch(existing.executions, update.executionsPatch) + // Auto-clear exec records for workflow output columns the user just + // wiped — same rationale as `updateRow`. 
+ const effectiveExecutionsPatch = deriveExecClearsForDataPatch( + update.data, + table.schema, + update.executionsPatch + ) + const mergedExecutions = applyExecutionsPatch(existing.executions, effectiveExecutionsPatch) const sizeValidation = validateRowSize(merged) if (!sizeValidation.valid) { @@ -1922,7 +1995,12 @@ export async function batchUpdateRows( throw new Error(`Row ${update.rowId}: ${schemaValidation.errors.join(', ')}`) } - mergedUpdates.push({ rowId: update.rowId, mergedData: merged, mergedExecutions }) + mergedUpdates.push({ + rowId: update.rowId, + mergedData: merged, + mergedExecutions, + executionsPatch: effectiveExecutionsPatch, + }) } const uniqueColumns = getUniqueColumns(table.schema) @@ -1946,12 +2024,20 @@ export async function batchUpdateRows( await setTableTxTimeouts(trx, { statementMs: 60_000 }) for (let i = 0; i < mergedUpdates.length; i += TABLE_LIMITS.UPDATE_BATCH_SIZE) { const batch = mergedUpdates.slice(i, i + TABLE_LIMITS.UPDATE_BATCH_SIZE) - const updatePromises = batch.map(({ rowId, mergedData, mergedExecutions }) => - trx + // Same as `updateRow`: patch executions at the SQL level when a patch + // is set, so concurrent writers don't clobber each other's keys via + // last-writer-wins on the full jsonb blob. + const updatePromises = batch.map(({ rowId, mergedData, executionsPatch }) => { + const executionsExpr = buildExecutionsSqlPatch(executionsPatch) + return trx .update(userTableRows) - .set({ data: mergedData, executions: mergedExecutions, updatedAt: now }) + .set({ + data: mergedData, + ...(executionsExpr ? { executions: executionsExpr } : {}), + updatedAt: now, + }) .where(eq(userTableRows.id, rowId)) - ) + }) await Promise.all(updatePromises) } }) @@ -1980,11 +2066,7 @@ export async function batchUpdateRows( table.schema, requestId ) - // Same ordering as `updateRow`: notify with the user's data update first - // so the scheduler's later per-write notifications (pending/running) land - // last and stick in the client cache. 
- for (const row of updatedRowsForTrigger) notifyTableRowUpdated(data.tableId, row) - void scheduleWorkflowGroupRuns(table, updatedRowsForTrigger) + if (!data.skipScheduler) void scheduleRunsForRows(table, updatedRowsForTrigger) return { affectedCount: mergedUpdates.length, @@ -2095,7 +2177,6 @@ export async function deleteRowsByFilter( }) logger.info(`[${requestId}] Deleted ${matchingRows.length} rows from table ${data.tableId}`) - for (const id of rowIds) notifyTableRowDeleted(data.tableId, id) return { affectedCount: matchingRows.length, @@ -2153,7 +2234,6 @@ export async function deleteRowsByIds( const missingRowIds = uniqueRequestedRowIds.filter((id) => !deletedIdSet.has(id)) logger.info(`[${requestId}] Deleted ${deletedIds.length} rows by ID from table ${data.tableId}`) - for (const id of deletedIds) notifyTableRowDeleted(data.tableId, id) return { deletedCount: deletedIds.length, @@ -2221,23 +2301,17 @@ export async function renameColumn( const renamedDeps = group.dependencies?.columns?.map((d) => d === actualOldName ? data.newName : d ) - const renamedGroupDeps = group.dependencies?.workflowGroups - const next = { + return { ...group, outputs: renamedOutputs, - dependencies: { - ...(renamedDeps ? { columns: renamedDeps } : {}), - ...(renamedGroupDeps ? { workflowGroups: renamedGroupDeps } : {}), - }, + ...(renamedDeps ? { dependencies: { columns: renamedDeps } } : {}), } - return next }) const updatedSchema: TableSchema = { ...schema, columns: updatedColumns, ...(updatedGroups.length > 0 ? { workflowGroups: updatedGroups } : {}), } - assertValidSchema(updatedSchema, table.metadata?.columnOrder) const metadata = table.metadata as TableMetadata | null let updatedMetadata = metadata @@ -2251,6 +2325,11 @@ export async function renameColumn( columnOrder: updatedMetadata.columnOrder.map((n) => (n === actualOldName ? data.newName : n)), } } + // Validate against the *post-rename* column order. 
The schema's workflow + // group outputs already reference the new name, so checking against the old + // columnOrder makes the renamed output look "missing" from its group and + // falsely flags the remaining siblings as non-contiguous. + assertValidSchema(updatedSchema, updatedMetadata?.columnOrder) const now = new Date() const statementMs = scaledStatementTimeoutMs(table.rowCount ?? 0, { @@ -2265,8 +2344,10 @@ export async function renameColumn( .set({ schema: updatedSchema, metadata: updatedMetadata, updatedAt: now }) .where(eq(userTableDefinitions.id, data.tableId)) + // All bindings parameterized — `data->` accepts a text parameter for the + // key, no need to drop into `sql.raw` with hand-rolled quote escaping. await trx.execute( - sql`UPDATE user_table_rows SET data = data - ${actualOldName}::text || jsonb_build_object(${data.newName}::text, data->${sql.raw(`'${actualOldName.replace(/'/g, "''")}'`)}) WHERE table_id = ${data.tableId} AND data ? ${actualOldName}::text` + sql`UPDATE user_table_rows SET data = data - ${actualOldName}::text || jsonb_build_object(${data.newName}::text, data->${actualOldName}::text) WHERE table_id = ${data.tableId} AND data ? ${actualOldName}::text` ) }) @@ -2312,48 +2393,21 @@ export async function deleteColumn( // Drop this column's reference from every group's outputs and `columns` // dependency. If the column is the last output of its parent group, the - // group itself is also removed (a group with zero outputs is invalid), - // and any OTHER group depending on this group has the dep cleared too. + // group itself is also removed (a group with zero outputs is invalid). let groupRemovedId: string | null = null - let updatedGroups = (schema.workflowGroups ?? 
[]).map((group) => { - let next = group - if (ownerGroupId && group.id === ownerGroupId) { - const remaining = group.outputs.filter((o) => o.columnName !== actualName) - if (remaining.length === 0) { - groupRemovedId = group.id - } - next = { ...next, outputs: remaining } - } - const filtered = next.dependencies?.columns?.filter((d) => d !== actualName) - if (filtered && filtered.length !== (next.dependencies?.columns?.length ?? 0)) { - next = { - ...next, - dependencies: { - ...(filtered.length > 0 ? { columns: filtered } : {}), - ...(next.dependencies?.workflowGroups - ? { workflowGroups: next.dependencies.workflowGroups } - : {}), - }, + const updatedGroups = (schema.workflowGroups ?? []) + .map((group) => { + let next = group + if (ownerGroupId && group.id === ownerGroupId) { + const remaining = group.outputs.filter((o) => o.columnName !== actualName) + if (remaining.length === 0) { + groupRemovedId = group.id + } + next = { ...next, outputs: remaining } } - } - return next - }) - if (groupRemovedId) { - const removed = groupRemovedId - updatedGroups = updatedGroups - .filter((g) => g.id !== removed) - .map((g) => - g.dependencies?.workflowGroups - ? { - ...g, - dependencies: { - ...(g.dependencies.columns ? { columns: g.dependencies.columns } : {}), - workflowGroups: g.dependencies.workflowGroups.filter((id) => id !== removed), - }, - } - : g - ) - } + return stripGroupDeps(next, new Set([actualName])) + }) + .filter((g) => g.id !== groupRemovedId) const updatedSchema: TableSchema = { ...schema, @@ -2385,6 +2439,7 @@ export async function deleteColumn( await trx.execute( sql`UPDATE user_table_rows SET data = data - ${actualName}::text WHERE table_id = ${data.tableId} AND data ? 
${actualName}::text` ) + if (groupRemovedId) await stripGroupExecutions(trx, data.tableId, [groupRemovedId]) }) logger.info(`[${requestId}] Deleted column "${actualName}" from table ${data.tableId}`) @@ -2443,21 +2498,7 @@ export async function deleteColumns( }) updatedGroups = updatedGroups .filter((g) => !removedGroupIds.has(g.id)) - .map((group) => { - const depCols = group.dependencies?.columns?.filter((d) => !namesToDelete.has(d)) - const depGroups = group.dependencies?.workflowGroups?.filter((id) => !removedGroupIds.has(id)) - const colsChanged = depCols && depCols.length !== (group.dependencies?.columns?.length ?? 0) - const groupsChanged = - depGroups && depGroups.length !== (group.dependencies?.workflowGroups?.length ?? 0) - if (!colsChanged && !groupsChanged) return group - return { - ...group, - dependencies: { - ...(depCols && depCols.length > 0 ? { columns: depCols } : {}), - ...(depGroups && depGroups.length > 0 ? { workflowGroups: depGroups } : {}), - }, - } - }) + .map((group) => stripGroupDeps(group, namesToDelete)) const updatedSchema: TableSchema = { ...schema, columns: remaining, @@ -2491,6 +2532,7 @@ export async function deleteColumns( sql`UPDATE user_table_rows SET data = data - ${name}::text WHERE table_id = ${data.tableId} AND data ? ${name}::text` ) } + await stripGroupExecutions(trx, data.tableId, removedGroupIds) }) logger.info( @@ -2536,8 +2578,6 @@ export async function updateColumnType( return table } - const escapedName = column.name.replace(/'/g, "''") - // Validate existing data is compatible with the new type const rows = await db .select({ id: userTableRows.id, data: userTableRows.data }) @@ -2546,7 +2586,7 @@ export async function updateColumnType( and( eq(userTableRows.tableId, data.tableId), sql`${userTableRows.data} ? 
${column.name}`, - sql`${userTableRows.data}->>${sql.raw(`'${escapedName}'`)} IS NOT NULL` + sql`${userTableRows.data}->>${column.name}::text IS NOT NULL` ) ) @@ -2616,8 +2656,6 @@ export async function updateColumnConstraints( `Cannot change constraints on workflow-output column "${column.name}". Constraints aren't applicable to columns whose values come from workflow execution.` ) } - const escapedName = column.name.replace(/'/g, "''") - if (data.required === true && !column.required) { const [result] = await db .select({ count: count() }) @@ -2625,7 +2663,7 @@ export async function updateColumnConstraints( .where( and( eq(userTableRows.tableId, data.tableId), - sql`(NOT (${userTableRows.data} ? ${column.name}) OR ${userTableRows.data}->>${sql.raw(`'${escapedName}'`)} IS NULL)` + sql`(NOT (${userTableRows.data} ? ${column.name}) OR ${userTableRows.data}->>${column.name}::text IS NULL)` ) ) @@ -2638,7 +2676,7 @@ export async function updateColumnConstraints( if (data.unique === true && !column.unique) { const duplicates = (await db.execute( - sql`SELECT ${userTableRows.data}->>${sql.raw(`'${escapedName}'`)} AS val, count(*) AS cnt FROM ${userTableRows} WHERE table_id = ${data.tableId} AND ${userTableRows.data} ? ${column.name} AND ${userTableRows.data}->>${sql.raw(`'${escapedName}'`)} IS NOT NULL GROUP BY val HAVING count(*) > 1 LIMIT 1` + sql`SELECT ${userTableRows.data}->>${column.name}::text AS val, count(*) AS cnt FROM ${userTableRows} WHERE table_id = ${data.tableId} AND ${userTableRows.data} ? 
${column.name} AND ${userTableRows.data}->>${column.name}::text IS NOT NULL GROUP BY val HAVING count(*) > 1 LIMIT 1` )) as { val: string; cnt: number }[] if (duplicates.length > 0) { @@ -2713,44 +2751,51 @@ export async function addWorkflowGroup( columns: [...schema.columns, ...data.outputColumns], workflowGroups: [...groups, data.group], } - assertValidSchema(updatedSchema, table.metadata?.columnOrder) + + // Keep `metadata.columnOrder` in sync — see `addTableColumn` for the + // invariant. New output columns get appended in the order the caller + // supplied (matches their position in `schema.columns`). + const existingOrder = table.metadata?.columnOrder + let updatedMetadata = table.metadata + if (existingOrder && existingOrder.length > 0) { + const known = new Set(existingOrder) + const append = data.outputColumns.map((c) => c.name).filter((n) => !known.has(n)) + if (append.length > 0) { + updatedMetadata = { ...table.metadata, columnOrder: [...existingOrder, ...append] } + } + } + + assertValidSchema(updatedSchema, updatedMetadata?.columnOrder) const now = new Date() await db .update(userTableDefinitions) - .set({ schema: updatedSchema, updatedAt: now }) + .set({ schema: updatedSchema, metadata: updatedMetadata, updatedAt: now }) .where(eq(userTableDefinitions.id, data.tableId)) logger.info( `[${requestId}] Added workflow group "${data.group.id}" with ${data.outputColumns.length} output column(s) to table ${data.tableId}` ) - const updatedTable: TableDefinition = { ...table, schema: updatedSchema, updatedAt: now } + const updatedTable: TableDefinition = { + ...table, + schema: updatedSchema, + metadata: updatedMetadata, + updatedAt: now, + } // Schedule existing rows so already-filled deps trigger immediately. Skipped // when the caller opted out (Mothership stages groups silently — `autoRun: // false` — so the AI can compose multiple changes without firing rows mid-edit). 
+ // Awaited (not `void`) so the response includes the queued exec state — the + // client's post-mutation refetch otherwise lands before the stamps commit + // and the rows query polling never starts. if (data.autoRun !== false) { - void (async () => { - try { - const rowRecords = await db - .select() - .from(userTableRows) - .where(eq(userTableRows.tableId, data.tableId)) - if (rowRecords.length === 0) return - const rows: TableRow[] = rowRecords.map((r) => ({ - id: r.id, - data: r.data as RowData, - executions: (r.executions as RowExecutions) ?? {}, - position: r.position, - createdAt: r.createdAt, - updatedAt: r.updatedAt, - })) - await scheduleWorkflowGroupRuns(updatedTable, rows) - } catch (err) { - logger.error(`[${requestId}] Failed to schedule runs after group add:`, err) - } - })() + try { + await scheduleRunsForTable(updatedTable) + } catch (err) { + logger.error(`[${requestId}] Failed to schedule runs after group add:`, err) + } } return updatedTable @@ -2779,12 +2824,90 @@ export async function updateWorkflowGroup( } const group = groups[groupIndex] - const newOutputs = data.outputs ?? group.outputs + // Apply `mappingUpdates` first: each entry repoints an existing output's + // `(blockId, path)` while preserving the column. We patch the **old** view + // of outputs so the downstream `(blockId, path)`-keyed diff doesn't see the + // swap as a remove+add. The corresponding row data is cleared after the + // schema write so stale values from the old source don't linger. + const mappingUpdates = data.mappingUpdates ?? [] + const remappedColumnNames = new Set() + // Per-column type override resolved from the new mapping's leaf type. Only + // populated when a remap actually changes the column's type — keeps the + // schema patch a no-op when the user repoints to an output of the same + // type. Falls back to leaving the existing type alone if the workflow or + // its target output can't be resolved (workflow deleted, block removed). 
+ const remappedColumnTypes = new Map() + let oldOutputs = group.outputs + if (mappingUpdates.length > 0) { + const updateByName = new Map(mappingUpdates.map((u) => [u.columnName, u])) + for (const u of mappingUpdates) { + const exists = oldOutputs.some((o) => o.columnName === u.columnName) + if (!exists) { + throw new Error( + `Mapping update for unknown column "${u.columnName}" (group ${data.groupId}).` + ) + } + } + oldOutputs = oldOutputs.map((o) => { + const u = updateByName.get(o.columnName) + if (!u) return o + remappedColumnNames.add(o.columnName) + return { ...o, blockId: u.blockId, path: u.path } + }) + + // Resolve the new leaf type for each remap so the column's declared type + // matches what the new mapping produces. Without this, a string→number + // remap would keep `type: 'string'` and validateRowAgainstSchema would + // reject every backfilled value. + try { + const [ + { loadWorkflowFromNormalizedTables }, + { flattenWorkflowOutputs }, + { columnTypeForLeaf }, + ] = await Promise.all([ + import('@/lib/workflows/persistence/utils'), + import('@/lib/workflows/blocks/flatten-outputs'), + import('./column-naming'), + ]) + const targetWorkflowId = data.workflowId ?? group.workflowId + const normalized = await loadWorkflowFromNormalizedTables(targetWorkflowId) + if (normalized) { + const blocks = Object.values(normalized.blocks ?? {}).map((b) => ({ + id: b.id, + type: b.type, + name: b.name, + triggerMode: (b as { triggerMode?: boolean }).triggerMode, + subBlocks: b.subBlocks as Record | undefined, + })) + const flattened = flattenWorkflowOutputs(blocks, normalized.edges ?? 
[]) + const flatByKey = new Map(flattened.map((f) => [`${f.blockId}::${f.path}`, f])) + const colByName = new Map(schema.columns.map((c) => [c.name, c])) + for (const u of mappingUpdates) { + const match = flatByKey.get(`${u.blockId}::${u.path}`) + if (!match) continue + const newType = columnTypeForLeaf(match.leafType) + const oldType = colByName.get(u.columnName)?.type + if (newType && newType !== oldType) { + remappedColumnTypes.set(u.columnName, newType) + } + } + } + } catch (err) { + logger.warn( + `[${requestId}] Could not resolve new leaf types for remap on group ${data.groupId}; leaving column types unchanged:`, + err + ) + } + } + + // If the caller passed `outputs`, that's the new full set. If only + // `mappingUpdates` was sent, the new set is the remapped old set. + const newOutputs = data.outputs ?? oldOutputs const oldKey = (o: WorkflowGroupOutput) => `${o.blockId}::${o.path}` - const oldByKey = new Map(group.outputs.map((o) => [oldKey(o), o])) + const oldByKey = new Map(oldOutputs.map((o) => [oldKey(o), o])) const newByKey = new Map(newOutputs.map((o) => [oldKey(o), o])) - const removed = group.outputs.filter((o) => !newByKey.has(oldKey(o))) + const removed = oldOutputs.filter((o) => !newByKey.has(oldKey(o))) const added = newOutputs.filter((o) => !oldByKey.has(oldKey(o))) const newColDefs = data.newOutputColumns ?? [] const newColByName = new Map(newColDefs.map((c) => [c.name, c])) @@ -2798,7 +2921,12 @@ export async function updateWorkflowGroup( } const removedColumnNames = new Set(removed.map((o) => o.columnName)) - let nextColumns = schema.columns.filter((c) => !removedColumnNames.has(c.name)) + let nextColumns = schema.columns + .filter((c) => !removedColumnNames.has(c.name)) + .map((c) => { + const newType = remappedColumnTypes.get(c.name) + return newType ? { ...c, type: newType } : c + }) if (newColDefs.length > 0) { // Splice the new column defs into the group's contiguous run rather than // appending at the end. 
The desired in-group order is `newOutputs` (the @@ -2835,8 +2963,13 @@ export async function updateWorkflowGroup( name: data.name ?? group.name, dependencies: data.dependencies ?? group.dependencies, outputs: newOutputs, + ...(data.autoRun !== undefined ? { autoRun: data.autoRun } : {}), } - const nextGroups = groups.map((g, i) => (i === groupIndex ? updatedGroup : g)) + // Removed outputs may be referenced as deps by sibling groups; strip those + // refs so we don't leave dangling-column deps that fail schema validation. + const nextGroups = groups + .map((g, i) => (i === groupIndex ? updatedGroup : g)) + .map((g) => (g.id === updatedGroup.id ? g : stripGroupDeps(g, removedColumnNames))) const updatedSchema: TableSchema = { ...schema, columns: nextColumns, @@ -2882,10 +3015,20 @@ export async function updateWorkflowGroup( sql`UPDATE user_table_rows SET data = data - ${name}::text WHERE table_id = ${data.tableId} AND data ? ${name}::text` ) } + // Remapped columns: clear stale values in-tx so rows the backfill can't + // repopulate (no log, no matching span output) end up empty rather than + // retaining the previous mapping's value. The backfill below then writes + // the new mapping's value into rows where it can find one. + for (const name of remappedColumnNames) { + if (removedColumnNames.has(name)) continue + await trx.execute( + sql`UPDATE user_table_rows SET data = data - ${name}::text WHERE table_id = ${data.tableId} AND data ? 
${name}::text` + ) + } }) logger.info( - `[${requestId}] Updated workflow group "${data.groupId}" in table ${data.tableId} (added=${added.length}, removed=${removed.length})` + `[${requestId}] Updated workflow group "${data.groupId}" in table ${data.tableId} (added=${added.length}, removed=${removed.length}, remapped=${remappedColumnNames.size})` ) const updatedTable: TableDefinition = { @@ -2895,18 +3038,22 @@ export async function updateWorkflowGroup( updatedAt: now, } - // Backfill the new outputs from execution logs so already-completed group - // runs surface the just-added columns without re-running the workflow. - // Awaited so the response only returns once row data is consistent — the - // client then refetches and sees the backfilled values immediately. A failed - // backfill is logged but doesn't fail the whole request, since the schema + // Backfill from saved execution logs so already-completed group runs surface + // the schema changes without re-running the workflow. Two passes: + // - added outputs (new columns): never overwrite hand-edited values. + // - remapped outputs (existing column re-pointed): overwrite, since the + // new mapping is the source of truth and the user expects the cell to + // refresh to the new output's value. + // Awaited so the response only returns once row data is consistent. A + // failed backfill is logged but doesn't fail the request — the schema // change has already committed. 
if (added.length > 0) { try { - await backfillAddedGroupOutputs({ + await backfillGroupOutputsFromLogs({ table: updatedTable, groupId: data.groupId, - addedOutputs: added, + outputs: added, + overwrite: false, requestId, }) } catch (err) { @@ -2916,6 +3063,35 @@ export async function updateWorkflowGroup( ) } } + if (remappedColumnNames.size > 0) { + const remappedOutputs = newOutputs.filter((o) => remappedColumnNames.has(o.columnName)) + try { + await backfillGroupOutputsFromLogs({ + table: updatedTable, + groupId: data.groupId, + outputs: remappedOutputs, + overwrite: true, + requestId, + }) + } catch (err) { + logger.warn( + `[${requestId}] Remap backfill from execution logs failed for ${data.tableId} group ${data.groupId}:`, + err + ) + } + } + + // autoRun toggled false → true: fire deps-satisfied rows now. Mirrors the + // post-add scheduling path so re-enabling auto-fire doesn't require manual + // run clicks for rows that are already eligible. Awaited so the post- + // mutation refetch sees the queued exec stamps. + if (group.autoRun === false && data.autoRun === true) { + try { + await scheduleRunsForTable(updatedTable, { groupId: data.groupId }) + } catch (err) { + logger.error(`[${requestId}] Failed to schedule runs after autoRun toggled on:`, err) + } + } return updatedTable } @@ -3101,32 +3277,35 @@ export async function addWorkflowGroupOutput( `[${requestId}] Added output "${columnName}" (${newColDef.type}) to workflow group "${data.groupId}" in table ${data.tableId}` ) - // Backfill: re-run the group on every dep-satisfied row (including ones - // that previously completed) so the new column actually gets populated. - // Adding an output without values defeats the point — the user wants the - // values, not just the empty header. 
- void (async () => { - try { - const { triggerWorkflowGroupRun } = await import('./workflow-columns') - const { triggered } = await triggerWorkflowGroupRun({ - tableId: data.tableId, - groupId: data.groupId, - workspaceId: table.workspaceId, - mode: 'all', - requestId, - }) - logger.info( - `[${requestId}] Backfilled ${triggered} row(s) after adding output "${columnName}"` - ) - } catch (err) { - logger.error( - `[${requestId}] Failed to backfill rows after adding output "${columnName}":`, - err - ) - } - })() + // Backfill from saved execution logs — same flow `updateWorkflowGroup` + // uses for added outputs. Reads each row's saved trace spans for the + // group's executionId and writes the new output's value back. Existing + // rows that have hand-edited values are left alone (overwrite: false). + // Cheap compared to re-running the workflow on every row, which is what + // an earlier version of this code did — that mistakenly fanned out N + // workflow-group-cell jobs and burned compute the user didn't ask for. + const updatedTable: TableDefinition = { + ...table, + schema: updatedSchema, + metadata: updatedMetadata, + updatedAt: now, + } + try { + await backfillGroupOutputsFromLogs({ + table: updatedTable, + groupId: data.groupId, + outputs: [newOutput], + overwrite: false, + requestId, + }) + } catch (err) { + logger.warn( + `[${requestId}] Backfill from execution logs failed for ${data.tableId} group ${data.groupId} after adding output "${columnName}":`, + err + ) + } - return { ...table, schema: updatedSchema, metadata: updatedMetadata, updatedAt: now } + return updatedTable } /** @@ -3217,17 +3396,11 @@ export async function deleteWorkflowGroup( } const removedColumnNames = new Set(group.outputs.map((o) => o.columnName)) + // Removed group's output columns may be referenced as deps by sibling groups. + // Strip those refs so we don't leave dangling-column deps behind. 
const nextGroups = groups .filter((g) => g.id !== data.groupId) - .map((g) => ({ - ...g, - dependencies: g.dependencies?.workflowGroups - ? { - ...(g.dependencies.columns ? { columns: g.dependencies.columns } : {}), - workflowGroups: g.dependencies.workflowGroups.filter((id) => id !== data.groupId), - } - : g.dependencies, - })) + .map((g) => stripGroupDeps(g, removedColumnNames)) const updatedSchema: TableSchema = { ...schema, columns: schema.columns.filter((c) => !removedColumnNames.has(c.name)), @@ -3255,9 +3428,7 @@ export async function deleteWorkflowGroup( sql`UPDATE user_table_rows SET data = data - ${name}::text WHERE table_id = ${data.tableId} AND data ? ${name}::text` ) } - await trx.execute( - sql`UPDATE user_table_rows SET executions = executions - ${data.groupId}::text WHERE table_id = ${data.tableId} AND executions ? ${data.groupId}::text` - ) + await stripGroupExecutions(trx, data.tableId, [data.groupId]) }) logger.info(`[${requestId}] Deleted workflow group "${data.groupId}" from table ${data.tableId}`) @@ -3291,14 +3462,28 @@ function findSpanByBlockId( return undefined } -async function backfillAddedGroupOutputs(opts: { +/** + * Walks completed group executions and pulls each target output's value out of + * the workflow's saved trace spans, writing it back into row data. Used in two + * spots: + * + * - **added** outputs (new columns added to an existing group): `overwrite` + * is false, so rows with a hand-edited value already in the column are + * left alone. + * - **remapped** outputs (existing column re-pointed at a different + * `(blockId, path)`): `overwrite` is true — the new mapping is the source + * of truth, and the user expects the column to refresh to the new + * output's value rather than retain the stale old one. 
+ */ +async function backfillGroupOutputsFromLogs(opts: { table: TableDefinition groupId: string - addedOutputs: WorkflowGroupOutput[] + outputs: WorkflowGroupOutput[] + overwrite: boolean requestId: string }): Promise { - const { table, groupId, addedOutputs, requestId } = opts - if (addedOutputs.length === 0) return + const { table, groupId, outputs, overwrite, requestId } = opts + if (outputs.length === 0) return const { pluckByPath } = await import('./pluck') @@ -3342,8 +3527,8 @@ async function backfillAddedGroupOutputs(opts: { const dataPatch: RowData = {} let mutated = false - for (const out of addedOutputs) { - if ((r.data as RowData)[out.columnName] !== undefined) continue + for (const out of outputs) { + if (!overwrite && (r.data as RowData)[out.columnName] !== undefined) continue const span = findSpanByBlockId(log.traceSpans, out.blockId) if (!span?.output) continue const picked = pluckByPath(span.output, out.path) @@ -3368,7 +3553,7 @@ async function backfillAddedGroupOutputs(opts: { ) logger.info( - `[${requestId}] Backfilled ${updates.length} row(s) for group "${groupId}" in table ${table.id}` + `[${requestId}] Backfilled ${updates.length} row(s) for group "${groupId}" in table ${table.id} (${overwrite ? 'remapped' : 'added'})` ) } diff --git a/apps/sim/lib/table/types.ts b/apps/sim/lib/table/types.ts index e8976275aef..5d6b90d8413 100644 --- a/apps/sim/lib/table/types.ts +++ b/apps/sim/lib/table/types.ts @@ -44,15 +44,13 @@ export interface WorkflowGroupOutput { } export interface WorkflowGroupDependencies { - /** Plain columns that must be non-empty before this group runs. */ - columns?: string[] /** - * Other workflow groups that must reach `status: completed` before this - * group runs. The dep graph is a first-class concept — you depend on a - * producing group, never on a sibling output value (which can legitimately - * be null on success). + * Columns that must be non-empty before this group runs. 
Workflow output + * columns count too — once an upstream group fills its output column, any + * downstream group depending on that column becomes eligible. The user + * model is uniform: deps are columns, not group-completion edges. */ - workflowGroups?: string[] + columns?: string[] } export interface WorkflowGroup { @@ -62,6 +60,13 @@ export interface WorkflowGroup { name?: string dependencies?: WorkflowGroupDependencies outputs: WorkflowGroupOutput[] + /** + * When `false`, the group never auto-fires from the scheduler — it can only + * be triggered manually via the "Run" actions. Defaults to `true` so + * existing groups keep firing on dep satisfaction. Persisted alongside the + * group definition; the scheduler reads it in `isGroupEligible`. + */ + autoRun?: boolean } /** @@ -70,7 +75,7 @@ export interface WorkflowGroup { * values land in `row.data` directly. */ export interface RowExecutionMetadata { - status: 'pending' | 'running' | 'completed' | 'error' | 'cancelled' + status: 'pending' | 'queued' | 'running' | 'completed' | 'error' | 'cancelled' executionId: string | null /** * Async-job id (e.g. trigger.dev run id) for the in-flight execution. @@ -303,6 +308,14 @@ export interface UpdateRowData { * state. `updateRow` returns `null` when the guard rejects the write. */ cancellationGuard?: { groupId: string; executionId: string } + /** + * When true, the post-write `scheduleRunsForRows` call is skipped. Used by + * the cancel path (which is tearing rows down, not waking them up) and by + * the manual-run path (which fires its own `scheduleRunsForRows` with + * `isManualRun: true` and doesn't want a duplicate auto-fire pass on the + * cleared cells). Default false: every other write fires the reactor. + */ + skipScheduler?: boolean } export interface BulkUpdateData { @@ -321,6 +334,8 @@ export interface BatchUpdateByIdData { executionsPatch?: Record }> workspaceId: string + /** Same semantics as `UpdateRowData.skipScheduler`. 
*/ + skipScheduler?: boolean } export interface BulkDeleteData { @@ -401,6 +416,15 @@ export interface UpdateWorkflowGroupData { outputs?: WorkflowGroupOutput[] /** Column definitions for any newly-added outputs. */ newOutputColumns?: ColumnDefinition[] + /** + * Per-column mapping swaps: keep the existing column, repoint it at a new + * `(blockId, path)`. Applied before the `outputs` diff and clears the + * affected columns' row data so the next run repopulates from the new + * source. + */ + mappingUpdates?: Array<{ columnName: string; blockId: string; path: string }> + /** Toggle the group's auto-run flag. Omit to leave it unchanged. */ + autoRun?: boolean } export interface DeleteWorkflowGroupData { diff --git a/apps/sim/lib/table/validation.ts b/apps/sim/lib/table/validation.ts index dbea6a32c40..8f6d2757d4e 100644 --- a/apps/sim/lib/table/validation.ts +++ b/apps/sim/lib/table/validation.ts @@ -276,7 +276,7 @@ export function getUniqueColumns(schema: TableSchema): ColumnDefinition[] { export function validateUniqueConstraints( data: RowData, schema: TableSchema, - existingRows: { id: string; data: RowData }[], + existingRows: { id: string; data: RowData; position?: number }[], excludeRowId?: string ): ValidationResult { const errors: string[] = [] @@ -297,8 +297,10 @@ export function validateUniqueConstraints( }) if (duplicate) { + const rowLabel = + typeof duplicate.position === 'number' ? `row ${duplicate.position + 1}` : duplicate.id errors.push( - `Column "${column.name}" must be unique. Value "${value}" already exists in row ${duplicate.id}` + `Column "${column.name}" must be unique. 
Value "${value}" already exists in ${rowLabel}` ) } } @@ -364,14 +366,14 @@ export async function checkUniqueConstraintsDb( : baseCondition const conflictingRow = await db - .select({ id: userTableRows.id }) + .select({ id: userTableRows.id, position: userTableRows.position }) .from(userTableRows) .where(whereClause) .limit(1) if (conflictingRow.length > 0) { errors.push( - `Column "${condition.column.name}" must be unique. Value "${condition.value}" already exists in row ${conflictingRow[0].id}` + `Column "${condition.column.name}" must be unique. Value "${condition.value}" already exists in row ${conflictingRow[0].position + 1}` ) } } @@ -474,6 +476,7 @@ export async function checkBatchUniqueConstraintsDb( .select({ id: userTableRows.id, data: userTableRows.data, + position: userTableRows.position, }) .from(userTableRows) .where(and(eq(userTableRows.tableId, tableId), or(...valueConditions))) @@ -504,7 +507,7 @@ export async function checkBatchUniqueConstraintsDb( rowErrors.push(rowError) } - const errorMsg = `Column "${columnName}" must be unique. Value "${rowValue}" already exists in row ${conflict.id}` + const errorMsg = `Column "${columnName}" must be unique. 
Value "${rowValue}" already exists in row ${conflict.position + 1}` if (!rowError.errors.includes(errorMsg)) { rowError.errors.push(errorMsg) } diff --git a/apps/sim/lib/table/workflow-columns.ts b/apps/sim/lib/table/workflow-columns.ts index b50ae47d081..71b091c2df1 100644 --- a/apps/sim/lib/table/workflow-columns.ts +++ b/apps/sim/lib/table/workflow-columns.ts @@ -6,11 +6,13 @@ */ import { db } from '@sim/db' -import { userTableRows } from '@sim/db/schema' +import { pausedExecutions, userTableRows } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { toError } from '@sim/utils/errors' import { generateId } from '@sim/utils/id' import { and, asc, eq, inArray, sql } from 'drizzle-orm' +import { getJobQueue } from '@/lib/core/async-jobs/config' +import type { EnqueueOptions } from '@/lib/core/async-jobs/types' import { buildCancelledExecution, writeWorkflowGroupState } from '@/lib/table/cell-write' import type { RowData, @@ -24,70 +26,123 @@ import type { const logger = createLogger('WorkflowGroupScheduler') +import { areGroupDepsSatisfied, areOutputsFilled, isExecInFlight } from './deps' + +export { + areGroupDepsSatisfied, + areOutputsFilled, + getUnmetGroupDeps, + isExecInFlight, + optimisticallyScheduleNewlyEligibleGroups, + type UnmetDeps, +} from './deps' + /** - * Returns true when every dependency this group needs is filled. Plain - * columns are filled when their value is non-empty; upstream groups are - * filled when `executions[gid].status === 'completed'`. Used both by the - * scheduler's eligibility check and by the manual "Run group" route, which - * needs the same gate WITHOUT the in-flight / terminal-state check. + * Per-(row, group) eligibility for both the auto-fire reactor and manual + * runs. Manual runs bypass the `autoRun === false` skip, and additionally + * bypass the dep check for `autoRun === false` groups (those are user-model + * "no deps, manual only"). 
+ * + * "Completed" status is treated as stale when any output cell is empty — the + * cells win over the exec metadata, so deleting an output value re-arms the + * row for the cascade and for manual incomplete-mode runs. */ -export function areGroupDepsSatisfied(group: WorkflowGroup, row: TableRow): boolean { - const deps = group.dependencies ?? {} - for (const colName of deps.columns ?? []) { - const value = row.data[colName] - if (value === null || value === undefined || value === '') return false - } - for (const gid of deps.workflowGroups ?? []) { - if (row.executions?.[gid]?.status !== 'completed') return false - } - return true -} - /** - * Per-(row, group) eligibility: returns true if a cell job should be enqueued - * for this pair right now. Skip when the group is in flight (`running`, or - * `pending` with a `jobId` already stamped) or in a terminal state. Plain - * `pending` without a jobId is the "ready to dispatch" state — the run route - * sets it and the scheduler is what actually enqueues the job. + * Reason codes the eligibility predicate emits. Stable strings so the caller + * can aggregate skip reasons into one summary log per scheduler call instead + * of allocating a per-cell debug line. */ -export function isGroupEligible(group: WorkflowGroup, row: TableRow): boolean { +export type EligibilityReason = + | 'eligible' + | 'autoRun-off' + | 'in-flight' + | 'completed-on-auto' + | 'error-on-auto' + | 'completed-on-incomplete' + | 'manual-bypass' + | 'deps-unmet' + +export function classifyEligibility( + group: WorkflowGroup, + row: TableRow, + opts?: { isManualRun?: boolean; mode?: 'all' | 'incomplete' } +): EligibilityReason { + const isManualRun = opts?.isManualRun ?? false + const mode = opts?.mode ?? 
'all' + + if (group.autoRun === false && !isManualRun) return 'autoRun-off' + const exec = row.executions?.[group.id] + if (isExecInFlight(exec)) return 'in-flight' const status = exec?.status - if ( - status === 'running' || - status === 'completed' || - status === 'error' || - status === 'cancelled' - ) { - return false - } - if (status === 'pending' && exec?.jobId) { - return false - } - return areGroupDepsSatisfied(group, row) + + const completedAndFilled = status === 'completed' && areOutputsFilled(group, row) + if (!isManualRun && completedAndFilled) return 'completed-on-auto' + // Auto-fire skips `error` to avoid infinite-retry loops on a deterministic + // failure. `cancelled` is left runnable — cancellation is user-initiated. + if (!isManualRun && status === 'error') return 'error-on-auto' + if (mode === 'incomplete' && completedAndFilled) return 'completed-on-incomplete' + + if (isManualRun && group.autoRun === false) return 'manual-bypass' + return areGroupDepsSatisfied(group, row) ? 'eligible' : 'deps-unmet' +} + +export function isGroupEligible( + group: WorkflowGroup, + row: TableRow, + opts?: { isManualRun?: boolean; mode?: 'all' | 'incomplete' } +): boolean { + return classifyEligibility(group, row, opts) === 'eligible' } /** - * Iterates workflow groups × rows and enqueues eligible cell jobs. Safe to - * call after any row-write; errors are logged. Concurrency is bounded by the - * trigger.dev queue (`concurrencyKey: tableId`), so this just enqueues. + * Shared options for the three `scheduleRuns*` entry points. `isManualRun` + * flips two gates in the eligibility predicate so a manual click can re-run + * terminal states and bypass the autoRun=false skip. */ -export async function scheduleWorkflowGroupRuns( +export interface ScheduleOpts { + groupId?: string + groupIds?: string[] + isManualRun?: boolean + mode?: 'all' | 'incomplete' +} + +/** + * Re-evaluate eligibility on these specific rows and enqueue runnable cells. 
+ * The hot path: every row write (insert / update / cascade) calls this with the + * just-written row(s). + */ +export async function scheduleRunsForRows( table: TableDefinition, - rows: TableRow[] -): Promise { + rows: TableRow[], + opts?: ScheduleOpts +): Promise<{ triggered: number }> { try { - const groups = table.schema.workflowGroups ?? [] - if (groups.length === 0) return - if (rows.length === 0) return + const allGroups = table.schema.workflowGroups ?? [] + if (allGroups.length === 0) return { triggered: 0 } + if (rows.length === 0) return { triggered: 0 } + + const groupIdFilter = opts?.groupIds + ? new Set(opts.groupIds) + : opts?.groupId + ? new Set([opts.groupId]) + : null + const groups = groupIdFilter ? allGroups.filter((g) => groupIdFilter.has(g.id)) : allGroups + if (groups.length === 0) return { triggered: 0 } const orderedRows = rows.length <= 1 ? rows : [...rows].sort((a, b) => a.position - b.position) const pendingRuns: RunGroupCellOptions[] = [] + const reasonCounts: Partial> = {} for (const row of orderedRows) { for (const group of groups) { - if (!isGroupEligible(group, row)) continue + const reason = classifyEligibility(group, row, { + isManualRun: opts?.isManualRun, + mode: opts?.mode, + }) + reasonCounts[reason] = (reasonCounts[reason] ?? 0) + 1 + if (reason !== 'eligible') continue pendingRuns.push({ tableId: table.id, tableName: table.name, @@ -100,13 +155,122 @@ export async function scheduleWorkflowGroupRuns( } } - if (pendingRuns.length === 0) return + logger.debug( + `[Cascade] table=${table.id} rows=${rows.length} groups=${groups.length} manual=${opts?.isManualRun ?? false} mode=${opts?.mode ?? 
'all'} reasons=${JSON.stringify(reasonCounts)}` + ) + + if (pendingRuns.length === 0) return { triggered: 0 } logger.info(`Scheduling ${pendingRuns.length} workflow group cell run(s) for table=${table.id}`) - await Promise.allSettled(pendingRuns.map((opts) => runWorkflowGroupCell(opts))) + const queue = await getJobQueue() + const { executeWorkflowGroupCellJob } = await import('@/background/workflow-column-execution') + const items = pendingRuns.map((runOpts) => ({ + payload: runOpts, + options: { + metadata: { + workflowId: runOpts.workflowId, + workspaceId: runOpts.workspaceId, + correlation: { + executionId: runOpts.executionId, + requestId: `wfgrp-${runOpts.executionId}`, + source: 'workflow' as const, + workflowId: runOpts.workflowId, + triggerType: 'table', + }, + }, + concurrencyKey: runOpts.tableId, + concurrencyLimit: TABLE_CONCURRENCY_LIMIT, + tags: [`tableId:${runOpts.tableId}`, `rowId:${runOpts.rowId}`, `group:${runOpts.groupId}`], + runner: executeWorkflowGroupCellJob as EnqueueOptions['runner'], + }, + })) + + let jobIds: string[] + try { + jobIds = await queue.batchEnqueue('workflow-group-cell', items) + } catch (err) { + logger.error(`Batch enqueue failed for table=${table.id}:`, err) + await Promise.allSettled( + pendingRuns.map((runOpts) => + writeWorkflowGroupState(runOpts, { + executionState: { + status: 'error', + executionId: runOpts.executionId, + jobId: null, + workflowId: runOpts.workflowId, + error: toError(err).message, + }, + }) + ) + ) + return { triggered: 0 } + } + + // Stamp `queued` in chunks of `TABLE_CONCURRENCY_LIMIT`. Within a chunk we + // parallelize the writes (no ordering constraint); across chunks we await + // serially so trigger.dev still picks rows up in submission order — the + // concurrency cap means at most one chunk is in flight per table anyway. 
+ for (let i = 0; i < pendingRuns.length; i += TABLE_CONCURRENCY_LIMIT) { + const chunk = pendingRuns.slice(i, i + TABLE_CONCURRENCY_LIMIT) + const ids = jobIds.slice(i, i + TABLE_CONCURRENCY_LIMIT) + await Promise.all(chunk.map((run, j) => stampQueuedOrCancel(queue, run, ids[j]))) + } + return { triggered: pendingRuns.length } } catch (err) { - logger.error('scheduleWorkflowGroupRuns failed:', err) + logger.error('scheduleRunsForRows failed:', err) + return { triggered: 0 } + } +} + +/** + * Re-evaluate eligibility on every row of the table. Used after schema changes + * (workflow group added, autoRun toggled on) where we don't have a list of + * just-written rows but need to fire any newly-eligible (row × group) pair. + */ +export async function scheduleRunsForTable( + table: TableDefinition, + opts?: ScheduleOpts +): Promise<{ triggered: number }> { + const rows = await fetchAllRows(table.id) + return scheduleRunsForRows(table, rows, opts) +} + +/** + * Re-evaluate eligibility on the rows with these ids. Sugar for callers that + * have row ids but not materialized rows. 
+ */ +export async function scheduleRunsForRowIds( + table: TableDefinition, + rowIds: string[], + opts?: ScheduleOpts +): Promise<{ triggered: number }> { + if (rowIds.length === 0) return { triggered: 0 } + const rows = await fetchRowsByIds(table.id, rowIds) + return scheduleRunsForRows(table, rows, opts) +} + +async function fetchAllRows(tableId: string): Promise { + const records = await db.select().from(userTableRows).where(eq(userTableRows.tableId, tableId)) + return records.map(toTableRow) +} + +async function fetchRowsByIds(tableId: string, rowIds: string[]): Promise { + const records = await db + .select() + .from(userTableRows) + .where(and(eq(userTableRows.tableId, tableId), inArray(userTableRows.id, rowIds))) + return records.map(toTableRow) +} + +function toTableRow(r: typeof userTableRows.$inferSelect): TableRow { + return { + id: r.id, + data: r.data as RowData, + executions: (r.executions as RowExecutions) ?? {}, + position: r.position, + createdAt: r.createdAt, + updatedAt: r.updatedAt, } } @@ -120,127 +284,38 @@ interface RunGroupCellOptions { executionId: string } -/** - * Enqueues a workflow-group cell run as a `workflow-group-cell` async job - * and writes `running` (with the returned `jobId`) onto the row's - * `executions[groupId]`. The actual workflow execution and terminal write - * happen inside the cell task body. Cancellation is authoritative via - * `cancelWorkflowGroupRuns`. 
- */ -export async function runWorkflowGroupCell(opts: RunGroupCellOptions): Promise { - const { tableId, tableName, rowId, groupId, workflowId, workspaceId, executionId } = opts - - const { getJobQueue, shouldExecuteInline } = await import('@/lib/core/async-jobs/config') - const cellCtx = { tableId, rowId, workspaceId, groupId, executionId } - - const taskPayload = { - tableId, - tableName, - rowId, - groupId, - workflowId, - workspaceId, - executionId, - } - let jobId: string - let queue: Awaited> - try { - queue = await getJobQueue() - jobId = await queue.enqueue('workflow-group-cell', taskPayload, { - metadata: { - workflowId, - workspaceId, - correlation: { - executionId, - requestId: `wfgrp-${executionId}`, - source: 'workflow', - workflowId, - triggerType: 'table', - }, - }, - // Per-table sub-queue throttles cells within a table without blocking other tables. - concurrencyKey: tableId, - tags: [`tableId:${tableId}`, `rowId:${rowId}`, `group:${groupId}`], - }) - } catch (err) { - const message = toError(err).message - logger.error( - `Failed to enqueue workflow-group-cell (table=${tableId} row=${rowId} group=${groupId}):`, - err - ) - await writeWorkflowGroupState(cellCtx, { - executionState: { - status: 'error', - executionId, - jobId: null, - workflowId, - error: message, - }, - }) - return - } +/** Per-table concurrency cap. Mirrors trigger.dev's `concurrencyLimit: 20`. */ +const TABLE_CONCURRENCY_LIMIT = 20 - // Single post-enqueue write: stamps `running` + jobId so the cancel API can - // reach this run from any pod. If cancel won the race the helper bails and - // we abort the just-enqueued job. 
+async function stampQueuedOrCancel( + queue: Awaited>, + opts: RunGroupCellOptions, + jobId: string +): Promise { let stampResult: 'wrote' | 'skipped' = 'wrote' try { - stampResult = await writeWorkflowGroupState(cellCtx, { + stampResult = await writeWorkflowGroupState(opts, { executionState: { - status: 'running', - executionId, + status: 'queued', + executionId: opts.executionId, jobId, - workflowId, + workflowId: opts.workflowId, error: null, }, }) } catch (err) { logger.error( - `Failed to persist jobId on group execution (table=${tableId} row=${rowId} group=${groupId}):`, + `Failed to stamp queued state (table=${opts.tableId} row=${opts.rowId} group=${opts.groupId}):`, err ) } + if (stampResult === 'skipped') { try { await queue.cancelJob(jobId) } catch (cancelErr) { logger.error(`Failed to cancel orphaned workflow-group-cell job (jobId=${jobId}):`, cancelErr) } - return - } - - // Trigger.dev disabled — execute the task body inline (DB queue records - // rows but doesn't dispatch), mirroring `workflow-execution`. 
- if (shouldExecuteInline()) { - const { registerInlineAbort, unregisterInlineAbort } = await import( - '@/lib/core/async-jobs/inline-abort' - ) - const abortController = new AbortController() - registerInlineAbort(jobId, abortController) - - void (async () => { - try { - const { executeWorkflowGroupCellJob } = await import( - '@/background/workflow-column-execution' - ) - await queue.startJob(jobId) - await executeWorkflowGroupCellJob(taskPayload, abortController.signal) - await queue.completeJob(jobId, null) - } catch (err) { - const message = toError(err).message - logger.error( - `Inline workflow-group-cell failed (jobId=${jobId} table=${tableId} row=${rowId} group=${groupId}):`, - err - ) - try { - await queue.markJobFailed(jobId, message) - } catch (markErr) { - logger.error('Also failed to mark job as failed:', markErr) - } - } finally { - unregisterInlineAbort(jobId) - } - })() } } @@ -295,9 +370,11 @@ export async function cancelWorkflowGroupRuns(tableId: string, rowId?: string): let cancelledCount = 0 for (const [gid, exec] of Object.entries(executions)) { if (!groupIds.has(gid)) continue - // `pending` covers the post-reset, pre-dispatch window — a stop click - // there must still stick once the scheduler picks the row up. - if (exec.status !== 'running' && exec.status !== 'pending') continue + // `pending` covers the post-reset, pre-dispatch window; `queued` covers + // the post-enqueue, pre-pickup window — a stop click in either state + // must still stick once the worker picks the row up. + if (exec.status !== 'running' && exec.status !== 'queued' && exec.status !== 'pending') + continue if (exec.jobId) jobIds.push(exec.jobId) executionsPatch[gid] = buildCancelledExecution(exec) cancelledCount++ @@ -318,6 +395,11 @@ export async function cancelWorkflowGroupRuns(tableId: string, rowId?: string): ) ) ) + // `skipScheduler: true` — we're tearing rows down, not waking them up. 
The + // auto-fire reactor would otherwise see independent (row, group) pairs whose + // deps are now satisfied (because the upstream group already wrote its + // output before the cancel) and re-enqueue them, which is exactly what the + // user clicked Stop to prevent. await Promise.allSettled( mutations.map((m) => updateRow( @@ -327,6 +409,7 @@ export async function cancelWorkflowGroupRuns(tableId: string, rowId?: string): data: {}, workspaceId: table.workspaceId, executionsPatch: m.executionsPatch, + skipScheduler: true, }, table, `wfgrp-cancel-${m.rowId}` @@ -340,50 +423,38 @@ export async function cancelWorkflowGroupRuns(tableId: string, rowId?: string): } /** - * Manually triggers a workflow group for every dep-satisfied row in a table. - * `mode: 'all'` re-runs every eligible row; `mode: 'incomplete'` skips rows - * whose group is already `completed`. When `rowIds` is provided, only those - * rows are candidates — the same eligibility predicate still applies, so a - * mid-run row or one with unmet deps is silently skipped. Eligible rows have - * their output cells cleared and their `executions[groupId]` reset to - * `pending`; the scheduler picks them up and enqueues per-cell jobs. Returns - * the number of rows that were marked for re-run. Used by the - * `groups/[groupId]/run` HTTP route and the Copilot/Mothership - * `run_workflow_group` op so both share one eligibility predicate. + * Run a set of groups across the table or a row subset. Single canonical + * user-driven run op — every UI gesture (single cell, per-row Play, action-bar + * Play/Refresh, column-header menu) reduces to this. `mode: 'all'` re-runs + * completed cells; `mode: 'incomplete'` skips them. `groupIds` omitted = every + * workflow group on the table. `rowIds` omitted = every row. 
*/ -export async function triggerWorkflowGroupRun(opts: { +export async function runWorkflowColumn(opts: { tableId: string - groupId: string workspaceId: string mode: 'all' | 'incomplete' requestId: string + groupIds?: string[] rowIds?: string[] }): Promise<{ triggered: number }> { - const { tableId, groupId, workspaceId, mode, requestId, rowIds } = opts + const { tableId, workspaceId, mode, requestId, groupIds, rowIds } = opts const { getTableById, batchUpdateRows } = await import('./service') const table = await getTableById(tableId) if (!table) throw new Error('Table not found') if (table.workspaceId !== workspaceId) throw new Error('Invalid workspace ID') - const group = (table.schema.workflowGroups ?? []).find((g) => g.id === groupId) - if (!group) throw new Error('Workflow group not found') - - // Push the in-flight / terminal-state filters into SQL so we don't pull - // every row in the table into Node just to discard most of them. Dependency - // satisfaction is still checked in JS afterwards (it can span multiple - // columns and other groups' statuses, so it's awkward to express in JSONB). - const filters = [ - eq(userTableRows.tableId, tableId), - eq(userTableRows.workspaceId, workspaceId), - sql`(executions->${groupId}->>'status') IS DISTINCT FROM 'running'`, - sql`((executions->${groupId}->>'status') IS DISTINCT FROM 'pending' OR (executions->${groupId}->>'jobId') IS NULL)`, - ] + const allGroups = table.schema.workflowGroups ?? [] + const targetGroups = groupIds ? allGroups.filter((g) => groupIds.includes(g.id)) : allGroups + if (targetGroups.length === 0) return { triggered: 0 } + + logger.info( + `[Cascade] [${requestId}] manual run table=${tableId} groups=[${targetGroups.map((g) => g.id).join(',')}] rows=${rowIds ? 
`[${rowIds.join(',')}]` : 'all'} mode=${mode}` + ) + + const filters = [eq(userTableRows.tableId, tableId), eq(userTableRows.workspaceId, workspaceId)] if (rowIds && rowIds.length > 0) { filters.push(inArray(userTableRows.id, rowIds)) } - if (mode === 'incomplete') { - filters.push(sql`(executions->${groupId}->>'status') IS DISTINCT FROM 'completed'`) - } const candidateRows = await db .select({ id: userTableRows.id, @@ -399,7 +470,15 @@ export async function triggerWorkflowGroupRun(opts: { if (candidateRows.length === 0) return { triggered: 0 } - const eligibleRows = candidateRows.filter((r) => { + // Per-row: collect eligible groups, build cleared data + executionsPatch. + type Update = { + rowId: string + data: RowData + executionsPatch: Record + } + const updates: Update[] = [] + const clearedRows: TableRow[] = [] + for (const r of candidateRows) { const tableRow: TableRow = { id: r.id, data: r.data as RowData, @@ -408,37 +487,65 @@ export async function triggerWorkflowGroupRun(opts: { createdAt: r.createdAt, updatedAt: r.updatedAt, } - try { - return areGroupDepsSatisfied(group, tableRow) - } catch { - return false + const eligibleGroups = targetGroups.filter((g) => + isGroupEligible(g, tableRow, { isManualRun: true, mode }) + ) + if (eligibleGroups.length === 0) continue + + const clearedData: RowData = {} + const executionsPatch: Record = {} + for (const g of eligibleGroups) { + for (const o of g.outputs) clearedData[o.columnName] = null + executionsPatch[g.id] = null } - }) + updates.push({ rowId: r.id, data: clearedData, executionsPatch }) + + const remainingExec = { ...tableRow.executions } + for (const g of eligibleGroups) delete remainingExec[g.id] + clearedRows.push({ + ...tableRow, + data: { ...tableRow.data, ...clearedData }, + executions: remainingExec, + }) + } - if (eligibleRows.length === 0) return { triggered: 0 } + if (updates.length === 0) return { triggered: 0 } - const clearedData = Object.fromEntries(group.outputs.map((o) => [o.columnName, 
null])) as RowData - const updates = eligibleRows.map((r) => { - const pendingExec: RowExecutionMetadata = { - status: 'pending', - executionId: generateId(), - jobId: null, - workflowId: group.workflowId, - error: null, - } - return { - rowId: r.id, - data: clearedData, - executionsPatch: { [groupId]: pendingExec }, - } - }) + // `skipScheduler: true` because we fire `scheduleRunsForRows` ourselves + // below with `isManualRun: true`. Without the skip, batchUpdateRows runs the + // auto-fire reactor first and any autoRun=true sibling group whose deps are + // satisfied would race the manual call. + await batchUpdateRows({ tableId, updates, workspaceId, skipScheduler: true }, table, requestId) - const opResult = await batchUpdateRows({ tableId, updates, workspaceId }, table, requestId) - return { triggered: opResult.affectedCount } + return scheduleRunsForRows(table, clearedRows, { + isManualRun: true, + groupIds: targetGroups.map((g) => g.id), + mode, + }) } // ───────────────────────────── Validation ───────────────────────────── +/** +/** + * Removes the given column names from a group's `dependencies.columns`. When + * the resulting list is empty, drops the `dependencies` field entirely so + * schema validation doesn't see an empty-deps object. Returns the same group + * reference when nothing changed. + */ +export function stripGroupDeps(group: WorkflowGroup, removed: ReadonlySet): WorkflowGroup { + const cols = group.dependencies?.columns + if (!cols || cols.length === 0) return group + const filtered = cols.filter((d) => !removed.has(d)) + if (filtered.length === cols.length) return group + return { + ...group, + ...(filtered.length > 0 + ? { dependencies: { columns: filtered } } + : { dependencies: undefined }), + } +} + /** * Validates schema-level invariants. 
Run on every `addTableColumn`, * `addWorkflowGroup`, `updateWorkflowGroup`, `renameColumn`, `reorderColumns`, @@ -502,34 +609,27 @@ export function validateSchema(schema: TableSchema, columnOrder: string[] | unde } } - // Dependency integrity. + // Dependency integrity. Deps are columns only — workflow output columns are + // valid deps too (the upstream group fills them, downstream becomes eligible + // when filled). A group can't depend on its own outputs. for (const group of groups) { - const deps = group.dependencies ?? {} - for (const depCol of deps.columns ?? []) { + const ownOutputs = new Set(group.outputs.map((o) => o.columnName)) + for (const depCol of group.dependencies?.columns ?? []) { const col = columnsByName.get(depCol) if (!col) { errors.push(`Group "${group.name ?? group.id}" depends on missing column "${depCol}".`) continue } - if (col.workflowGroupId) { + if (ownOutputs.has(depCol)) { errors.push( - `Group "${group.name ?? group.id}" depends on workflow-output column "${depCol}". Depend on the producing group instead.` + `Group "${group.name ?? group.id}" depends on its own output column "${depCol}".` ) } } - for (const depGroup of deps.workflowGroups ?? []) { - if (!groupsById.has(depGroup)) { - errors.push( - `Group "${group.name ?? group.id}" depends on missing workflow group "${depGroup}".` - ) - } - if (depGroup === group.id) { - errors.push(`Group "${group.name ?? group.id}" depends on itself.`) - } - } } - // Cycle detection on the group dependency graph. + // Cycle detection on the column-induced group graph. An edge A → B exists + // when B depends on a column that A produces. const cycle = findGroupCycle(groups) if (cycle) { errors.push( @@ -549,11 +649,25 @@ export function validateSchema(schema: TableSchema, columnOrder: string[] | unde return errors } -/** Returns the cycle as an ordered list of group ids, or null if acyclic. */ +/** + * Returns the cycle as an ordered list of group ids, or null if acyclic. 
Edges + * are induced by columns: an edge A → B exists iff B depends on a column that + * A produces. + */ function findGroupCycle(groups: WorkflowGroup[]): string[] | null { + // Map each output column → the group that produces it. + const producerByColumn = new Map() + for (const g of groups) { + for (const o of g.outputs) producerByColumn.set(o.columnName, g.id) + } const adjacency = new Map() for (const g of groups) { - adjacency.set(g.id, g.dependencies?.workflowGroups ?? []) + const upstream = new Set() + for (const depCol of g.dependencies?.columns ?? []) { + const producer = producerByColumn.get(depCol) + if (producer && producer !== g.id) upstream.add(producer) + } + adjacency.set(g.id, [...upstream]) } const VISITING = 1 const VISITED = 2 @@ -590,6 +704,75 @@ interface SplitGroupReport { actual: number[] } +/** + * Cell context stored on `paused_executions.metadata` so the resume worker + * can route post-resume block outputs back to the same `(tableId, rowId, + * groupId)` cell — i.e., one logical cell execution across pause/resume + * cycles instead of two. + */ +export interface CellResumeContext { + tableId: string + tableName: string + rowId: string + groupId: string + workspaceId: string + workflowId: string +} + +interface PausedMetadataPatch { + cellContext?: CellResumeContext + [key: string]: unknown +} + +/** + * Stash the cell context on the matching `paused_executions` row. Called + * by the cell task right after it writes the `pending`/paused state. The + * pause record was written by `PauseResumeManager.persistPauseResult` + * before `executeWorkflow` returned, so the row exists. 
+ */ +export async function stashCellContextForResume( + ctx: CellResumeContext & { executionId: string } +): Promise { + const { executionId, ...cellContext } = ctx + try { + const patch: PausedMetadataPatch = { cellContext } + await db + .update(pausedExecutions) + .set({ + metadata: sql`coalesce(${pausedExecutions.metadata}, '{}'::jsonb) || ${JSON.stringify(patch)}::jsonb`, + updatedAt: new Date(), + }) + .where(eq(pausedExecutions.executionId, executionId)) + } catch (err) { + logger.error( + `Failed to stash cell context on paused_executions (executionId=${executionId}):`, + err + ) + } +} + +/** + * Returns the cell context for an execution if one was stashed at pause + * time. Used by the resume worker to know whether the workflow it's about + * to resume belongs to a table cell — and if so, where to write outputs. + */ +export async function findCellContextByExecutionId( + executionId: string +): Promise { + try { + const [row] = await db + .select({ metadata: pausedExecutions.metadata }) + .from(pausedExecutions) + .where(eq(pausedExecutions.executionId, executionId)) + .limit(1) + const meta = row?.metadata as PausedMetadataPatch | null + return meta?.cellContext ?? null + } catch (err) { + logger.error(`Failed to read cell context for executionId=${executionId}:`, err) + return null + } +} + /** * Returns groups whose output columns occupy non-contiguous positions in the * given columnOrder. Empty array means all groups are cohesive. 
diff --git a/apps/sim/tools/exa/search.ts b/apps/sim/tools/exa/search.ts index d1a09739ede..c3b2c5c779d 100644 --- a/apps/sim/tools/exa/search.ts +++ b/apps/sim/tools/exa/search.ts @@ -127,7 +127,7 @@ export const searchTool: ToolConfig = { }, rateLimit: { mode: 'per_request', - requestsPerMinute: 5, + requestsPerMinute: 60, }, }, diff --git a/scripts/check-api-validation-contracts.ts b/scripts/check-api-validation-contracts.ts index 30456062079..bbf0d0f2494 100644 --- a/scripts/check-api-validation-contracts.ts +++ b/scripts/check-api-validation-contracts.ts @@ -9,8 +9,8 @@ const QUERY_HOOKS_DIR = path.join(ROOT, 'apps/sim/hooks/queries') const SELECTOR_HOOKS_DIR = path.join(ROOT, 'apps/sim/hooks/selectors') const BASELINE = { - totalRoutes: 735, - zodRoutes: 735, + totalRoutes: 734, + zodRoutes: 734, nonZodRoutes: 0, } as const From d081ab283e5f3e036bfbb6c4941a77afe08486a3 Mon Sep 17 00:00:00 2001 From: Waleed Date: Thu, 7 May 2026 12:54:42 -0700 Subject: [PATCH 02/33] fix(logs): relax fileSchema so execution logs with files render again (#4495) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(logs): relax fileSchema so execution logs with files render again * improvement(logs): align fileSchema with shared UserFile type - contracts/logs.ts: replace local fileSchema with mediaUserFileSchema (the established UserFile boundary schema with .passthrough()) - file-download.tsx: drop local FileData interface, use UserFile from @/executor/types * improvement(contracts): promote userFileSchema to primitives Move the canonical UserFile boundary schema out of tools/media/shared.ts (where it didn't belong — logs aren't media tools) into primitives.ts as userFileSchema. Update logs, stt, and video contracts to import from the shared primitive. 
Co-Authored-By: Claude Opus 4.7 --------- Co-authored-by: Claude Opus 4.7 --- .../file-download/file-download.tsx | 23 +++++-------------- apps/sim/lib/api/contracts/logs.ts | 16 ++----------- apps/sim/lib/api/contracts/primitives.ts | 20 ++++++++++++++++ .../lib/api/contracts/tools/media/shared.ts | 13 ----------- apps/sim/lib/api/contracts/tools/media/stt.ts | 5 ++-- .../lib/api/contracts/tools/media/video.ts | 5 ++-- 6 files changed, 34 insertions(+), 48 deletions(-) diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/components/file-download/file-download.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/components/file-download/file-download.tsx index 23670cf2c02..9a2e09ce471 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/components/file-download/file-download.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/components/file-download/file-download.tsx @@ -5,31 +5,20 @@ import { createLogger } from '@sim/logger' import { ArrowDown } from 'lucide-react' import { useRouter } from 'next/navigation' import { Button, Loader } from '@/components/emcn' +import { cn } from '@/lib/core/utils/cn' import { extractWorkspaceIdFromExecutionKey, getViewerUrl } from '@/lib/uploads/utils/file-utils' +import type { UserFile } from '@/executor/types' const logger = createLogger('FileCards') -interface FileData { - id?: string - name: string - size: number - type: string - key: string - url: string - uploadedAt: string - expiresAt: string - storageProvider?: 's3' | 'blob' | 'local' - bucketName?: string -} - interface FileCardsProps { - files: FileData[] + files: UserFile[] isExecutionFile?: boolean workspaceId?: string } interface FileCardProps { - file: FileData + file: UserFile isExecutionFile?: boolean workspaceId?: string } @@ -157,7 +146,7 @@ export function FileDownload({ className, workspaceId, }: { - file: FileData + file: UserFile isExecutionFile?: boolean className?: 
string workspaceId?: string @@ -220,7 +209,7 @@ export function FileDownload({ return ( diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/general/general.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/general/general.tsx index 695d6c95cf5..1a41d9f6299 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/general/general.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/general/general.tsx @@ -1,6 +1,6 @@ 'use client' -import { useCallback, useMemo, useState } from 'react' +import { useCallback, useEffect, useMemo, useState } from 'react' import { createLogger } from '@sim/logger' import { Button, @@ -17,8 +17,10 @@ import { Tooltip, } from '@/components/emcn' import type { WorkflowDeploymentVersionResponse } from '@/lib/workflows/persistence/utils' +import type { DeployReadiness } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-deploy-readiness' import { Preview, PreviewWorkflow } from '@/app/workspace/[workspaceId]/w/components/preview' import { useDeploymentVersionState, useRevertToVersion } from '@/hooks/queries/workflows' +import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import type { WorkflowState } from '@/stores/workflows/workflow/types' import { Versions } from './components' @@ -30,8 +32,11 @@ interface GeneralDeployProps { isLoadingDeployedState: boolean versions: WorkflowDeploymentVersionResponse[] versionsLoading: boolean + isPromotingVersion: boolean + deployReadiness: DeployReadiness onPromoteToLive: (version: number) => Promise onLoadDeploymentComplete: () => void + onLoadDeploymentBlocked: (message: string) => void } type PreviewMode = 'active' | 'selected' 
@@ -45,8 +50,11 @@ export function GeneralDeploy({ isLoadingDeployedState, versions, versionsLoading, + isPromotingVersion, + deployReadiness, onPromoteToLive, onLoadDeploymentComplete, + onLoadDeploymentBlocked, }: GeneralDeployProps) { const [selectedVersion, setSelectedVersion] = useState(null) const [showActiveDespiteSelection, setShowActiveDespiteSelection] = useState(false) @@ -56,12 +64,18 @@ export function GeneralDeploy({ const [showLoadDialog, setShowLoadDialog] = useState(false) const [showPromoteDialog, setShowPromoteDialog] = useState(false) const [showExpandedPreview, setShowExpandedPreview] = useState(false) - const [versionToLoad, setVersionToLoad] = useState(null) - const [versionToPromote, setVersionToPromote] = useState(null) + const [versionToLoad, setVersionToLoad] = useState<{ + workflowId: string + version: number + } | null>(null) + const [versionToPromote, setVersionToPromote] = useState<{ + workflowId: string + version: number + } | null>(null) const selectedVersionInfo = versions.find((v) => v.version === selectedVersion) - const versionToPromoteInfo = versions.find((v) => v.version === versionToPromote) - const versionToLoadInfo = versions.find((v) => v.version === versionToLoad) + const versionToPromoteInfo = versions.find((v) => v.version === versionToPromote?.version) + const versionToLoadInfo = versions.find((v) => v.version === versionToLoad?.version) const { data: selectedVersionState } = useDeploymentVersionState(workflowId, selectedVersion) @@ -72,40 +86,82 @@ export function GeneralDeploy({ setShowActiveDespiteSelection(false) }, []) - const handleLoadDeployment = useCallback((version: number) => { - setVersionToLoad(version) - setShowLoadDialog(true) - }, []) + const handleLoadDeployment = useCallback( + (version: number) => { + if (!workflowId) return + setVersionToLoad({ workflowId, version }) + setShowLoadDialog(true) + }, + [workflowId] + ) - const handlePromoteToLive = useCallback((version: number) => { - 
setVersionToPromote(version) - setShowPromoteDialog(true) - }, []) + const handlePromoteToLive = useCallback( + (version: number) => { + if (!workflowId) return + setVersionToPromote({ workflowId, version }) + setShowPromoteDialog(true) + }, + [workflowId] + ) const confirmLoadDeployment = async () => { - if (!workflowId || versionToLoad === null) return + if (!versionToLoad) return + const target = versionToLoad + if (!(await deployReadiness.waitUntilReady())) { + if ( + workflowId !== target.workflowId || + useWorkflowRegistry.getState().activeWorkflowId !== target.workflowId + ) { + setShowLoadDialog(false) + setVersionToLoad(null) + return + } + onLoadDeploymentBlocked(deployReadiness.tooltip) + return + } + if ( + workflowId !== target.workflowId || + useWorkflowRegistry.getState().activeWorkflowId !== target.workflowId + ) { + setShowLoadDialog(false) + setVersionToLoad(null) + return + } setShowLoadDialog(false) - const version = versionToLoad setVersionToLoad(null) try { - await revertMutation.mutateAsync({ workflowId, version }) + await revertMutation.mutateAsync({ workflowId: target.workflowId, version: target.version }) onLoadDeploymentComplete() } catch (error) { logger.error('Failed to load deployment:', error) } } + useEffect(() => { + setShowLoadDialog(false) + setVersionToLoad(null) + setShowPromoteDialog(false) + setVersionToPromote(null) + }, [workflowId]) + const confirmPromoteToLive = async () => { - if (versionToPromote === null) return + if (!versionToPromote || isPromotingVersion) return + const target = versionToPromote setShowPromoteDialog(false) - const version = versionToPromote setVersionToPromote(null) + if ( + workflowId !== target.workflowId || + useWorkflowRegistry.getState().activeWorkflowId !== target.workflowId + ) { + return + } + try { - await onPromoteToLive(version) + await onPromoteToLive(target.version) } catch (error) { logger.error('Failed to promote version:', error) } @@ -221,6 +277,7 @@ export function GeneralDeploy({ 
workflowId={workflowId} versions={versions} versionsLoading={versionsLoading} + isPromotingVersion={isPromotingVersion} selectedVersion={selectedVersion} onSelectVersion={handleSelectVersion} onPromoteToLive={handlePromoteToLive} @@ -236,7 +293,7 @@ export function GeneralDeploy({

Are you sure you want to load{' '} - {versionToLoadInfo?.name || `v${versionToLoad}`} + {versionToLoadInfo?.name || `v${versionToLoad?.version}`} ?{' '} @@ -262,7 +319,7 @@ export function GeneralDeploy({

Are you sure you want to promote{' '} - {versionToPromoteInfo?.name || `v${versionToPromote}`} + {versionToPromoteInfo?.name || `v${versionToPromote?.version}`} {' '} to live?{' '} @@ -274,7 +331,7 @@ export function GeneralDeploy({ - diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/deploy-modal.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/deploy-modal.tsx index 38df5590737..1103e3d7fe6 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/deploy-modal.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/deploy-modal.tsx @@ -2,11 +2,13 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react' import { createLogger } from '@sim/logger' +import { toError } from '@sim/utils/errors' import { useQueryClient } from '@tanstack/react-query' import { useParams } from 'next/navigation' import { Badge, Button, + Loader, Modal, ModalBody, ModalContent, @@ -21,6 +23,12 @@ import { getBaseUrl } from '@/lib/core/utils/urls' import { getInputFormatExample as getInputFormatExampleUtil } from '@/lib/workflows/operations/deployment-utils' import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider' import { CreateApiKeyModal } from '@/app/workspace/[workspaceId]/settings/components/api-keys/components' +import { + releaseDeployAction, + tryAcquireDeployAction, +} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/deploy-action-lock' +import { syncLocalDraftFromServer } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/sync-local-draft' +import type { DeployReadiness } from 
'@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-deploy-readiness' import { runPreDeployChecks } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-predeploy-checks' import { startsWithUuid } from '@/executor/constants' import { useA2AAgentByWorkflow } from '@/hooks/queries/a2a/agents' @@ -40,6 +48,7 @@ import { useWorkflowMap } from '@/hooks/queries/workflows' import { useWorkspaceSettings } from '@/hooks/queries/workspace' import { usePermissionConfig } from '@/hooks/use-permission-config' import { useSettingsNavigation } from '@/hooks/use-settings-navigation' +import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { mergeSubblockState } from '@/stores/workflows/utils' import { useWorkflowStore } from '@/stores/workflows/workflow/store' import type { WorkflowState } from '@/stores/workflows/workflow/types' @@ -62,6 +71,8 @@ interface DeployModalProps { needsRedeployment: boolean deployedState?: WorkflowState | null isLoadingDeployedState: boolean + deployReadiness: DeployReadiness + isDeploymentSettling: boolean } interface WorkflowDeploymentInfoUI { @@ -84,6 +95,8 @@ export function DeployModal({ needsRedeployment, deployedState, isLoadingDeployedState, + deployReadiness, + isDeploymentSettling, }: DeployModalProps) { const queryClient = useQueryClient() const params = useParams() @@ -97,10 +110,12 @@ export function DeployModal({ const [chatSubmitting, setChatSubmitting] = useState(false) const [deployError, setDeployError] = useState(null) const [deployWarnings, setDeployWarnings] = useState([]) + const [isFinalizingDeploy, setIsFinalizingDeploy] = useState(false) + const [isActivatingVersion, setIsActivatingVersion] = useState(false) const [isChatFormValid, setIsChatFormValid] = useState(false) const [selectedStreamingOutputs, setSelectedStreamingOutputs] = useState([]) - const [showUndeployConfirm, setShowUndeployConfirm] = useState(false) + const 
[undeployTargetWorkflowId, setUndeployTargetWorkflowId] = useState(null) // const [templateFormValid, setTemplateFormValid] = useState(false) // const [templateSubmitting, setTemplateSubmitting] = useState(false) const [mcpToolSubmitting, setMcpToolSubmitting] = useState(false) @@ -112,6 +127,8 @@ export function DeployModal({ const [chatSuccess, setChatSuccess] = useState(false) const chatSuccessTimeoutRef = useRef | null>(null) + const deployActionIdRef = useRef(0) + const activateVersionInFlightRef = useRef(false) const [isCreateKeyModalOpen, setIsCreateKeyModalOpen] = useState(false) const [isApiInfoModalOpen, setIsApiInfoModalOpen] = useState(false) @@ -176,6 +193,35 @@ export function DeployModal({ const versions = versionsData?.versions ?? [] + const isWorkflowStillActive = useCallback((targetWorkflowId: string) => { + return useWorkflowRegistry.getState().activeWorkflowId === targetWorkflowId + }, []) + + const syncDraftAfterDeploy = useCallback(async (): Promise => { + if (!workflowId) return null + + try { + const syncedActiveWorkflow = await syncLocalDraftFromServer(workflowId) + if (!syncedActiveWorkflow && isWorkflowStillActive(workflowId)) { + return 'Deployment succeeded, but local sync is still catching up. Refresh if the status looks stale.' + } + return null + } catch (error) { + if (!isWorkflowStillActive(workflowId)) return null + logger.warn('Workflow deployed, but local draft sync failed', { + workflowId, + error: toError(error).message, + }) + return 'Deployment succeeded, but local sync failed. Refresh if the status looks stale.' 
+ } + }, [workflowId, isWorkflowStillActive]) + + useEffect(() => { + deployActionIdRef.current += 1 + setIsFinalizingDeploy(false) + setUndeployTargetWorkflowId(null) + }, [workflowId]) + const getApiKeyLabel = useCallback( (value?: string | null) => { if (value && value.trim().length > 0) { @@ -285,87 +331,157 @@ export function DeployModal({ const onDeploy = useCallback(async () => { if (!workflowId) return + if (!tryAcquireDeployAction(workflowId)) return + const actionId = deployActionIdRef.current + 1 + deployActionIdRef.current = actionId + setIsFinalizingDeploy(true) setDeployError(null) setDeployWarnings([]) try { - // Deploy mutation handles query invalidation in its onSuccess callback - const result = await deployMutation.mutateAsync({ workflowId }) - if (result.warnings && result.warnings.length > 0) { - setDeployWarnings(result.warnings) + if (!(await deployReadiness.waitUntilReady())) { + if (!isWorkflowStillActive(workflowId) || deployActionIdRef.current !== actionId) return + setDeployError(deployReadiness.tooltip) + return + } + if (!isWorkflowStillActive(workflowId) || deployActionIdRef.current !== actionId) return + + try { + const result = await deployMutation.mutateAsync({ workflowId }) + const syncWarning = await syncDraftAfterDeploy() + if (!isWorkflowStillActive(workflowId) || deployActionIdRef.current !== actionId) return + setDeployWarnings([...(result.warnings || []), ...(syncWarning ? [syncWarning] : [])]) + } finally { + if (deployActionIdRef.current === actionId) { + setIsFinalizingDeploy(false) + } } } catch (error: unknown) { + if (deployActionIdRef.current !== actionId) return + if (!isWorkflowStillActive(workflowId)) return logger.error('Error deploying workflow:', { error }) - const errorMessage = error instanceof Error ? 
error.message : 'Failed to deploy workflow' + const errorMessage = toError(error).message || 'Failed to deploy workflow' setDeployError(errorMessage) + } finally { + releaseDeployAction(workflowId) + if (deployActionIdRef.current === actionId) { + setIsFinalizingDeploy(false) + } } - }, [workflowId, deployMutation]) + }, [workflowId, deployMutation, deployReadiness, syncDraftAfterDeploy, isWorkflowStillActive]) const handlePromoteToLive = useCallback( async (version: number) => { if (!workflowId) return + if (activateVersionInFlightRef.current) return + activateVersionInFlightRef.current = true + setIsActivatingVersion(true) setDeployWarnings([]) try { const result = await activateVersionMutation.mutateAsync({ workflowId, version }) + if (!isWorkflowStillActive(workflowId)) return if (result.warnings && result.warnings.length > 0) { setDeployWarnings(result.warnings) } } catch (error) { + if (!isWorkflowStillActive(workflowId)) return logger.error('Error promoting version:', { error }) throw error + } finally { + activateVersionInFlightRef.current = false + setIsActivatingVersion(false) } }, - [workflowId, activateVersionMutation] + [workflowId, activateVersionMutation, isWorkflowStillActive] ) const handleUndeploy = useCallback(async () => { - if (!workflowId) return + if (!undeployTargetWorkflowId) return + const targetWorkflowId = undeployTargetWorkflowId + if (workflowId !== targetWorkflowId || !isWorkflowStillActive(targetWorkflowId)) { + setUndeployTargetWorkflowId(null) + return + } + + setDeployWarnings([]) try { - await undeployMutation.mutateAsync({ workflowId }) - setShowUndeployConfirm(false) + const result = await undeployMutation.mutateAsync({ workflowId: targetWorkflowId }) + if (!isWorkflowStillActive(targetWorkflowId)) return + setUndeployTargetWorkflowId(null) + if (result.warnings && result.warnings.length > 0) { + setDeployWarnings(result.warnings) + return + } onOpenChange(false) } catch (error: unknown) { + if 
(!isWorkflowStillActive(targetWorkflowId)) return logger.error('Error undeploying workflow:', { error }) } - }, [workflowId, undeployMutation, onOpenChange]) + }, [workflowId, undeployTargetWorkflowId, undeployMutation, onOpenChange, isWorkflowStillActive]) const handleRedeploy = useCallback(async () => { if (!workflowId) return + if (!tryAcquireDeployAction(workflowId)) return + const actionId = deployActionIdRef.current + 1 + deployActionIdRef.current = actionId + setIsFinalizingDeploy(true) setDeployError(null) setDeployWarnings([]) - const { blocks, edges, loops, parallels } = useWorkflowStore.getState() - const liveBlocks = mergeSubblockState(blocks, workflowId) - const checkResult = runPreDeployChecks({ - blocks: liveBlocks, - edges, - loops, - parallels, - workflowId, - }) - if (!checkResult.passed) { - setDeployError(checkResult.error || 'Pre-deploy validation failed') - return - } - try { - const result = await deployMutation.mutateAsync({ workflowId }) - if (result.warnings && result.warnings.length > 0) { - setDeployWarnings(result.warnings) + if (!(await deployReadiness.waitUntilReady())) { + if (!isWorkflowStillActive(workflowId) || deployActionIdRef.current !== actionId) return + setDeployError(deployReadiness.tooltip) + return + } + if (!isWorkflowStillActive(workflowId) || deployActionIdRef.current !== actionId) return + + const { blocks, edges, loops, parallels } = useWorkflowStore.getState() + const liveBlocks = mergeSubblockState(blocks, workflowId) + const checkResult = runPreDeployChecks({ + blocks: liveBlocks, + edges, + loops, + parallels, + workflowId, + }) + if (!checkResult.passed) { + setDeployError(checkResult.error || 'Pre-deploy validation failed') + return + } + + try { + const result = await deployMutation.mutateAsync({ workflowId }) + const syncWarning = await syncDraftAfterDeploy() + if (!isWorkflowStillActive(workflowId) || deployActionIdRef.current !== actionId) return + setDeployWarnings([...(result.warnings || []), 
...(syncWarning ? [syncWarning] : [])]) + } finally { + if (deployActionIdRef.current === actionId) { + setIsFinalizingDeploy(false) + } } } catch (error: unknown) { + if (deployActionIdRef.current !== actionId) return + if (!isWorkflowStillActive(workflowId)) return logger.error('Error redeploying workflow:', { error }) - const errorMessage = error instanceof Error ? error.message : 'Failed to redeploy workflow' + const errorMessage = toError(error).message || 'Failed to redeploy workflow' setDeployError(errorMessage) + } finally { + releaseDeployAction(workflowId) + if (deployActionIdRef.current === actionId) { + setIsFinalizingDeploy(false) + } } - }, [workflowId, deployMutation]) + }, [workflowId, deployMutation, deployReadiness, syncDraftAfterDeploy, isWorkflowStillActive]) const handleCloseModal = useCallback(() => { + deployActionIdRef.current += 1 + setIsFinalizingDeploy(false) setChatSubmitting(false) setDeployError(null) setDeployWarnings([]) @@ -456,7 +572,7 @@ export function DeployModal({ // deleteTrigger?.click() // }, []) - const isSubmitting = deployMutation.isPending + const isSubmitting = deployMutation.isPending || isFinalizingDeploy const isUndeploying = undeployMutation.isPending return ( @@ -514,8 +630,11 @@ export function DeployModal({ isLoadingDeployedState={isLoadingDeployedState} versions={versions} versionsLoading={versionsLoading} + isPromotingVersion={isActivatingVersion || activateVersionMutation.isPending} + deployReadiness={deployReadiness} onPromoteToLive={handlePromoteToLive} onLoadDeploymentComplete={handleCloseModal} + onLoadDeploymentBlocked={setDeployError} /> @@ -610,9 +729,13 @@ export function DeployModal({ needsRedeployment={needsRedeployment} isSubmitting={isSubmitting} isUndeploying={isUndeploying} + deployReadiness={deployReadiness} + isDeploymentSettling={isDeploymentSettling} onDeploy={onDeploy} onRedeploy={handleRedeploy} - onUndeploy={() => setShowUndeployConfirm(true)} + onUndeploy={() => { + if (workflowId) 
setUndeployTargetWorkflowId(workflowId) + }} /> )} {activeTab === 'api' && ( @@ -841,7 +964,12 @@ export function DeployModal({ - + { + if (!nextOpen) setUndeployTargetWorkflowId(null) + }} + > Undeploy API @@ -855,7 +983,7 @@ export function DeployModal({

@@ -990,14 +1132,20 @@ function GeneralFooter({ return ( - +
+ + {blockedMessage && ( +
{blockedMessage}
+ )} +
- {needsRedeployment && ( - )}
diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/deploy.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/deploy.tsx index 5d225bbd7f5..ec630e08781 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/deploy.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/deploy.tsx @@ -6,6 +6,7 @@ import { DeployModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/compon import { useChangeDetection, useDeployment, + useDeployReadiness, } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks' import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow' import { useDeployedWorkflowState, useDeploymentInfo } from '@/hooks/queries/deployments' @@ -42,21 +43,30 @@ export function Deploy({ isFetching: isFetchingDeployedState, } = useDeployedWorkflowState(activeWorkflowId, { enabled: isDeployedStateEnabled }) const deployedState = isDeployedStateEnabled ? (deployedStateData ?? 
null) : null + const deployReadiness = useDeployReadiness(activeWorkflowId) - const { changeDetected } = useChangeDetection({ + const { changeDetected, isChangeDetectionSettling } = useChangeDetection({ workflowId: activeWorkflowId, deployedState, isLoadingDeployedState: isLoadingDeployedState || isFetchingDeployedState, }) + const isDeploymentSettling = isChangeDetectionSettling || deployReadiness.isSyncing const { isDeploying, handleDeployClick } = useDeployment({ workflowId: activeWorkflowId, isDeployed, + deployReadiness, }) const isEmpty = !hasBlocks() const canDeploy = userPermissions.canAdmin - const isDisabled = disabled || isDeploying || !canDeploy || isEmpty + const isDisabled = + disabled || + isDeploying || + !canDeploy || + isEmpty || + isDeploymentSettling || + (!isDeployed && deployReadiness.isBlocked) const onDeployClick = async () => { if (disabled || !canDeploy || !activeWorkflowId) return @@ -80,6 +90,12 @@ export function Deploy({ if (isDeploying) { return 'Deploying...' } + if (isChangeDetectionSettling) { + return 'Syncing deployment state...' + } + if (deployReadiness.isBlocked && !isDeployed) { + return deployReadiness.tooltip + } if (changeDetected) { return 'Update deployment' } @@ -89,6 +105,19 @@ export function Deploy({ return 'Deploy workflow' } + const getButtonLabel = () => { + if (isDeployed && (changeDetected || isDeploymentSettling)) { + return 'Update' + } + if (changeDetected) { + return 'Update' + } + if (isDeployed) { + return 'Live' + } + return 'Deploy' + } + return ( <> @@ -97,13 +126,19 @@ export function Deploy({
@@ -117,7 +152,9 @@ export function Deploy({ isDeployed={isDeployed} needsRedeployment={changeDetected} deployedState={deployedState} - isLoadingDeployedState={isLoadingDeployedState} + isLoadingDeployedState={isLoadingDeployedState || isFetchingDeployedState} + deployReadiness={deployReadiness} + isDeploymentSettling={isDeploymentSettling} /> ) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/deploy-action-lock.test.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/deploy-action-lock.test.ts new file mode 100644 index 00000000000..92f1f23890b --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/deploy-action-lock.test.ts @@ -0,0 +1,24 @@ +/** + * @vitest-environment node + */ +import { describe, expect, it } from 'vitest' +import { + releaseDeployAction, + tryAcquireDeployAction, +} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/deploy-action-lock' + +describe('deploy action lock', () => { + it('serializes deploy actions per workflow', () => { + try { + expect(tryAcquireDeployAction('workflow-a')).toBe(true) + expect(tryAcquireDeployAction('workflow-a')).toBe(false) + expect(tryAcquireDeployAction('workflow-b')).toBe(true) + + releaseDeployAction('workflow-a') + expect(tryAcquireDeployAction('workflow-a')).toBe(true) + } finally { + releaseDeployAction('workflow-a') + releaseDeployAction('workflow-b') + } + }) +}) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/deploy-action-lock.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/deploy-action-lock.ts new file mode 100644 index 00000000000..d2de6485b17 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/deploy-action-lock.ts @@ -0,0 +1,14 @@ 
+const activeDeployActions = new Set() + +export function tryAcquireDeployAction(workflowId: string): boolean { + if (activeDeployActions.has(workflowId)) { + return false + } + + activeDeployActions.add(workflowId) + return true +} + +export function releaseDeployAction(workflowId: string): void { + activeDeployActions.delete(workflowId) +} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/index.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/index.ts index 9bd9bf02271..171b8718bbc 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/index.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/index.ts @@ -1,2 +1,4 @@ export { useChangeDetection } from './use-change-detection' +export type { DeployReadiness } from './use-deploy-readiness' +export { getDeployReadinessState, useDeployReadiness } from './use-deploy-readiness' export { useDeployment } from './use-deployment' diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/sync-local-draft.test.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/sync-local-draft.test.ts new file mode 100644 index 00000000000..6abc9a694d0 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/sync-local-draft.test.ts @@ -0,0 +1,246 @@ +/** + * @vitest-environment node + */ +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { + mockRequestJson, + mockApplyWorkflowStateToStores, + mockGetRegistryState, + mockHasPendingOperations, + mockGetOperationQueueState, + mockGetWorkflowDiffState, +} = vi.hoisted(() => ({ + mockRequestJson: vi.fn(), + mockApplyWorkflowStateToStores: vi.fn(), + mockGetRegistryState: vi.fn(() => ({ activeWorkflowId: 'workflow-a' })), + 
mockHasPendingOperations: vi.fn(() => false), + mockGetOperationQueueState: vi.fn(() => ({ + hasPendingOperations: mockHasPendingOperations, + workflowOperationVersions: {}, + })), + mockGetWorkflowDiffState: vi.fn(() => ({ + hasActiveDiff: false, + pendingExternalUpdates: {}, + reconcilingWorkflows: {}, + reconciliationErrors: {}, + remoteUpdateVersions: {}, + })), +})) + +vi.mock('@/lib/api/client/request', () => ({ + requestJson: mockRequestJson, +})) + +vi.mock('@/lib/api/contracts', () => ({ + getWorkflowStateContract: {}, +})) + +vi.mock('@/stores/workflow-diff/utils', () => ({ + applyWorkflowStateToStores: mockApplyWorkflowStateToStores, +})) + +vi.mock('@/stores/workflow-diff/store', () => ({ + useWorkflowDiffStore: { + getState: mockGetWorkflowDiffState, + }, +})) + +vi.mock('@/stores/operation-queue/store', () => ({ + useOperationQueueStore: { + getState: mockGetOperationQueueState, + }, +})) + +vi.mock('@/stores/workflows/registry/store', () => ({ + useWorkflowRegistry: { + getState: mockGetRegistryState, + }, +})) + +import { syncLocalDraftFromServer } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/sync-local-draft' + +describe('syncLocalDraftFromServer', () => { + beforeEach(() => { + vi.clearAllMocks() + mockGetRegistryState.mockReturnValue({ activeWorkflowId: 'workflow-a' }) + mockHasPendingOperations.mockReturnValue(false) + mockGetOperationQueueState.mockImplementation(() => ({ + hasPendingOperations: mockHasPendingOperations, + workflowOperationVersions: {}, + })) + mockGetWorkflowDiffState.mockReturnValue({ + hasActiveDiff: false, + pendingExternalUpdates: {}, + reconcilingWorkflows: {}, + reconciliationErrors: {}, + remoteUpdateVersions: {}, + }) + }) + + it('hydrates sibling workflow variables into the applied workflow state', async () => { + mockRequestJson.mockResolvedValue({ + data: { + state: { + blocks: {}, + edges: [], + loops: {}, + parallels: {}, + lastSaved: 1, + }, + variables: { + 
'variable-a': { + id: 'variable-a', + name: 'API_KEY', + type: 'plain', + value: 'secret', + }, + }, + }, + }) + + await expect(syncLocalDraftFromServer('workflow-a')).resolves.toBe(true) + + expect(mockApplyWorkflowStateToStores).toHaveBeenCalledWith( + 'workflow-a', + expect.objectContaining({ + variables: { + 'variable-a': { + id: 'variable-a', + name: 'API_KEY', + type: 'plain', + value: 'secret', + }, + }, + }), + { updateLastSaved: true } + ) + }) + + it('does not apply a fetched draft after navigation changes the active workflow', async () => { + mockGetRegistryState + .mockReturnValueOnce({ activeWorkflowId: 'workflow-a' }) + .mockReturnValueOnce({ activeWorkflowId: 'workflow-b' }) + mockRequestJson.mockResolvedValue({ + data: { + state: { + blocks: {}, + edges: [], + loops: {}, + parallels: {}, + lastSaved: 1, + }, + variables: {}, + }, + }) + + await expect(syncLocalDraftFromServer('workflow-a')).resolves.toBe(false) + + expect(mockApplyWorkflowStateToStores).not.toHaveBeenCalled() + }) + + it('does not synthesize an empty variables object when the server omits variables', async () => { + mockRequestJson.mockResolvedValue({ + data: { + state: { + blocks: {}, + edges: [], + loops: {}, + parallels: {}, + lastSaved: 1, + }, + }, + }) + + await expect(syncLocalDraftFromServer('workflow-a')).resolves.toBe(true) + + const appliedState = mockApplyWorkflowStateToStores.mock.calls[0][1] + expect(Object.hasOwn(appliedState, 'variables')).toBe(false) + }) + + it('does not apply a fetched draft over newly queued local operations', async () => { + mockHasPendingOperations.mockReturnValueOnce(false).mockReturnValueOnce(true) + mockRequestJson.mockResolvedValue({ + data: { + state: { + blocks: {}, + edges: [], + loops: {}, + parallels: {}, + lastSaved: 1, + }, + variables: {}, + }, + }) + + await expect(syncLocalDraftFromServer('workflow-a')).resolves.toBe(false) + + expect(mockApplyWorkflowStateToStores).not.toHaveBeenCalled() + }) + + it('does not apply a fetched 
draft when a newer remote update arrives during fetch', async () => { + mockGetWorkflowDiffState + .mockReturnValueOnce({ + hasActiveDiff: false, + pendingExternalUpdates: {}, + reconcilingWorkflows: {}, + reconciliationErrors: {}, + remoteUpdateVersions: {}, + }) + .mockReturnValueOnce({ + hasActiveDiff: false, + pendingExternalUpdates: {}, + reconcilingWorkflows: {}, + reconciliationErrors: {}, + remoteUpdateVersions: { 'workflow-a': 1 }, + }) + mockRequestJson.mockResolvedValue({ + data: { + state: { + blocks: {}, + edges: [], + loops: {}, + parallels: {}, + lastSaved: 1, + }, + variables: {}, + }, + }) + + await expect(syncLocalDraftFromServer('workflow-a')).resolves.toBe(false) + + expect(mockApplyWorkflowStateToStores).not.toHaveBeenCalled() + }) + + it('does not apply a fetched draft when local operations queue and drain during fetch', async () => { + mockGetOperationQueueState + .mockReturnValueOnce({ + hasPendingOperations: mockHasPendingOperations, + workflowOperationVersions: {}, + }) + .mockReturnValueOnce({ + hasPendingOperations: mockHasPendingOperations, + workflowOperationVersions: {}, + }) + .mockReturnValueOnce({ + hasPendingOperations: mockHasPendingOperations, + workflowOperationVersions: { 'workflow-a': 1 }, + }) + mockRequestJson.mockResolvedValue({ + data: { + state: { + blocks: {}, + edges: [], + loops: {}, + parallels: {}, + lastSaved: 1, + }, + variables: {}, + }, + }) + + await expect(syncLocalDraftFromServer('workflow-a')).resolves.toBe(false) + + expect(mockApplyWorkflowStateToStores).not.toHaveBeenCalled() + }) +}) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/sync-local-draft.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/sync-local-draft.ts new file mode 100644 index 00000000000..ad308ee549c --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/sync-local-draft.ts @@ 
-0,0 +1,59 @@ +import { requestJson } from '@/lib/api/client/request' +import { getWorkflowStateContract } from '@/lib/api/contracts' +import { useOperationQueueStore } from '@/stores/operation-queue/store' +import { useWorkflowDiffStore } from '@/stores/workflow-diff/store' +import { applyWorkflowStateToStores } from '@/stores/workflow-diff/utils' +import { useWorkflowRegistry } from '@/stores/workflows/registry/store' +import type { WorkflowState } from '@/stores/workflows/workflow/types' + +function canApplyServerSnapshot( + workflowId: string, + remoteVersionAtStart: number, + localOperationVersionAtStart: number +): boolean { + if (useWorkflowRegistry.getState().activeWorkflowId !== workflowId) return false + const operationQueueState = useOperationQueueStore.getState() + if (operationQueueState.hasPendingOperations(workflowId)) return false + if ( + (operationQueueState.workflowOperationVersions[workflowId] ?? 0) !== + localOperationVersionAtStart + ) { + return false + } + + const diffState = useWorkflowDiffStore.getState() + return ( + !diffState.hasActiveDiff && + !diffState.pendingExternalUpdates[workflowId] && + !diffState.reconcilingWorkflows[workflowId] && + !diffState.reconciliationErrors[workflowId] && + (diffState.remoteUpdateVersions[workflowId] ?? 0) === remoteVersionAtStart + ) +} + +export async function syncLocalDraftFromServer(workflowId: string): Promise { + if (useWorkflowRegistry.getState().activeWorkflowId !== workflowId) return false + if (useOperationQueueStore.getState().hasPendingOperations(workflowId)) return false + const localOperationVersionAtStart = + useOperationQueueStore.getState().workflowOperationVersions[workflowId] ?? 0 + const remoteVersionAtStart = useWorkflowDiffStore.getState().remoteUpdateVersions[workflowId] ?? 
0 + + const responseData = await requestJson(getWorkflowStateContract, { + params: { id: workflowId }, + }) + const wireState = responseData.data?.state + if (!canApplyServerSnapshot(workflowId, remoteVersionAtStart, localOperationVersionAtStart)) { + return false + } + if (!wireState) { + throw new Error('No workflow state was returned while syncing the local draft') + } + + // double-cast-allowed: workflowStateSchema is a wire supertype; normalized workflow state is persisted in store-compatible shape + const workflowState = wireState as unknown as WorkflowState + if (Object.hasOwn(responseData.data, 'variables')) { + workflowState.variables = responseData.data.variables || {} + } + applyWorkflowStateToStores(workflowId, workflowState, { updateLastSaved: true }) + return true +} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-change-detection.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-change-detection.ts index a9a319688e1..a66bc19990e 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-change-detection.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-change-detection.ts @@ -70,5 +70,8 @@ export function useChangeDetection({ return hasWorkflowChanged(currentState, deployedState) }, [currentState, deployedState, isLoadingDeployedState]) - return { changeDetected } + return { + changeDetected, + isChangeDetectionSettling: Boolean(workflowId && isLoadingDeployedState), + } } diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-deploy-readiness.test.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-deploy-readiness.test.ts new file mode 100644 index 00000000000..39b0a3849b2 --- /dev/null +++ 
b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-deploy-readiness.test.ts @@ -0,0 +1,101 @@ +/** + * @vitest-environment node + */ +import { describe, expect, it, vi } from 'vitest' + +vi.mock('@/stores/operation-queue/store', () => ({ + useOperationQueueStore: Object.assign( + () => ({ hasPendingOperations: false, hasOperationError: false }), + { + getState: () => ({ + hasOperationError: false, + hasPendingOperations: () => false, + waitForWorkflowOperations: () => Promise.resolve(true), + }), + } + ), +})) + +vi.mock('@/stores/workflow-diff/store', () => ({ + useWorkflowDiffStore: Object.assign( + () => ({ + hasActiveDiff: false, + hasPendingExternalUpdate: false, + isReconciling: false, + }), + { + getState: () => ({ + hasActiveDiff: false, + pendingExternalUpdates: {}, + reconcilingWorkflows: {}, + reconciliationErrors: {}, + }), + } + ), +})) + +import { getDeployReadinessState } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-deploy-readiness' + +const baseInput = { + workflowId: 'workflow-a', + hasPendingOperations: false, + hasOperationError: false, + hasActiveDiff: false, + hasPendingExternalUpdate: false, + isReconciling: false, + reconciliationError: undefined, +} + +describe('getDeployReadinessState', () => { + it('allows deploy when no local persistence or reconciliation is pending', () => { + expect(getDeployReadinessState(baseInput).status).toBe('ready') + }) + + it('blocks deploy while active workflow operations are pending', () => { + const readiness = getDeployReadinessState({ + ...baseInput, + hasPendingOperations: true, + }) + + expect(readiness.status).toBe('saving') + expect(readiness.label).toBe('Saving...') + }) + + it('ignores queued operations before they are scoped to the active workflow', () => { + expect( + getDeployReadinessState({ + ...baseInput, + hasPendingOperations: false, + }).status + ).toBe('ready') + }) + + it('uses a neutral 
syncing state while external updates reconcile', () => { + const readiness = getDeployReadinessState({ + ...baseInput, + hasPendingExternalUpdate: true, + }) + + expect(readiness.status).toBe('syncing') + expect(readiness.label).toBe('Syncing...') + }) + + it('blocks deploy while copilot diff changes are under review', () => { + expect( + getDeployReadinessState({ + ...baseInput, + hasActiveDiff: true, + }).status + ).toBe('reviewing-diff') + }) + + it('surfaces reconciliation failures as deploy-blocking sync errors', () => { + const readiness = getDeployReadinessState({ + ...baseInput, + reconciliationError: 'Latest workflow changes failed to sync', + }) + + expect(readiness.status).toBe('error') + expect(readiness.tooltip).toBe('Latest workflow changes failed to sync') + }) +}) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-deploy-readiness.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-deploy-readiness.ts new file mode 100644 index 00000000000..23490ce63b3 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-deploy-readiness.ts @@ -0,0 +1,159 @@ +import { useCallback, useMemo } from 'react' +import { useShallow } from 'zustand/react/shallow' +import { useOperationQueueStore } from '@/stores/operation-queue/store' +import { useWorkflowDiffStore } from '@/stores/workflow-diff/store' + +export type DeployReadinessStatus = + | 'ready' + | 'missing-workflow' + | 'saving' + | 'reviewing-diff' + | 'syncing' + | 'error' + +interface DeployReadinessInput { + workflowId: string | null + hasPendingOperations: boolean + hasOperationError: boolean + hasActiveDiff: boolean + hasPendingExternalUpdate: boolean + isReconciling: boolean + reconciliationError?: string +} + +export interface DeployReadiness { + status: DeployReadinessStatus + isReady: boolean + isBlocked: boolean + isSyncing: boolean + 
label: string + tooltip: string + waitUntilReady: () => Promise +} + +export function getDeployReadinessState(input: DeployReadinessInput) { + if (!input.workflowId) { + return { + status: 'missing-workflow' as const, + label: 'Deploy', + tooltip: 'No workflow selected', + } + } + + if (input.hasOperationError || input.reconciliationError) { + return { + status: 'error' as const, + label: 'Sync failed', + tooltip: + input.reconciliationError || + 'Some changes failed to save. Reconnect or refresh before deploying.', + } + } + + if (input.hasPendingOperations) { + return { + status: 'saving' as const, + label: 'Saving...', + tooltip: 'Saving workflow changes before deployment', + } + } + + if (input.hasActiveDiff) { + return { + status: 'reviewing-diff' as const, + label: 'Reviewing...', + tooltip: 'Accept or reject the current copilot changes before deploying', + } + } + + if (input.hasPendingExternalUpdate || input.isReconciling) { + return { + status: 'syncing' as const, + label: 'Syncing...', + tooltip: 'Syncing the latest workflow changes before deployment', + } + } + + return { + status: 'ready' as const, + label: 'Ready', + tooltip: 'Ready to deploy', + } +} + +export function useDeployReadiness(workflowId: string | null): DeployReadiness { + const { hasPendingOperations, hasOperationError } = useOperationQueueStore( + useShallow((state) => ({ + hasPendingOperations: workflowId + ? state.operations.some((op) => op.workflowId === workflowId) + : false, + hasOperationError: state.hasOperationError, + })) + ) + + const { hasActiveDiff, hasPendingExternalUpdate, isReconciling, reconciliationError } = + useWorkflowDiffStore( + useShallow((state) => ({ + hasActiveDiff: state.hasActiveDiff, + hasPendingExternalUpdate: workflowId + ? Boolean(state.pendingExternalUpdates[workflowId]) + : false, + isReconciling: workflowId ? Boolean(state.reconcilingWorkflows[workflowId]) : false, + reconciliationError: workflowId ? 
state.reconciliationErrors[workflowId] : undefined, + })) + ) + + const readiness = useMemo( + () => + getDeployReadinessState({ + workflowId, + hasPendingOperations, + hasOperationError, + hasActiveDiff, + hasPendingExternalUpdate, + isReconciling, + reconciliationError, + }), + [ + workflowId, + hasPendingOperations, + hasOperationError, + hasActiveDiff, + hasPendingExternalUpdate, + isReconciling, + reconciliationError, + ] + ) + + const waitUntilReady = useCallback(async () => { + if (!workflowId) return false + + const queue = useOperationQueueStore.getState() + if (queue.hasOperationError) return false + + const drained = await queue.waitForWorkflowOperations(workflowId) + if (!drained) return false + + const latestQueue = useOperationQueueStore.getState() + const diff = useWorkflowDiffStore.getState() + return ( + !latestQueue.hasOperationError && + !latestQueue.hasPendingOperations(workflowId) && + !diff.hasActiveDiff && + !diff.pendingExternalUpdates[workflowId] && + !diff.reconcilingWorkflows[workflowId] && + !diff.reconciliationErrors[workflowId] + ) + }, [workflowId]) + + const isReady = readiness.status === 'ready' + const isSyncing = readiness.status === 'saving' || readiness.status === 'syncing' + + return { + ...readiness, + isReady, + isBlocked: !isReady, + isSyncing, + waitUntilReady, + } +} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-deployment.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-deployment.ts index 3b894847c4b..9982a1d0595 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-deployment.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-deployment.ts @@ -1,13 +1,22 @@ -import { useCallback } from 'react' +import { useCallback, useState } from 'react' +import { createLogger } from '@sim/logger' +import { 
toError } from '@sim/utils/errors' import { runPreDeployChecks } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-predeploy-checks' import { useDeployWorkflow } from '@/hooks/queries/deployments' import { useNotificationStore } from '@/stores/notifications' +import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { mergeSubblockState } from '@/stores/workflows/utils' import { useWorkflowStore } from '@/stores/workflows/workflow/store' +import { releaseDeployAction, tryAcquireDeployAction } from './deploy-action-lock' +import { syncLocalDraftFromServer } from './sync-local-draft' +import type { DeployReadiness } from './use-deploy-readiness' + +const logger = createLogger('UseDeployment') interface UseDeploymentProps { workflowId: string | null isDeployed: boolean + deployReadiness: DeployReadiness } /** @@ -15,8 +24,9 @@ interface UseDeploymentProps { * First deploy: runs pre-deploy checks, then deploys via mutation and opens modal. * Already deployed: opens modal directly (validation happens on Update in modal). 
*/ -export function useDeployment({ workflowId, isDeployed }: UseDeploymentProps) { +export function useDeployment({ workflowId, isDeployed, deployReadiness }: UseDeploymentProps) { const { mutateAsync, isPending: isDeploying } = useDeployWorkflow() + const [isFinalizingDeploy, setIsFinalizingDeploy] = useState(false) const addNotification = useNotificationStore((state) => state.addNotification) const handleDeployClick = useCallback(async () => { @@ -26,40 +36,102 @@ export function useDeployment({ workflowId, isDeployed }: UseDeploymentProps) { return { success: true, shouldOpenModal: true } } - const { blocks, edges, loops, parallels } = useWorkflowStore.getState() - const liveBlocks = mergeSubblockState(blocks, workflowId) - const checkResult = runPreDeployChecks({ - blocks: liveBlocks, - edges, - loops, - parallels, - workflowId, - }) - if (!checkResult.passed) { + if (!tryAcquireDeployAction(workflowId)) { addNotification({ - level: 'error', - message: checkResult.error || 'Pre-deploy validation failed', + level: 'info', + message: 'Deployment is already in progress.', workflowId, }) return { success: false, shouldOpenModal: false } } + setIsFinalizingDeploy(true) try { - await mutateAsync({ workflowId }) - return { success: true, shouldOpenModal: true } - } catch (error) { - const errorMessage = error instanceof Error ? error.message : 'Failed to deploy workflow' - addNotification({ - level: 'error', - message: errorMessage, + const isReady = await deployReadiness.waitUntilReady() + if (useWorkflowRegistry.getState().activeWorkflowId !== workflowId) { + return { success: false, shouldOpenModal: false } + } + if (!isReady) { + addNotification({ + level: deployReadiness.status === 'error' ? 
'error' : 'info', + message: deployReadiness.tooltip, + workflowId, + }) + return { success: false, shouldOpenModal: false } + } + + const { blocks, edges, loops, parallels } = useWorkflowStore.getState() + const liveBlocks = mergeSubblockState(blocks, workflowId) + const checkResult = runPreDeployChecks({ + blocks: liveBlocks, + edges, + loops, + parallels, workflowId, }) - return { success: false, shouldOpenModal: false } + if (!checkResult.passed) { + addNotification({ + level: 'error', + message: checkResult.error || 'Pre-deploy validation failed', + workflowId, + }) + return { success: false, shouldOpenModal: false } + } + + try { + await mutateAsync({ workflowId }) + } catch (error) { + if (useWorkflowRegistry.getState().activeWorkflowId !== workflowId) { + return { success: false, shouldOpenModal: false } + } + const errorMessage = toError(error).message || 'Failed to deploy workflow' + addNotification({ + level: 'error', + message: errorMessage, + workflowId, + }) + return { success: false, shouldOpenModal: false } + } + + try { + const syncedActiveWorkflow = await syncLocalDraftFromServer(workflowId) + if (!syncedActiveWorkflow) { + if (useWorkflowRegistry.getState().activeWorkflowId === workflowId) { + logger.warn('Workflow deployed, but local draft sync was deferred', { workflowId }) + addNotification({ + level: 'info', + message: + 'Deployment succeeded, but local sync is still catching up. Refresh if the status looks stale.', + workflowId, + }) + } + return { success: true, shouldOpenModal: false } + } + } catch (error) { + if (useWorkflowRegistry.getState().activeWorkflowId !== workflowId) { + return { success: true, shouldOpenModal: false } + } + logger.warn('Workflow deployed, but local draft sync failed', { + workflowId, + error: toError(error).message, + }) + addNotification({ + level: 'info', + message: + 'Deployment succeeded, but local sync failed. 
Refresh if the status looks stale.', + workflowId, + }) + } + + return { success: true, shouldOpenModal: true } + } finally { + releaseDeployAction(workflowId) + setIsFinalizingDeploy(false) } - }, [workflowId, isDeployed, addNotification, mutateAsync]) + }, [workflowId, isDeployed, deployReadiness, addNotification, mutateAsync]) return { - isDeploying, + isDeploying: isDeploying || isFinalizingDeploy, handleDeployClick, } } diff --git a/apps/sim/background/schedule-execution.ts b/apps/sim/background/schedule-execution.ts index 3b6c4cd5bfd..7da80cab40f 100644 --- a/apps/sim/background/schedule-execution.ts +++ b/apps/sim/background/schedule-execution.ts @@ -1,12 +1,23 @@ -import { db, jobExecutionLogs, workflow, workflowSchedule } from '@sim/db' +import { + db, + jobExecutionLogs, + workflow, + workflowDeploymentVersion, + workflowSchedule, +} from '@sim/db' import { createLogger, runWithRequestContext } from '@sim/logger' import { toError } from '@sim/utils/errors' import { generateId } from '@sim/utils/id' import { task } from '@trigger.dev/sdk' import { Cron } from 'croner' import { and, eq, isNull } from 'drizzle-orm' +import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription' import type { AsyncExecutionCorrelation } from '@/lib/core/async-jobs/types' -import { createTimeoutAbortController, getTimeoutErrorMessage } from '@/lib/core/execution-limits' +import { + createTimeoutAbortController, + getExecutionTimeout, + getTimeoutErrorMessage, +} from '@/lib/core/execution-limits' import { preprocessExecution } from '@/lib/execution/preprocessing' import { LoggingSession } from '@/lib/logs/execution/logging-session' import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans' @@ -15,10 +26,7 @@ import { wasExecutionFinalizedByCore, } from '@/lib/workflows/executor/execution-core' import { handlePostExecutionPauseState } from '@/lib/workflows/executor/pause-persistence' -import { - blockExistsInDeployment, - 
loadDeployedWorkflowState, -} from '@/lib/workflows/persistence/utils' +import { loadDeployedWorkflowState } from '@/lib/workflows/persistence/utils' import { type BlockState, calculateNextRunTime as calculateNextTime, @@ -40,7 +48,11 @@ type WorkflowScheduleUpdate = Partial type ExecutionCoreResult = Awaited> type RunWorkflowResult = - | { status: 'skip'; blocks: Record } + | { + status: 'skip' + reason: 'stale_deployment' | 'invalid_schedule' + blocks: Record + } | { status: 'success'; blocks: Record; executionResult: ExecutionCoreResult } | { status: 'failure'; blocks: Record; executionResult: ExecutionCoreResult } @@ -137,6 +149,25 @@ async function determineNextRunAfterError( return new Date(now.getTime() + 24 * 60 * 60 * 1000) } +async function isScheduleDeploymentVersionActive( + workflowId: string, + deploymentVersionId: string +): Promise { + const [activeDeployment] = await db + .select({ id: workflowDeploymentVersion.id }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, workflowId), + eq(workflowDeploymentVersion.id, deploymentVersionId), + eq(workflowDeploymentVersion.isActive, true) + ) + ) + .limit(1) + + return Boolean(activeDeployment) +} + async function runWorkflowExecution({ payload, correlation, @@ -164,16 +195,32 @@ async function runWorkflowExecution({ const blocks = deployedData.blocks const { deploymentVersionId } = deployedData + if (payload.deploymentVersionId && deploymentVersionId !== payload.deploymentVersionId) { + logger.info(`[${requestId}] Loaded deployment no longer matches queued schedule, skipping`, { + scheduleId: payload.scheduleId, + workflowId: payload.workflowId, + queuedDeploymentVersionId: payload.deploymentVersionId, + loadedDeploymentVersionId: deploymentVersionId, + }) + return { + status: 'skip', + reason: 'stale_deployment', + blocks: {} as Record, + } + } logger.info(`[${requestId}] Loaded deployed workflow ${payload.workflowId}`) if (payload.blockId) { - const 
blockExists = await blockExistsInDeployment(payload.workflowId, payload.blockId) - if (!blockExists) { + if (!blocks[payload.blockId]) { logger.warn( `[${requestId}] Schedule trigger block ${payload.blockId} not found in deployed workflow ${payload.workflowId}. Skipping execution.` ) - return { status: 'skip', blocks: {} as Record } + return { + status: 'skip', + reason: 'invalid_schedule', + blocks: {} as Record, + } } } @@ -199,6 +246,13 @@ async function runWorkflowExecution({ triggerType: 'schedule', triggerBlockId: payload.blockId || undefined, useDraftState: false, + workflowStateOverride: { + blocks: deployedData.blocks, + edges: deployedData.edges, + loops: deployedData.loops, + parallels: deployedData.parallels, + deploymentVersionId, + }, startTime: new Date().toISOString(), isClientSession: false, correlation, @@ -216,6 +270,22 @@ async function runWorkflowExecution({ let executionResult try { + if ( + payload.deploymentVersionId && + !(await isScheduleDeploymentVersionActive(payload.workflowId, payload.deploymentVersionId)) + ) { + logger.info(`[${requestId}] Schedule deployment changed before execution, skipping`, { + scheduleId: payload.scheduleId, + workflowId: payload.workflowId, + deploymentVersionId: payload.deploymentVersionId, + }) + return { + status: 'skip', + reason: 'stale_deployment', + blocks: {} as Record, + } + } + executionResult = await executeWorkflowCore({ snapshot, callbacks: {}, @@ -289,6 +359,7 @@ export type ScheduleExecutionPayload = { requestId?: string correlation?: AsyncExecutionCorrelation blockId?: string + deploymentVersionId?: string cronExpression?: string lastRanAt?: string failedCount?: number @@ -340,6 +411,7 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) { .select({ id: workflowSchedule.id, workflowId: workflowSchedule.workflowId, + deploymentVersionId: workflowSchedule.deploymentVersionId, status: workflowSchedule.status, archivedAt: workflowSchedule.archivedAt, }) @@ -367,6 +439,37 
@@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) { return } + const expectedDeploymentVersionId = + payload.deploymentVersionId ?? scheduleRecord.deploymentVersionId ?? undefined + if (expectedDeploymentVersionId) { + const [activeDeployment] = await db + .select({ id: workflowDeploymentVersion.id }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, payload.workflowId), + eq(workflowDeploymentVersion.id, expectedDeploymentVersionId), + eq(workflowDeploymentVersion.isActive, true) + ) + ) + .limit(1) + + if (!activeDeployment) { + logger.info(`[${requestId}] Schedule deployment version is no longer active, skipping`, { + scheduleId: payload.scheduleId, + workflowId: payload.workflowId, + deploymentVersionId: expectedDeploymentVersionId, + }) + await releaseScheduleLock( + payload.scheduleId, + requestId, + now, + `Failed to release stale deployment schedule ${payload.scheduleId}` + ) + return + } + } + const loggingSession = new LoggingSession( payload.workflowId, executionId, @@ -538,6 +641,16 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) { }) if (executionResult.status === 'skip') { + if (executionResult.reason === 'stale_deployment') { + await releaseScheduleLock( + payload.scheduleId, + requestId, + now, + `Failed to release stale schedule ${payload.scheduleId} after deployment version changed` + ) + return + } + await applyScheduleUpdate( payload.scheduleId, { @@ -899,6 +1012,8 @@ export async function executeJobInline(payload: JobExecutionPayload) { const promptText = buildJobPrompt(jobRecord) try { + const userSubscription = await getHighestPrioritySubscription(jobRecord.sourceUserId) + const mothershipJobTimeoutMs = getExecutionTimeout(userSubscription?.plan, 'sync') const url = buildAPIUrl('/api/mothership/execute') const headers = await buildAuthHeaders(jobRecord.sourceUserId) @@ -910,16 +1025,52 @@ export async function executeJobInline(payload: 
JobExecutionPayload) { } const startTime = new Date() - const response = await fetch(url.toString(), { - method: 'POST', - headers, - body: JSON.stringify(body), - }) - const endTime = new Date() - const durationMs = endTime.getTime() - startTime.getTime() + const timeoutController = createTimeoutAbortController(mothershipJobTimeoutMs) + try { + const response = await fetch(url.toString(), { + method: 'POST', + headers, + body: JSON.stringify(body), + signal: timeoutController.signal, + }) + + if (!response.ok) { + const errorText = await response.text().catch(() => { + if (timeoutController.isTimedOut()) { + throw new Error(getTimeoutErrorMessage(null, timeoutController.timeoutMs)) + } + return 'Unknown error' + }) + const endTime = new Date() + const durationMs = endTime.getTime() - startTime.getTime() + + await createJobLogEntry({ + scheduleId: payload.scheduleId, + workspaceId: jobRecord.sourceWorkspaceId, + jobTitle: jobRecord.jobTitle, + startTime, + endTime, + durationMs, + success: false, + errorMessage: errorText, + }) - if (!response.ok) { - const errorText = await response.text().catch(() => 'Unknown error') + throw new Error(`Mothership execution failed (${response.status}): ${errorText}`) + } + + let responseBody: Record = {} + let wasCompletedByTool = false + try { + responseBody = await response.json() + const toolCalls = responseBody?.toolCalls as Array<{ name?: string }> | undefined + wasCompletedByTool = toolCalls?.some((tc) => tc.name === 'complete_job') ?? 
false + } catch { + if (timeoutController.isTimedOut()) { + throw new Error(getTimeoutErrorMessage(null, timeoutController.timeoutMs)) + } + } + const endTime = new Date() + const durationMs = endTime.getTime() - startTime.getTime() await createJobLogEntry({ scheduleId: payload.scheduleId, @@ -928,92 +1079,71 @@ export async function executeJobInline(payload: JobExecutionPayload) { startTime, endTime, durationMs, - success: false, - errorMessage: errorText, + success: true, + responseBody, }) - throw new Error(`Mothership execution failed (${response.status}): ${errorText}`) - } + const newRunCount = (jobRecord.runCount || 0) + 1 - let responseBody: Record = {} - let wasCompletedByTool = false - try { - responseBody = await response.json() - const toolCalls = responseBody?.toolCalls as Array<{ name?: string }> | undefined - wasCompletedByTool = toolCalls?.some((tc) => tc.name === 'complete_job') ?? false - } catch { - // Response may not be JSON; proceed with normal flow - } + logger.info(`[${requestId}] Job executed successfully`, { + scheduleId: payload.scheduleId, + runCount: newRunCount, + wasCompletedByTool, + }) - await createJobLogEntry({ - scheduleId: payload.scheduleId, - workspaceId: jobRecord.sourceWorkspaceId, - jobTitle: jobRecord.jobTitle, - startTime, - endTime, - durationMs, - success: true, - responseBody, - }) + if (wasCompletedByTool) { + await applyScheduleUpdate( + payload.scheduleId, + { + lastRanAt: now, + updatedAt: now, + runCount: newRunCount, + failedCount: 0, + lastQueuedAt: null, + }, + requestId, + `Error updating job ${payload.scheduleId} after completion` + ) + return + } - const newRunCount = (jobRecord.runCount || 0) + 1 + const isOneTime = !jobRecord.cronExpression + let nextRunAt: Date | null = null - logger.info(`[${requestId}] Job executed successfully`, { - scheduleId: payload.scheduleId, - runCount: newRunCount, - wasCompletedByTool, - }) + if (!isOneTime && jobRecord.cronExpression) { + const validation = 
validateCronExpression( + jobRecord.cronExpression, + jobRecord.timezone || 'UTC' + ) + nextRunAt = validation.nextRun || null + } + + const maxRunsReached = jobRecord.maxRuns && newRunCount >= jobRecord.maxRuns + if (maxRunsReached) { + logger.info(`[${requestId}] Job hit maxRuns limit`, { + scheduleId: payload.scheduleId, + maxRuns: jobRecord.maxRuns, + runCount: newRunCount, + }) + } - if (wasCompletedByTool) { await applyScheduleUpdate( payload.scheduleId, { lastRanAt: now, updatedAt: now, - runCount: newRunCount, + nextRunAt: isOneTime || maxRunsReached ? null : nextRunAt, failedCount: 0, lastQueuedAt: null, + runCount: newRunCount, + status: isOneTime || maxRunsReached ? 'completed' : 'active', }, requestId, - `Error updating job ${payload.scheduleId} after completion` - ) - return - } - - const isOneTime = !jobRecord.cronExpression - let nextRunAt: Date | null = null - - if (!isOneTime && jobRecord.cronExpression) { - const validation = validateCronExpression( - jobRecord.cronExpression, - jobRecord.timezone || 'UTC' + `Error updating job ${payload.scheduleId} after success` ) - nextRunAt = validation.nextRun || null - } - - const maxRunsReached = jobRecord.maxRuns && newRunCount >= jobRecord.maxRuns - if (maxRunsReached) { - logger.info(`[${requestId}] Job hit maxRuns limit`, { - scheduleId: payload.scheduleId, - maxRuns: jobRecord.maxRuns, - runCount: newRunCount, - }) + } finally { + timeoutController.cleanup() } - - await applyScheduleUpdate( - payload.scheduleId, - { - lastRanAt: now, - updatedAt: now, - nextRunAt: isOneTime || maxRunsReached ? null : nextRunAt, - failedCount: 0, - lastQueuedAt: null, - runCount: newRunCount, - status: isOneTime || maxRunsReached ? 
'completed' : 'active', - }, - requestId, - `Error updating job ${payload.scheduleId} after success` - ) } catch (error) { const errorMessage = toError(error).message logger.error(`[${requestId}] Job execution failed`, { diff --git a/apps/sim/executor/execution/block-executor.ts b/apps/sim/executor/execution/block-executor.ts index 3803e53ffd6..48d1e970410 100644 --- a/apps/sim/executor/execution/block-executor.ts +++ b/apps/sim/executor/execution/block-executor.ts @@ -103,6 +103,7 @@ export class BlockExecutor { } let resolvedInputs: Record = {} + let inputsForLog: Record = {} const nodeMetadata = { ...this.buildNodeMetadata(node), @@ -120,15 +121,20 @@ export class BlockExecutor { } if (block.metadata?.id === BlockType.FUNCTION) { - const { resolvedInputs: fnInputs, contextVariables } = - this.resolver.resolveInputsForFunctionBlock(ctx, node.id, block.config.params, block) + const { + resolvedInputs: fnInputs, + displayInputs, + contextVariables, + } = this.resolver.resolveInputsForFunctionBlock(ctx, node.id, block.config.params, block) resolvedInputs = { ...fnInputs, [FUNCTION_BLOCK_CONTEXT_VARS_KEY]: contextVariables } + inputsForLog = displayInputs } else { resolvedInputs = this.resolver.resolveInputs(ctx, node.id, block.config.params, block) + inputsForLog = resolvedInputs } if (blockLog) { - blockLog.input = this.sanitizeInputsForLog(resolvedInputs) + blockLog.input = this.sanitizeInputsForLog(inputsForLog) } } catch (error) { cleanupSelfReference?.() @@ -139,7 +145,7 @@ export class BlockExecutor { block, startTime, blockLog, - resolvedInputs, + inputsForLog, isSentinel, 'input_resolution' ) @@ -212,7 +218,7 @@ export class BlockExecutor { ctx, node, block, - this.sanitizeInputsForLog(resolvedInputs), + this.sanitizeInputsForLog(inputsForLog), displayOutput, duration, blockLog.startedAt, @@ -231,7 +237,7 @@ export class BlockExecutor { block, startTime, blockLog, - resolvedInputs, + inputsForLog, isSentinel, 'execution' ) @@ -262,19 +268,18 @@ export class 
BlockExecutor { block: SerializedBlock, startTime: number, blockLog: BlockLog | undefined, - resolvedInputs: Record, + inputsForLog: Record, isSentinel: boolean, phase: 'input_resolution' | 'execution' ): Promise { const endedAt = new Date().toISOString() const duration = performance.now() - startTime const errorMessage = normalizeError(error) - const hasResolvedInputs = - resolvedInputs && typeof resolvedInputs === 'object' && Object.keys(resolvedInputs).length > 0 - const input = - hasResolvedInputs && resolvedInputs - ? resolvedInputs - : ((block.config?.params as Record | undefined) ?? {}) + const hasLogInputs = + inputsForLog && typeof inputsForLog === 'object' && Object.keys(inputsForLog).length > 0 + const input = hasLogInputs + ? inputsForLog + : ((block.config?.params as Record | undefined) ?? {}) const errorOutput: NormalizedBlockOutput = { error: errorMessage, diff --git a/apps/sim/executor/variables/resolver.test.ts b/apps/sim/executor/variables/resolver.test.ts index 915afa54ae6..545d4fa91de 100644 --- a/apps/sim/executor/variables/resolver.test.ts +++ b/apps/sim/executor/variables/resolver.test.ts @@ -66,7 +66,7 @@ describe('VariableResolver function block inputs', () => { const result = resolver.resolveInputsForFunctionBlock(ctx, 'function', undefined, block) - expect(result).toEqual({ resolvedInputs: {}, contextVariables: {} }) + expect(result).toEqual({ resolvedInputs: {}, displayInputs: {}, contextVariables: {} }) }) it('resolves JavaScript block references through globalThis context variables', () => { @@ -80,6 +80,7 @@ describe('VariableResolver function block inputs', () => { ) expect(result.resolvedInputs.code).toBe('return globalThis["__blockRef_0"]') + expect(result.displayInputs.code).toBe('return ') expect(result.contextVariables).toEqual({ __blockRef_0: 'hello world' }) }) @@ -94,6 +95,7 @@ describe('VariableResolver function block inputs', () => { ) expect(result.resolvedInputs.code).toBe('return globals()["__blockRef_0"]') + 
expect(result.displayInputs.code).toBe('return ') expect(result.contextVariables).toEqual({ __blockRef_0: 'hello world' }) }) @@ -110,6 +112,7 @@ describe('VariableResolver function block inputs', () => { expect(result.resolvedInputs.code).toBe( 'a = globals()["__blockRef_0"]\nb = globals()["__blockRef_1"]\nreturn b' ) + expect(result.displayInputs.code).toBe('a = \nb = \nreturn b') expect(result.contextVariables).toEqual({ __blockRef_0: ['a', 'b'], __blockRef_1: ['a', 'b'], @@ -129,6 +132,9 @@ describe('VariableResolver function block inputs', () => { expect(result.resolvedInputs.code).toBe( `echo "\${__blockRef_0}"suffix && echo "\${__blockRef_1}"` ) + expect(result.displayInputs.code).toBe( + 'echo suffix && echo ""' + ) expect(result.contextVariables).toEqual({ __blockRef_0: 'hello world', __blockRef_1: 'hello world', @@ -148,6 +154,7 @@ describe('VariableResolver function block inputs', () => { expect(result.resolvedInputs.code).toBe( `# don't confuse quote tracking\necho "\${__blockRef_0}"` ) + expect(result.displayInputs.code).toBe("# don't confuse quote tracking\necho ") expect(result.contextVariables).toEqual({ __blockRef_0: 'hello world' }) }) }) diff --git a/apps/sim/executor/variables/resolver.ts b/apps/sim/executor/variables/resolver.ts index 2cc9fd89e5b..d8566278e82 100644 --- a/apps/sim/executor/variables/resolver.ts +++ b/apps/sim/executor/variables/resolver.ts @@ -47,27 +47,32 @@ export class VariableResolver { * are stored as named context variables instead of being embedded as JavaScript * literals, preventing large values from bloating the code string. * - * Returns the resolved inputs and a `contextVariables` map. Callers should inject - * contextVariables into the function execution request body so the isolated VM can - * access them as global variables. + * Returns runtime inputs, display inputs, and a `contextVariables` map. 
Callers + * should inject contextVariables into the function execution request body so the + * isolated VM can access them as global variables. */ resolveInputsForFunctionBlock( ctx: ExecutionContext, currentNodeId: string, params: Record | null | undefined, block: SerializedBlock - ): { resolvedInputs: Record; contextVariables: Record } { + ): { + resolvedInputs: Record + displayInputs: Record + contextVariables: Record + } { const contextVariables: Record = {} const resolved: Record = {} + const display: Record = {} if (!params) { - return { resolvedInputs: resolved, contextVariables } + return { resolvedInputs: resolved, displayInputs: display, contextVariables } } for (const [key, value] of Object.entries(params)) { if (key === 'code') { if (typeof value === 'string') { - resolved[key] = this.resolveCodeWithContextVars( + const code = this.resolveCodeWithContextVars( ctx, currentNodeId, value, @@ -75,32 +80,47 @@ export class VariableResolver { block, contextVariables ) + resolved[key] = code.resolvedCode + display[key] = code.displayCode } else if (Array.isArray(value)) { - resolved[key] = value.map((item: any) => { + const resolvedItems: any[] = [] + const displayItems: any[] = [] + for (const item of value) { if (item && typeof item === 'object' && typeof item.content === 'string') { - return { + const code = this.resolveCodeWithContextVars( + ctx, + currentNodeId, + item.content, + undefined, + block, + contextVariables + ) + resolvedItems.push({ ...item, - content: this.resolveCodeWithContextVars( - ctx, - currentNodeId, - item.content, - undefined, - block, - contextVariables - ), - } + content: code.resolvedCode, + }) + displayItems.push({ + ...item, + content: code.displayCode, + }) + continue } - return item - }) + resolvedItems.push(item) + displayItems.push(item) + } + resolved[key] = resolvedItems + display[key] = displayItems } else { resolved[key] = this.resolveValue(ctx, currentNodeId, value, undefined, block) + display[key] = resolved[key] } } 
else { resolved[key] = this.resolveValue(ctx, currentNodeId, value, undefined, block) + display[key] = resolved[key] } } - return { resolvedInputs: resolved, contextVariables } + return { resolvedInputs: resolved, displayInputs: display, contextVariables } } resolveInputs( @@ -230,7 +250,7 @@ export class VariableResolver { loopScope: LoopScope | undefined, block: SerializedBlock, contextVarAccumulator: Record - ): string { + ): { resolvedCode: string; displayCode: string } { const resolutionContext: ResolutionContext = { executionContext: ctx, executionState: this.state, @@ -243,14 +263,21 @@ export class VariableResolver { | undefined let replacementError: Error | null = null + let displayResult = '' + let displayCursor = 0 let result = replaceValidReferences(template, (match, index) => { if (replacementError) return match + displayResult += template.slice(displayCursor, index) + displayCursor = index + match.length try { if (this.blockResolver.canResolve(match)) { const resolved = this.resolveReference(match, resolutionContext) - if (resolved === undefined) return match + if (resolved === undefined) { + displayResult += match + return match + } const effectiveValue = resolved === RESOLVED_EMPTY ? null : resolved @@ -265,21 +292,33 @@ export class VariableResolver { index, effectiveValue ) + displayResult += match return replacement } const resolved = this.resolveReference(match, resolutionContext) - if (resolved === undefined) return match + if (resolved === undefined) { + displayResult += match + return match + } const effectiveValue = resolved === RESOLVED_EMPTY ? 
null : resolved // Non-block reference (loop, parallel, workflow, env): embed as literal - return this.blockResolver.formatValueForBlock(effectiveValue, BlockType.FUNCTION, language) + const replacement = this.blockResolver.formatValueForBlock( + effectiveValue, + BlockType.FUNCTION, + language + ) + displayResult += replacement + return replacement } catch (error) { replacementError = error instanceof Error ? error : new Error(String(error)) + displayResult += match return match } }) + displayResult += template.slice(displayCursor) if (replacementError !== null) { throw replacementError @@ -289,8 +328,12 @@ export class VariableResolver { const resolved = this.resolveReference(match, resolutionContext) return typeof resolved === 'string' ? resolved : match }) + displayResult = displayResult.replace(createEnvVarPattern(), (match) => { + const resolved = this.resolveReference(match, resolutionContext) + return typeof resolved === 'string' ? resolved : match + }) - return result + return { resolvedCode: result, displayCode: displayResult } } private formatContextVariableReference( diff --git a/apps/sim/hooks/queries/deployments.test.ts b/apps/sim/hooks/queries/deployments.test.ts index 20701175468..0e9559d2087 100644 --- a/apps/sim/hooks/queries/deployments.test.ts +++ b/apps/sim/hooks/queries/deployments.test.ts @@ -2,7 +2,7 @@ * @vitest-environment node */ import { beforeEach, describe, expect, it, vi } from 'vitest' -import { invalidateDeploymentQueries } from '@/hooks/queries/deployments' +import { invalidateDeploymentQueries, refetchDeploymentBoundary } from '@/hooks/queries/deployments' import { fetchDeploymentVersionState } from '@/hooks/queries/utils/fetch-deployment-version-state' describe('deployment query helpers', () => { @@ -10,21 +10,41 @@ describe('deployment query helpers', () => { vi.clearAllMocks() }) - it('invalidates the deployment info, state, and versions queries', async () => { + it('invalidates the deployment info, state, versions, and public 
surface queries', async () => { const queryClient = { invalidateQueries: vi.fn().mockResolvedValue(undefined), } await invalidateDeploymentQueries(queryClient as any, 'wf-1') - expect(queryClient.invalidateQueries).toHaveBeenNthCalledWith(1, { + expect(queryClient.invalidateQueries).toHaveBeenCalledTimes(5) + expect(queryClient.invalidateQueries.mock.calls.map(([call]) => call)).toEqual( + expect.arrayContaining([ + { queryKey: ['deployments', 'info', 'wf-1'] }, + { queryKey: ['deployments', 'deployedState', 'wf-1'] }, + { queryKey: ['deployments', 'versions', 'wf-1'] }, + { queryKey: ['deployments', 'chatStatus', 'wf-1'] }, + { queryKey: ['deployments', 'formStatus', 'wf-1'] }, + ]) + ) + }) + + it('refetches the deploy comparison boundary after invalidating it', async () => { + const queryClient = { + invalidateQueries: vi.fn().mockResolvedValue(undefined), + refetchQueries: vi.fn().mockResolvedValue(undefined), + } + + await refetchDeploymentBoundary(queryClient as any, 'wf-1') + + expect(queryClient.refetchQueries).toHaveBeenCalledWith({ queryKey: ['deployments', 'info', 'wf-1'], }) - expect(queryClient.invalidateQueries).toHaveBeenNthCalledWith(2, { + expect(queryClient.refetchQueries).toHaveBeenCalledWith({ queryKey: ['deployments', 'deployedState', 'wf-1'], }) - expect(queryClient.invalidateQueries).toHaveBeenNthCalledWith(3, { - queryKey: ['deployments', 'versions', 'wf-1'], + expect(queryClient.refetchQueries).toHaveBeenCalledWith({ + queryKey: ['workflows', 'state', 'wf-1'], }) }) diff --git a/apps/sim/hooks/queries/deployments.ts b/apps/sim/hooks/queries/deployments.ts index e7b838a3e5e..8370f394320 100644 --- a/apps/sim/hooks/queries/deployments.ts +++ b/apps/sim/hooks/queries/deployments.ts @@ -1,7 +1,7 @@ import { useCallback } from 'react' import { createLogger } from '@sim/logger' import type { QueryClient } from '@tanstack/react-query' -import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query' +import { 
useMutation, useQuery, useQueryClient } from '@tanstack/react-query' import { requestJson, requestRaw } from '@/lib/api/client/request' import { type ActivateDeploymentVersionResponse, @@ -69,6 +69,15 @@ export function invalidateDeploymentQueries(queryClient: QueryClient, workflowId ]) } +export async function refetchDeploymentBoundary(queryClient: QueryClient, workflowId: string) { + await invalidateDeploymentQueries(queryClient, workflowId) + await Promise.all([ + queryClient.refetchQueries({ queryKey: deploymentKeys.info(workflowId) }), + queryClient.refetchQueries({ queryKey: deploymentKeys.deployedState(workflowId) }), + queryClient.refetchQueries({ queryKey: workflowKeys.state(workflowId) }), + ]) +} + export type WorkflowDeploymentInfo = DeploymentInfoResponse & { deployedAt: string | null apiKey: string | null @@ -109,7 +118,6 @@ export function useDeploymentInfo( queryFn: ({ signal }) => fetchDeploymentInfo(workflowId!, signal), enabled: Boolean(workflowId) && (options?.enabled ?? true), staleTime: 30 * 1000, // 30 seconds - placeholderData: keepPreviousData, ...(options?.refetchOnMount !== undefined && { refetchOnMount: options.refetchOnMount }), }) } @@ -141,7 +149,6 @@ export function useDeployedWorkflowState( queryFn: ({ signal }) => fetchDeployedWorkflowState(workflowId!, signal), enabled: Boolean(workflowId) && (options?.enabled ?? true), staleTime: 30 * 1000, - placeholderData: keepPreviousData, }) } @@ -171,7 +178,6 @@ export function useDeploymentVersions(workflowId: string | null, options?: { ena queryFn: ({ signal }) => fetchDeploymentVersions(workflowId!, signal), enabled: Boolean(workflowId) && (options?.enabled ?? true), staleTime: 30 * 1000, // 30 seconds - placeholderData: keepPreviousData, }) } @@ -205,7 +211,6 @@ export function useChatDeploymentStatus( queryFn: ({ signal }) => fetchChatDeploymentStatus(workflowId!, signal), enabled: Boolean(workflowId) && (options?.enabled ?? 
true), staleTime: 30 * 1000, // 30 seconds - placeholderData: keepPreviousData, }) } @@ -229,7 +234,6 @@ export function useChatDetail(chatId: string | null, options?: { enabled?: boole queryFn: ({ signal }) => fetchChatDetail(chatId!, signal), enabled: Boolean(chatId) && (options?.enabled ?? true), staleTime: 30 * 1000, // 30 seconds - placeholderData: keepPreviousData, }) } @@ -239,6 +243,7 @@ export function useChatDetail(chatId: string | null, options?: { enabled?: boole * Returns the combined result. */ export function useChatDeploymentInfo(workflowId: string | null, options?: { enabled?: boolean }) { + const queryClient = useQueryClient() const statusQuery = useChatDeploymentStatus(workflowId, options) const chatId = statusQuery.data?.deployment?.id ?? null @@ -249,10 +254,15 @@ export function useChatDeploymentInfo(workflowId: string | null, options?: { ena const refetch = useCallback(async () => { const statusResult = await statusQuery.refetch() - if (statusResult.data?.deployment?.id) { - await detailQuery.refetch() + const nextChatId = statusResult.data?.deployment?.id + if (nextChatId) { + await queryClient.fetchQuery({ + queryKey: deploymentKeys.chatDetail(nextChatId), + queryFn: ({ signal }) => fetchChatDetail(nextChatId, signal), + staleTime: 30 * 1000, + }) } - }, [statusQuery.refetch, detailQuery.refetch]) + }, [queryClient, statusQuery.refetch]) return { isLoading: @@ -299,15 +309,10 @@ export function useDeployWorkflow() { onSettled: (_data, error, variables) => { if (error) { logger.error('Failed to deploy workflow', { error }) - } else { - logger.info('Workflow deployed successfully', { workflowId: variables.workflowId }) + return invalidateDeploymentQueries(queryClient, variables.workflowId) } - return Promise.all([ - invalidateDeploymentQueries(queryClient, variables.workflowId), - queryClient.invalidateQueries({ - queryKey: workflowKeys.state(variables.workflowId), - }), - ]) + logger.info('Workflow deployed successfully', { workflowId: 
variables.workflowId }) + return refetchDeploymentBoundary(queryClient, variables.workflowId) }, }) } @@ -327,8 +332,8 @@ export function useUndeployWorkflow() { const queryClient = useQueryClient() return useMutation({ - mutationFn: async ({ workflowId }: UndeployWorkflowVariables): Promise => { - await requestJson(undeployWorkflowContract, { + mutationFn: async ({ workflowId }: UndeployWorkflowVariables) => { + return requestJson(undeployWorkflowContract, { params: { id: workflowId }, }) }, diff --git a/apps/sim/hooks/use-collaborative-workflow.ts b/apps/sim/hooks/use-collaborative-workflow.ts index 5f3b16e56ee..3ac879f7eed 100644 --- a/apps/sim/hooks/use-collaborative-workflow.ts +++ b/apps/sim/hooks/use-collaborative-workflow.ts @@ -24,11 +24,19 @@ import { normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants' import { invalidateDeploymentQueries } from '@/hooks/queries/deployments' import { useUndoRedo } from '@/hooks/use-undo-redo' import { useNotificationStore } from '@/stores/notifications' -import { registerEmitFunctions, useOperationQueue } from '@/stores/operation-queue/store' +import { + registerEmitFunctions, + useOperationQueue, + useOperationQueueStore, +} from '@/stores/operation-queue/store' import { usePanelEditorStore } from '@/stores/panel' import { useCodeUndoRedoStore, useUndoRedoStore } from '@/stores/undo-redo' import { useVariablesStore } from '@/stores/variables/store' import { useWorkflowDiffStore } from '@/stores/workflow-diff/store' +import { + applyWorkflowStateToStores, + WORKFLOW_DIFF_SETTLED_EVENT, +} from '@/stores/workflow-diff/utils' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useSubBlockStore } from '@/stores/workflows/subblock/store' import { filterNewEdges, filterValidEdges, mergeSubblockState } from '@/stores/workflows/utils' @@ -154,6 +162,7 @@ export function useCollaborativeWorkflow() { // Track if we're applying remote changes to avoid infinite loops const 
isApplyingRemoteChange = useRef(false) + const reloadSequencesRef = useRef>({}) const { addToQueue, @@ -563,6 +572,26 @@ export function useCollaborativeWorkflow() { } const reloadWorkflowFromApi = async (workflowId: string, reason: string): Promise => { + const reloadSequence = (reloadSequencesRef.current[workflowId] ?? 0) + 1 + reloadSequencesRef.current[workflowId] = reloadSequence + const isLatestReload = () => reloadSequencesRef.current[workflowId] === reloadSequence + const pendingExternalUpdateAtStart = + useWorkflowDiffStore.getState().pendingExternalUpdates[workflowId] ?? 0 + useWorkflowDiffStore.getState().setWorkflowReconciliationInProgress(workflowId, true) + const failLatestReconciliation = (message: string) => { + if (!isLatestReload()) return + const diffStore = useWorkflowDiffStore.getState() + if ((diffStore.pendingExternalUpdates[workflowId] ?? 0) <= pendingExternalUpdateAtStart) { + diffStore.clearExternalUpdatePending(workflowId) + } + diffStore.setWorkflowReconciliationInProgress(workflowId, false) + diffStore.setWorkflowReconciliationError(workflowId, message) + if ((useWorkflowDiffStore.getState().pendingExternalUpdates[workflowId] ?? 
0) > 0) { + window.dispatchEvent( + new CustomEvent(WORKFLOW_DIFF_SETTLED_EVENT, { detail: { workflowId } }) + ) + } + } // The contract's `state` is `workflowStateSchema` (loose at the wire // level — `subBlocks.value` is `unknown`, optional flags omitted), // but downstream consumers (replaceWorkflowState, the undo/redo @@ -579,41 +608,72 @@ export function useCollaborativeWorkflow() { if (wireState) { // double-cast-allowed: workflowStateSchema is structurally a supertype of the store's WorkflowState (subBlocks.value is `unknown`, optional booleans, etc.); the server persists store-shaped values so the runtime shape matches workflowState = wireState as unknown as WorkflowState + if (Object.hasOwn(responseData.data, 'variables')) { + workflowState.variables = responseData.data.variables || {} + } } } catch (error) { logger.error(`Failed to fetch workflow data after ${reason}`, { error }) + failLatestReconciliation( + 'Failed to sync the latest workflow changes. Refresh and try again.' + ) + return false + } + + if (!isLatestReload()) { + logger.debug(`Ignoring stale workflow reload after ${reason}`, { workflowId }) return false } if (!workflowState) { logger.error(`No state found in workflow data after ${reason}`, { workflowId }) + failLatestReconciliation('No workflow state was returned while syncing latest changes.') + return false + } + + if (useWorkflowRegistry.getState().activeWorkflowId !== workflowId) { + logger.debug(`Ignoring workflow reload after active workflow changed`, { workflowId }) + if (isLatestReload()) { + useWorkflowDiffStore.getState().setWorkflowReconciliationInProgress(workflowId, false) + } + return false + } + + const diffStateBeforeApply = useWorkflowDiffStore.getState() + const pendingExternalUpdateBeforeApply = + diffStateBeforeApply.pendingExternalUpdates[workflowId] ?? 
0 + if ( + diffStateBeforeApply.hasActiveDiff || + pendingExternalUpdateBeforeApply > pendingExternalUpdateAtStart || + useOperationQueueStore.getState().hasPendingOperations(workflowId) + ) { + logger.info(`Deferring workflow reload apply after ${reason}`, { workflowId }) + useWorkflowDiffStore.getState().markExternalUpdatePending(workflowId) + if (isLatestReload()) { + useWorkflowDiffStore.getState().setWorkflowReconciliationInProgress(workflowId, false) + if (useWorkflowRegistry.getState().activeWorkflowId === workflowId) { + void replayPendingExternalUpdate( + workflowId, + 'deferred external update after reload apply was skipped' + ) + } + } return false } isApplyingRemoteChange.current = true try { - useWorkflowStore.getState().replaceWorkflowState({ + const stateToApply: WorkflowState = { blocks: workflowState.blocks || {}, edges: workflowState.edges || [], loops: workflowState.loops || {}, parallels: workflowState.parallels || {}, lastSaved: workflowState.lastSaved || Date.now(), - }) - - const subblockValues: Record> = {} - Object.entries(workflowState.blocks || {}).forEach(([blockId, block]) => { - subblockValues[blockId] = {} - Object.entries(block.subBlocks || {}).forEach(([subblockId, subblock]) => { - subblockValues[blockId][subblockId] = subblock?.value - }) - }) - - useSubBlockStore.setState((state) => ({ - workflowValues: { - ...state.workflowValues, - [workflowId]: subblockValues, - }, - })) + } + if (Object.hasOwn(workflowState, 'variables')) { + stateToApply.variables = workflowState.variables || {} + } + applyWorkflowStateToStores(workflowId, stateToApply) const graph = { blocksById: workflowState.blocks || {}, @@ -630,9 +690,47 @@ export function useCollaborativeWorkflow() { }) logger.info(`Successfully reloaded workflow state after ${reason}`, { workflowId }) + const diffStore = useWorkflowDiffStore.getState() + const pendingExternalUpdate = diffStore.pendingExternalUpdates[workflowId] ?? 
0 + if (pendingExternalUpdate <= pendingExternalUpdateAtStart) { + diffStore.clearExternalUpdatePending(workflowId) + } + diffStore.setWorkflowReconciliationError(workflowId, null) return true } finally { isApplyingRemoteChange.current = false + if (isLatestReload()) { + useWorkflowDiffStore.getState().setWorkflowReconciliationInProgress(workflowId, false) + if (useWorkflowRegistry.getState().activeWorkflowId === workflowId) { + void replayPendingExternalUpdate( + workflowId, + 'deferred external update after reconciliation' + ) + } + } + } + } + + const replayPendingExternalUpdate = async (workflowId: string, reason: string) => { + const diffStore = useWorkflowDiffStore.getState() + if ( + useWorkflowRegistry.getState().activeWorkflowId !== workflowId || + diffStore.hasActiveDiff || + diffStore.reconcilingWorkflows[workflowId] || + !diffStore.pendingExternalUpdates[workflowId] + ) { + return + } + + const queueStore = useOperationQueueStore.getState() + if (queueStore.hasPendingOperations(workflowId)) { + return + } + + try { + await reloadWorkflowFromApi(workflowId, reason) + } catch (error) { + logger.error(`Error reloading workflow state after ${reason}:`, error) } } @@ -641,6 +739,7 @@ export function useCollaborativeWorkflow() { logger.info(`Workflow ${workflowId} has been reverted to deployed state`) if (activeWorkflowId !== workflowId) return + useWorkflowDiffStore.getState().markRemoteUpdateSeen(workflowId) try { await reloadWorkflowFromApi(workflowId, 'revert') @@ -655,12 +754,48 @@ export function useCollaborativeWorkflow() { if (activeWorkflowId !== workflowId) return - const { hasActiveDiff } = useWorkflowDiffStore.getState() + const diffStore = useWorkflowDiffStore.getState() + const { hasActiveDiff } = diffStore if (hasActiveDiff) { - logger.info('Skipping workflow-updated: active diff in progress', { workflowId }) + logger.info('Deferring workflow-updated: active diff in progress', { workflowId }) + diffStore.markExternalUpdatePending(workflowId) + 
return + } + + if (diffStore.reconcilingWorkflows[workflowId]) { + logger.info('Deferring workflow-updated: workflow reconciliation is in progress', { + workflowId, + }) + diffStore.markExternalUpdatePending(workflowId) + return + } + + const operationQueue = useOperationQueueStore.getState() + if (operationQueue.hasPendingOperations(workflowId)) { + logger.info('Deferring workflow-updated: local operations are still pending', { + workflowId, + }) + diffStore.markExternalUpdatePending(workflowId) + void operationQueue.waitForWorkflowOperations(workflowId).then((ready) => { + if (!ready) { + const latestQueue = useOperationQueueStore.getState() + if (latestQueue.hasPendingOperations(workflowId) && !latestQueue.hasOperationError) { + return + } + const diffStore = useWorkflowDiffStore.getState() + diffStore.clearExternalUpdatePending(workflowId) + diffStore.setWorkflowReconciliationError( + workflowId, + 'Failed to save local workflow changes before syncing external updates.' + ) + return + } + void replayPendingExternalUpdate(workflowId, 'deferred external update after local save') + }) return } + diffStore.markRemoteUpdateSeen(workflowId) try { await reloadWorkflowFromApi(workflowId, 'external update') } catch (error) { @@ -668,6 +803,16 @@ export function useCollaborativeWorkflow() { } } + const handleDiffSettled = async (event: Event) => { + const customEvent = event as CustomEvent<{ workflowId?: string }> + const workflowId = customEvent.detail?.workflowId + if (!workflowId || activeWorkflowId !== workflowId) return + const diffStore = useWorkflowDiffStore.getState() + if (!diffStore.pendingExternalUpdates[workflowId]) return + + await replayPendingExternalUpdate(workflowId, 'deferred external update') + } + const handleWorkflowDeployed = (data: any) => { const { workflowId } = data logger.info(`Workflow ${workflowId} deployment state changed`) @@ -681,6 +826,12 @@ export function useCollaborativeWorkflow() { const { operationId } = data logger.debug('Operation 
confirmed', { operationId }) confirmOperation(operationId) + if (activeWorkflowId) { + void replayPendingExternalUpdate( + activeWorkflowId, + 'deferred external update after operation confirm' + ) + } } const handleOperationFailed = (data: any) => { @@ -699,6 +850,18 @@ export function useCollaborativeWorkflow() { onWorkflowDeployed(handleWorkflowDeployed) onOperationConfirmed(handleOperationConfirmed) onOperationFailed(handleOperationFailed) + window.addEventListener(WORKFLOW_DIFF_SETTLED_EVENT, handleDiffSettled) + + if (activeWorkflowId) { + void replayPendingExternalUpdate( + activeWorkflowId, + 'pending external update after workflow activation' + ) + } + + return () => { + window.removeEventListener(WORKFLOW_DIFF_SETTLED_EVENT, handleDiffSettled) + } }, [ onWorkflowOperation, onSubblockUpdate, diff --git a/apps/sim/lib/api/contracts/v1/admin/workflows.ts b/apps/sim/lib/api/contracts/v1/admin/workflows.ts index 8ae9dffe35f..daed3f008f2 100644 --- a/apps/sim/lib/api/contracts/v1/admin/workflows.ts +++ b/apps/sim/lib/api/contracts/v1/admin/workflows.ts @@ -141,6 +141,7 @@ export const adminV1DeployResultSchema = z.object({ export const adminV1UndeployResultSchema = z.object({ isDeployed: z.literal(false), + warnings: z.array(z.string()).optional(), }) export const adminV1ExportWorkflowsBodySchema = z.object({ diff --git a/apps/sim/lib/copilot/tools/handlers/workflow/mutations.test.ts b/apps/sim/lib/copilot/tools/handlers/workflow/mutations.test.ts new file mode 100644 index 00000000000..8ca65bbdf2c --- /dev/null +++ b/apps/sim/lib/copilot/tools/handlers/workflow/mutations.test.ts @@ -0,0 +1,127 @@ +/** + * @vitest-environment node + */ +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { ensureWorkflowAccessMock, setWorkflowVariablesMock, recordAuditMock } = vi.hoisted(() => ({ + ensureWorkflowAccessMock: vi.fn(), + setWorkflowVariablesMock: vi.fn(), + recordAuditMock: vi.fn(), +})) + +vi.mock('@sim/audit', () => ({ + AuditAction: { 
WORKFLOW_VARIABLES_UPDATED: 'WORKFLOW_VARIABLES_UPDATED' }, + AuditResourceType: { WORKFLOW: 'WORKFLOW' }, + recordAudit: recordAuditMock, +})) + +vi.mock('@sim/db', () => ({ + db: {}, + workflow: {}, +})) + +vi.mock('@/lib/api-key/auth', () => ({ + createWorkspaceApiKey: vi.fn(), +})) + +vi.mock('@/lib/core/config/env', () => ({ + env: { INTERNAL_API_SECRET: 'secret' }, +})) + +vi.mock('@/lib/core/utils/request', () => ({ + generateRequestId: () => 'request-1', +})) + +vi.mock('@/lib/core/utils/urls', () => ({ + getSocketServerUrl: () => 'http://socket.test', +})) + +vi.mock('@/lib/workflows/executor/execute-workflow', () => ({ + executeWorkflow: vi.fn(), +})) + +vi.mock('@/lib/workflows/executor/execution-state', () => ({ + getExecutionState: vi.fn(), + getLatestExecutionState: vi.fn(), +})) + +vi.mock('@/lib/workflows/orchestration', () => ({ + performDeleteFolder: vi.fn(), + performDeleteWorkflow: vi.fn(), +})) + +vi.mock('@/lib/workflows/persistence/utils', () => ({ + loadWorkflowFromNormalizedTables: vi.fn(), + saveWorkflowToNormalizedTables: vi.fn(), +})) + +vi.mock('@/lib/workflows/sanitization/json-sanitizer', () => ({ + sanitizeForCopilot: vi.fn((state) => state), +})) + +vi.mock('@/lib/workflows/utils', () => ({ + checkForCircularReference: vi.fn(), + createFolderRecord: vi.fn(), + createWorkflowRecord: vi.fn(), + listFolders: vi.fn(), + setWorkflowVariables: setWorkflowVariablesMock, + updateFolderRecord: vi.fn(), + updateWorkflowRecord: vi.fn(), + verifyFolderWorkspace: vi.fn(), +})) + +vi.mock('@/executor/utils/errors', () => ({ + hasExecutionResult: vi.fn(() => false), +})) + +vi.mock('../access', () => ({ + ensureWorkflowAccess: ensureWorkflowAccessMock, + ensureWorkspaceAccess: vi.fn(), + getDefaultWorkspaceId: vi.fn(), +})) + +import { executeSetGlobalWorkflowVariables } from './mutations' + +describe('executeSetGlobalWorkflowVariables', () => { + beforeEach(() => { + vi.clearAllMocks() + global.fetch = vi.fn().mockResolvedValue(new Response(null, 
{ status: 200 })) as typeof fetch + ensureWorkflowAccessMock.mockResolvedValue({ + workflow: { + id: 'workflow-1', + variables: {}, + }, + }) + setWorkflowVariablesMock.mockResolvedValue(undefined) + }) + + it('persists variable changes and notifies clients that workflow state changed', async () => { + const result = await executeSetGlobalWorkflowVariables( + { + workflowId: 'workflow-1', + operations: [{ operation: 'add', name: 'threshold', type: 'number', value: '5' }], + }, + { userId: 'user-1' } as any + ) + + expect(result.success).toBe(true) + const [, variables] = setWorkflowVariablesMock.mock.calls[0] + expect(Object.values(variables)).toEqual([ + expect.objectContaining({ + workflowId: 'workflow-1', + name: 'threshold', + type: 'number', + value: 5, + }), + ]) + expect(global.fetch).toHaveBeenCalledWith('http://socket.test/api/workflow-updated', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'x-api-key': 'secret', + }, + body: JSON.stringify({ workflowId: 'workflow-1' }), + }) + expect(recordAuditMock).toHaveBeenCalled() + }) +}) diff --git a/apps/sim/lib/copilot/tools/handlers/workflow/mutations.ts b/apps/sim/lib/copilot/tools/handlers/workflow/mutations.ts index f70d5ff64bf..9973c8b4db7 100644 --- a/apps/sim/lib/copilot/tools/handlers/workflow/mutations.ts +++ b/apps/sim/lib/copilot/tools/handlers/workflow/mutations.ts @@ -464,6 +464,7 @@ export async function executeSetGlobalWorkflowVariables( assertWorkflowMutationNotAborted(context) await setWorkflowVariables(workflowId, nextVarsRecord) + notifyWorkflowUpdated(workflowId) recordAudit({ actorId: context.userId, diff --git a/apps/sim/lib/core/async-jobs/backends/database.ts b/apps/sim/lib/core/async-jobs/backends/database.ts index 4c96bb86e94..afaa44a3641 100644 --- a/apps/sim/lib/core/async-jobs/backends/database.ts +++ b/apps/sim/lib/core/async-jobs/backends/database.ts @@ -73,20 +73,23 @@ export class DatabaseJobQueue implements JobQueueBackend { payload: TPayload, options?: 
EnqueueOptions ): Promise { - const jobId = `run_${generateId().replace(/-/g, '').slice(0, 20)}` + const jobId = options?.jobId ?? `run_${generateId().replace(/-/g, '').slice(0, 20)}` const now = new Date() - await db.insert(asyncJobs).values({ - id: jobId, - type, - payload: payload as Record, - status: JOB_STATUS.PENDING, - createdAt: now, - attempts: 0, - maxAttempts: options?.maxAttempts ?? 3, - metadata: (options?.metadata ?? {}) as Record, - updatedAt: now, - }) + await db + .insert(asyncJobs) + .values({ + id: jobId, + type, + payload: payload as Record, + status: JOB_STATUS.PENDING, + createdAt: now, + attempts: 0, + maxAttempts: options?.maxAttempts ?? 3, + metadata: (options?.metadata ?? {}) as Record, + updatedAt: now, + }) + .onConflictDoNothing() logger.debug('Enqueued job', { jobId, type }) if (options?.runner) { diff --git a/apps/sim/lib/core/async-jobs/backends/trigger-dev.ts b/apps/sim/lib/core/async-jobs/backends/trigger-dev.ts index e7d04bb7352..97a7428b310 100644 --- a/apps/sim/lib/core/async-jobs/backends/trigger-dev.ts +++ b/apps/sim/lib/core/async-jobs/backends/trigger-dev.ts @@ -1,5 +1,5 @@ import { createLogger } from '@sim/logger' -import { runs, tasks } from '@trigger.dev/sdk' +import { runs, type TriggerOptions, tasks } from '@trigger.dev/sdk' import { type EnqueueOptions, JOB_STATUS, @@ -73,9 +73,13 @@ export class TriggerDevJobQueue implements JobQueueBackend { : payload const tags = buildTags(options) - const triggerOptions: Parameters[2] = {} + const triggerOptions: TriggerOptions = {} if (tags.length > 0) triggerOptions.tags = tags if (options?.concurrencyKey) triggerOptions.concurrencyKey = options.concurrencyKey + if (options?.jobId) { + triggerOptions.idempotencyKey = options.jobId + triggerOptions.idempotencyKeyTTL = '14d' + } const handle = await tasks.trigger(taskId, enrichedPayload, triggerOptions) logger.debug('Enqueued job via trigger.dev', { jobId: handle.id, type, taskId, tags }) diff --git 
a/apps/sim/lib/core/outbox/service.test.ts b/apps/sim/lib/core/outbox/service.test.ts index 338c645ac36..48adcd0fcc5 100644 --- a/apps/sim/lib/core/outbox/service.test.ts +++ b/apps/sim/lib/core/outbox/service.test.ts @@ -58,7 +58,8 @@ const { state, mockDb } = vi.hoisted(() => { if ( row.set.status === 'completed' || row.set.status === 'dead_letter' || - (row.set.status === 'pending' && 'attempts' in row.set && 'availableAt' in row.set) + (row.set.status === 'pending' && 'attempts' in row.set && 'availableAt' in row.set) || + (!('status' in row.set) && 'attempts' in row.set && 'lockedAt' in row.set) ) { return state.leaseHeld ? [{ id: 'evt-1' }] : [] } @@ -75,7 +76,7 @@ const { state, mockDb } = vi.hoisted(() => { chain.where = vi.fn(self) chain.orderBy = vi.fn(self) chain.limit = vi.fn(self) - chain.for = vi.fn(async () => state.claimedRows) + chain.for = vi.fn(async () => state.claimedRows.splice(0, 1)) return chain } @@ -339,7 +340,7 @@ describe('processOutboxEvents — handler timeout', () => { vi.useRealTimers() }) - it('times out a stuck handler and schedules retry', async () => { + it('times out a stuck handler without releasing it for overlapping retry', async () => { const neverResolves = vi.fn(() => new Promise(() => {})) state.claimedRows = [makePendingRow({ attempts: 0 })] @@ -349,9 +350,12 @@ describe('processOutboxEvents — handler timeout', () => { await vi.advanceTimersByTimeAsync(90 * 1000 + 1) const result = await promise - expect(result.retried).toBe(1) - const retryUpdate = state.updates.find((u) => u.set.status === 'pending' && 'attempts' in u.set) - expect(retryUpdate?.set.lastError).toMatch(/timed out/) + expect(result.leaseLost).toBe(1) + const timeoutUpdate = state.updates.find( + (u) => !('status' in u.set) && 'attempts' in u.set && 'lockedAt' in u.set + ) + expect(timeoutUpdate?.set.attempts).toBe(1) + expect(timeoutUpdate?.set.lastError).toMatch(/timed out/) }) }) diff --git a/apps/sim/lib/core/outbox/service.ts 
b/apps/sim/lib/core/outbox/service.ts index eb087d02c8e..eaf32974800 100644 --- a/apps/sim/lib/core/outbox/service.ts +++ b/apps/sim/lib/core/outbox/service.ts @@ -18,6 +18,13 @@ const BASE_BACKOFF_MS = 1000 // 1 second, doubled per attempt // a worker is still actively processing. const DEFAULT_HANDLER_TIMEOUT_MS = 90 * 1000 // 90 seconds +class OutboxHandlerTimeoutError extends Error { + constructor(timeoutMs: number) { + super(`Outbox handler timed out after ${timeoutMs}ms`) + this.name = 'OutboxHandlerTimeoutError' + } +} + /** * Context passed to every outbox handler. Use `eventId` as the Stripe * (or any external service) idempotency key so that handler retries @@ -60,6 +67,14 @@ export interface ProcessOutboxResult { reaped: number } +export type ProcessSingleOutboxResult = + | 'completed' + | 'pending' + | 'dead_letter' + | 'lease_lost' + | 'not_found' + | 'processing' + /** * Transactional outbox for reliable "DB write + external system" flows. * @@ -104,23 +119,25 @@ export async function enqueueOutboxEvent( */ export async function processOutboxEvents( handlers: OutboxHandlerRegistry, - options: { batchSize?: number } = {} + options: { batchSize?: number; maxRuntimeMs?: number; minRemainingMs?: number } = {} ): Promise { const batchSize = options.batchSize ?? 10 + const deadline = options.maxRuntimeMs ? Date.now() + options.maxRuntimeMs : undefined + const minRemainingMs = options.minRemainingMs ?? 
DEFAULT_HANDLER_TIMEOUT_MS + 5000 const reaped = await reapStuckProcessingRows() - const claimed = await claimBatch(batchSize) - if (claimed.length === 0) { - return { processed: 0, retried: 0, deadLettered: 0, leaseLost: 0, reaped } - } - let processed = 0 let retried = 0 let deadLettered = 0 let leaseLost = 0 - for (const event of claimed) { + for (let i = 0; i < batchSize; i++) { + if (deadline && Date.now() + minRemainingMs > deadline) break + + const [event] = await claimBatch(1) + if (!event) break + const result = await runHandler(event, handlers) if (result === 'completed') processed++ else if (result === 'dead_letter') deadLettered++ @@ -131,6 +148,52 @@ export async function processOutboxEvents( return { processed, retried, deadLettered, leaseLost, reaped } } +/** + * Process a specific outbox event immediately after its surrounding + * transaction commits. Safe to race with the cron worker: the claim uses + * `FOR UPDATE SKIP LOCKED`, and non-pending rows are left alone. + */ +export async function processOutboxEventById( + eventId: string, + handlers: OutboxHandlerRegistry +): Promise { + const now = new Date() + const event = await db.transaction(async (tx) => { + const [row] = await tx + .select() + .from(outboxEvent) + .where(eq(outboxEvent.id, eventId)) + .limit(1) + .for('update', { skipLocked: true }) + + if (!row) return null + if (row.status !== 'pending') return row.status as ProcessSingleOutboxResult + if (row.availableAt > now) return 'pending' as const + + await tx + .update(outboxEvent) + .set({ status: 'processing', lockedAt: now }) + .where(eq(outboxEvent.id, eventId)) + + return { + ...row, + status: 'processing' as const, + lockedAt: now, + } + }) + + if (!event) { + const [current] = await db + .select({ status: outboxEvent.status }) + .from(outboxEvent) + .where(eq(outboxEvent.id, eventId)) + .limit(1) + return current ? 
(current.status as ProcessSingleOutboxResult) : 'not_found' + } + if (typeof event === 'string') return event + return runHandler(event, handlers) +} + /** * Reaper: move `processing` rows whose worker died (stale `lockedAt`) * back to `pending` so another worker can pick them up. Without this, @@ -249,6 +312,10 @@ async function runHandler( }) return 'completed' } catch (error) { + if (error instanceof OutboxHandlerTimeoutError) { + return recordTimedOutAttempt(event, error.message) + } + const nextAttempts = event.attempts + 1 const isDead = nextAttempts >= event.maxAttempts const errMsg = toError(error).message @@ -277,33 +344,134 @@ async function runHandler( return 'dead_letter' } - // Exponential backoff, capped at MAX_BACKOFF_MS. - const backoffMs = Math.min(MAX_BACKOFF_MS, BASE_BACKOFF_MS * 2 ** nextAttempts) - const nextAvailableAt = new Date(Date.now() + backoffMs) + return scheduleRetry(event, errMsg) + } +} + +async function recordTimedOutAttempt( + event: typeof outboxEvent.$inferSelect, + errMsg: string +): Promise<'dead_letter' | 'lease_lost'> { + const nextAttempts = event.attempts + 1 + const isDead = nextAttempts >= event.maxAttempts + + if (isDead) { + const updated = await updateIfLeaseHeld(event, { + attempts: nextAttempts, + status: 'dead_letter', + lastError: errMsg, + processedAt: new Date(), + lockedAt: null, + }) + if (!updated) return 'lease_lost' + logger.error('Outbox event dead-lettered after handler timeout max attempts', { + eventId: event.id, + eventType: event.eventType, + attempts: nextAttempts, + error: errMsg, + }) + return 'dead_letter' + } + + const updated = await updateProcessingIfLeaseHeld(event, { + attempts: nextAttempts, + lastError: errMsg, + lockedAt: new Date(), + }) + if (!updated) return 'lease_lost' + + logger.warn('Outbox event handler timed out; leaving lease for stuck-row reaper', { + eventId: event.id, + eventType: event.eventType, + attempts: nextAttempts, + reaperThresholdMs: STUCK_PROCESSING_THRESHOLD_MS, + 
error: errMsg, + }) + return 'lease_lost' +} + +async function scheduleRetry( + event: typeof outboxEvent.$inferSelect, + errMsg: string, + minimumBackoffMs = 0 +): Promise<'pending' | 'dead_letter' | 'lease_lost'> { + const nextAttempts = event.attempts + 1 + const isDead = nextAttempts >= event.maxAttempts + + if (isDead) { const updated = await updateIfLeaseHeld(event, { attempts: nextAttempts, - status: 'pending', + status: 'dead_letter', lastError: errMsg, - availableAt: nextAvailableAt, + processedAt: new Date(), lockedAt: null, }) if (!updated) { - logger.warn('Outbox event retry-schedule skipped — lease lost', { + logger.warn('Outbox event dead-letter skipped — lease lost', { eventId: event.id, eventType: event.eventType, }) return 'lease_lost' } - logger.warn('Outbox event failed, scheduled retry', { + logger.error('Outbox event dead-lettered after max attempts', { eventId: event.id, eventType: event.eventType, attempts: nextAttempts, - backoffMs, - nextAvailableAt: nextAvailableAt.toISOString(), error: errMsg, }) - return 'pending' + return 'dead_letter' + } + + const backoffMs = Math.max( + minimumBackoffMs, + Math.min(MAX_BACKOFF_MS, BASE_BACKOFF_MS * 2 ** nextAttempts) + ) + const nextAvailableAt = new Date(Date.now() + backoffMs) + const updated = await updateIfLeaseHeld(event, { + attempts: nextAttempts, + status: 'pending', + lastError: errMsg, + availableAt: nextAvailableAt, + lockedAt: null, + }) + if (!updated) { + logger.warn('Outbox event retry-schedule skipped — lease lost', { + eventId: event.id, + eventType: event.eventType, + }) + return 'lease_lost' + } + logger.warn('Outbox event failed, scheduled retry', { + eventId: event.id, + eventType: event.eventType, + attempts: nextAttempts, + backoffMs, + nextAvailableAt: nextAvailableAt.toISOString(), + error: errMsg, + }) + return 'pending' +} + +async function updateProcessingIfLeaseHeld( + event: typeof outboxEvent.$inferSelect, + patch: { + attempts: number + lastError: string + lockedAt: 
Date } +): Promise { + const whereClauses = [eq(outboxEvent.id, event.id), eq(outboxEvent.status, 'processing')] + if (event.lockedAt) { + whereClauses.push(eq(outboxEvent.lockedAt, event.lockedAt)) + } + + const result = await db + .update(outboxEvent) + .set(patch) + .where(and(...whereClauses)) + .returning({ id: outboxEvent.id }) + + return result.length > 0 } function runHandlerWithTimeout( @@ -319,7 +487,7 @@ function runHandlerWithTimeout( return new Promise((resolve, reject) => { const timeout = setTimeout(() => { - reject(new Error(`Outbox handler timed out after ${timeoutMs}ms`)) + reject(new OutboxHandlerTimeoutError(timeoutMs)) }, timeoutMs) handler(event.payload, context) diff --git a/apps/sim/lib/mcp/workflow-mcp-sync.ts b/apps/sim/lib/mcp/workflow-mcp-sync.ts index 930697bb94e..06b6e28e6a4 100644 --- a/apps/sim/lib/mcp/workflow-mcp-sync.ts +++ b/apps/sim/lib/mcp/workflow-mcp-sync.ts @@ -1,6 +1,7 @@ import { db, workflowMcpServer, workflowMcpTool } from '@sim/db' import { createLogger } from '@sim/logger' import { and, eq, inArray, isNull } from 'drizzle-orm' +import type { DbOrTx } from '@/lib/db/types' import { loadDeployedWorkflowState } from '@/lib/workflows/persistence/utils' import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils' import type { WorkflowState } from '@/stores/workflows/workflow/types' @@ -46,6 +47,9 @@ interface SyncOptions { state?: { blocks?: Record } /** Context for logging (e.g., 'deploy', 'revert', 'activate') */ context?: string + tx?: DbOrTx + notify?: boolean + throwOnError?: boolean } /** @@ -58,17 +62,27 @@ interface SyncOptions { * @param options.state - Optional workflow state (if not provided, loads from DB) * @param options.context - Optional context for log messages */ -export async function syncMcpToolsForWorkflow(options: SyncOptions): Promise { - const { workflowId, requestId, state, context = 'sync' } = options +export async function syncMcpToolsForWorkflow( + options: SyncOptions +): 
Promise> { + const { + workflowId, + requestId, + state, + context = 'sync', + tx = db, + notify = true, + throwOnError = false, + } = options try { - const tools = await db + const tools = await tx .select({ id: workflowMcpTool.id, serverId: workflowMcpTool.serverId }) .from(workflowMcpTool) .where(and(eq(workflowMcpTool.workflowId, workflowId), isNull(workflowMcpTool.archivedAt))) if (tools.length === 0) { - return + return [] } let workflowState: { blocks?: Record } | null = state ?? null @@ -77,19 +91,19 @@ export async function syncMcpToolsForWorkflow(options: SyncOptions): Promise { + requestId: string, + tx: DbOrTx = db, + notify = true, + throwOnError = false +): Promise> { try { - const tools = await db + const tools = await tx .select({ id: workflowMcpTool.id, serverId: workflowMcpTool.serverId }) .from(workflowMcpTool) .where(and(eq(workflowMcpTool.workflowId, workflowId), isNull(workflowMcpTool.archivedAt))) - if (tools.length === 0) return + if (tools.length === 0) return [] - await db.delete(workflowMcpTool).where(eq(workflowMcpTool.workflowId, workflowId)) + await tx.delete(workflowMcpTool).where(eq(workflowMcpTool.workflowId, workflowId)) logger.info(`[${requestId}] Removed MCP tools for workflow: ${workflowId}`) - notifyAffectedServers(tools) + if (notify) notifyMcpToolServers(tools) + return tools } catch (error) { logger.error(`[${requestId}] Error removing MCP tools:`, error) + if (throwOnError) throw error + return [] } } @@ -136,7 +159,7 @@ export async function removeMcpToolsForWorkflow( * Publish pubsub events for each unique server affected by a tool change. * Resolves workspace IDs from the server table so callers don't need to pass them. 
*/ -function notifyAffectedServers(tools: Array<{ serverId: string }>): void { +export function notifyMcpToolServers(tools: Array<{ serverId: string }>): void { if (!mcpPubSub) return const uniqueServerIds = [...new Set(tools.map((t) => t.serverId))] diff --git a/apps/sim/lib/table/service.ts b/apps/sim/lib/table/service.ts index cfdb544f7c5..987e2ff2014 100644 --- a/apps/sim/lib/table/service.ts +++ b/apps/sim/lib/table/service.ts @@ -14,6 +14,7 @@ import { getPostgresErrorCode } from '@sim/utils/errors' import { generateId } from '@sim/utils/id' import { and, count, eq, gt, gte, inArray, isNull, type SQL, sql } from 'drizzle-orm' import { generateRestoreName } from '@/lib/core/utils/restore-name' +import type { DbOrTx } from '@/lib/db/types' import { COLUMN_TYPES, NAME_PATTERN, TABLE_LIMITS, USER_TABLE_ROWS_SQL_NAME } from './constants' import { buildFilterClause, buildSortClause } from './sql' import { fireTableTrigger } from './trigger' @@ -688,13 +689,16 @@ export async function pruneStaleWorkflowGroupOutputs({ workspaceId, validBlockIds, requestId, + tx, }: { workflowId: string workspaceId: string validBlockIds: Set requestId: string + tx?: DbOrTx }): Promise { - const tables = await db + const executor = tx ?? 
db + const tables = await executor .select({ id: userTableDefinitions.id, schema: userTableDefinitions.schema, @@ -729,7 +733,7 @@ export async function pruneStaleWorkflowGroupOutputs({ if (!mutated) continue - await db + await executor .update(userTableDefinitions) .set({ schema: { ...schema, workflowGroups: nextGroups }, diff --git a/apps/sim/lib/webhooks/deploy.ts b/apps/sim/lib/webhooks/deploy.ts index 2a5c5e3dc0d..6f9b97b84f3 100644 --- a/apps/sim/lib/webhooks/deploy.ts +++ b/apps/sim/lib/webhooks/deploy.ts @@ -1,8 +1,8 @@ import { db } from '@sim/db' -import { webhook } from '@sim/db/schema' +import { account, credentialSetMember, webhook, workflowDeploymentVersion } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { generateShortId } from '@sim/utils/id' -import { and, eq, inArray, isNull } from 'drizzle-orm' +import { and, eq, inArray, isNotNull, isNull, or } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { getProviderIdFromServiceId } from '@/lib/oauth' import { PendingWebhookVerificationTracker } from '@/lib/webhooks/pending-verification' @@ -27,13 +27,101 @@ interface TriggerSaveError { message: string status: number } - interface TriggerSaveResult { success: boolean error?: TriggerSaveError warnings?: string[] } +export async function validateTriggerWebhookConfigForDeploy( + blocks: Record +): Promise { + const triggerBlocks = Object.values(blocks || {}).filter((b) => b && b.enabled !== false) + + for (const block of triggerBlocks) { + const triggerId = resolveTriggerId(block) + if (!triggerId || !isTriggerValid(triggerId)) continue + + const triggerDef = getTrigger(triggerId) + const provider = triggerDef.provider + const { providerConfig, missingFields } = buildProviderConfig(block, triggerId, triggerDef) + + if (missingFields.length > 0) { + return { + success: false, + error: { + message: `Missing required fields for ${triggerDef.name || triggerId}: ${missingFields.join(', ')}`, + status: 400, + }, + 
} + } + + if (providerConfig.requireAuth && !providerConfig.token) { + return { + success: false, + error: { + message: + 'Authentication is enabled but no token is configured. Please set an authentication token or disable authentication.', + status: 400, + }, + } + } + + if (providerConfig.credentialSetId) { + const oauthProviderId = getProviderIdFromServiceId(provider) + const hasCredential = await credentialSetHasProviderCredential( + providerConfig.credentialSetId as string, + oauthProviderId + ) + if (!hasCredential) { + return { + success: false, + error: { + message: `No valid credentials found in credential set for ${provider}. Please connect accounts and try again.`, + status: 400, + }, + } + } + } + } + + return { success: true } +} + +async function credentialSetHasProviderCredential( + credentialSetId: string, + providerId: string +): Promise { + const members = await db + .select({ userId: credentialSetMember.userId }) + .from(credentialSetMember) + .where( + and( + eq(credentialSetMember.credentialSetId, credentialSetId), + eq(credentialSetMember.status, 'active') + ) + ) + + if (members.length === 0) return false + + const [credential] = await db + .select({ id: account.id }) + .from(account) + .where( + and( + inArray( + account.userId, + members.map((member) => member.userId) + ), + eq(account.providerId, providerId), + or(isNotNull(account.accessToken), isNotNull(account.refreshToken)) + ) + ) + .limit(1) + + return Boolean(credential) +} + interface CredentialSetSyncResult { error: TriggerSaveError | null warnings: string[] @@ -47,16 +135,12 @@ interface SaveTriggerWebhooksInput { blocks: Record requestId: string deploymentVersionId?: string - /** - * The previous active version's ID. Only this version's external subscriptions - * will be cleaned up (along with draft webhooks). If not provided, skips cleanup. - */ - previousVersionId?: string /** * When true, forces recreation of external subscriptions even if webhook config is unchanged. 
* Used when activating a previous deployment version whose subscriptions were cleaned up. */ forceRecreateSubscriptions?: boolean + strictExternalCleanup?: boolean } function getSubBlockValue(block: BlockState, subBlockId: string): unknown { @@ -371,9 +455,12 @@ export async function saveTriggerWebhooksForDeploy({ blocks, requestId, deploymentVersionId, - previousVersionId, forceRecreateSubscriptions = false, + strictExternalCleanup = false, }: SaveTriggerWebhooksInput): Promise { + const validationResult = await validateTriggerWebhookConfigForDeploy(blocks) + if (!validationResult.success) return validationResult + const triggerBlocks = Object.values(blocks || {}).filter((b) => b && b.enabled !== false) const currentBlockIds = new Set(triggerBlocks.map((b) => b.id)) @@ -392,36 +479,6 @@ export async function saveTriggerWebhooksForDeploy({ } } - if (previousVersionId) { - const webhooksToCleanup = allWorkflowWebhooks.filter( - (wh) => wh.deploymentVersionId === previousVersionId - ) - - if (webhooksToCleanup.length > 0) { - logger.info( - `[${requestId}] Cleaning up ${webhooksToCleanup.length} external subscription(s) from previous version` - ) - for (const wh of webhooksToCleanup) { - try { - await cleanupExternalWebhook(wh, workflow, requestId) - } catch (cleanupError) { - logger.warn(`[${requestId}] Failed to cleanup external webhook ${wh.id}`, cleanupError) - } - } - } - } - - const restorePreviousSubscriptions = async () => { - if (!previousVersionId) return - await restorePreviousVersionWebhooks({ - request, - workflow, - userId, - previousVersionId, - requestId, - }) - } - const webhooksByBlockId = new Map() for (const wh of existingWebhooks) { if (!wh.blockId) continue @@ -461,7 +518,6 @@ export async function saveTriggerWebhooksForDeploy({ ) if (missingFields.length > 0) { - await restorePreviousSubscriptions() return { success: false, error: { @@ -472,7 +528,6 @@ export async function saveTriggerWebhooksForDeploy({ } if (providerConfig.requireAuth && 
!providerConfig.token) { - await restorePreviousSubscriptions() return { success: false, error: { @@ -544,15 +599,20 @@ export async function saveTriggerWebhooksForDeploy({ }) for (const wh of webhooksToDelete) { + let cleanupSucceeded = false try { - await cleanupExternalWebhook(wh, workflow, requestId) + await cleanupExternalWebhook(wh, workflow, requestId, { + throwOnError: strictExternalCleanup, + }) + cleanupSucceeded = true } catch (cleanupError) { logger.warn(`[${requestId}] Failed to cleanup external webhook ${wh.id}`, cleanupError) + if (strictExternalCleanup) throw cleanupError + } + if (!strictExternalCleanup || cleanupSucceeded) { + await db.delete(webhook).where(eq(webhook.id, wh.id)) } } - - const idsToDelete = webhooksToDelete.map((wh) => wh.id) - await db.delete(webhook).where(inArray(webhook.id, idsToDelete)) } const collectedWarnings: string[] = [] @@ -579,12 +639,10 @@ export async function saveTriggerWebhooksForDeploy({ } if (syncResult.error) { - await restorePreviousSubscriptions() return { success: false, error: syncResult.error, warnings: collectedWarnings } } } catch (error: unknown) { logger.error(`[${requestId}] Failed to create webhook for ${block.id}`, error) - await restorePreviousSubscriptions() return { success: false, error: { @@ -648,6 +706,7 @@ export async function saveTriggerWebhooksForDeploy({ } catch (error: unknown) { logger.error(`[${requestId}] Failed to create external subscription for ${block.id}`, error) await pendingVerificationTracker.clearAll() + let cleanupFailure: unknown for (const sub of createdSubscriptions) { if (sub.externalSubscriptionCreated) { try { @@ -659,21 +718,31 @@ export async function saveTriggerWebhooksForDeploy({ providerConfig: sub.updatedProviderConfig, }, workflow, - requestId + requestId, + { throwOnError: strictExternalCleanup } ) } catch (cleanupError) { + cleanupFailure = cleanupError logger.warn( `[${requestId}] Failed to cleanup external subscription for ${sub.block.id}`, cleanupError ) + 
await persistCreatedWebhookRecordAfterCleanupFailure({ + workflowId, + deploymentVersionId, + sub, + requestId, + }) } } } - await restorePreviousSubscriptions() return { success: false, error: { - message: (error as Error)?.message || 'Failed to create external subscription', + message: + (cleanupFailure as Error)?.message || + (error as Error)?.message || + 'Failed to create external subscription', status: 500, }, } @@ -714,6 +783,7 @@ export async function saveTriggerWebhooksForDeploy({ `[${requestId}] Polling configuration failed for ${sub.block.id}`, pollingError ) + const cleanedWebhookIds: string[] = [] for (const otherSub of createdSubscriptions) { if (otherSub.webhookId === sub.webhookId) continue if (otherSub.externalSubscriptionCreated) { @@ -726,29 +796,30 @@ export async function saveTriggerWebhooksForDeploy({ providerConfig: otherSub.updatedProviderConfig, }, workflow, - requestId + requestId, + { throwOnError: strictExternalCleanup } ) + cleanedWebhookIds.push(otherSub.webhookId) } catch (cleanupError) { logger.warn( `[${requestId}] Failed to cleanup external subscription for ${otherSub.block.id}`, cleanupError ) } + } else { + cleanedWebhookIds.push(otherSub.webhookId) } } - const otherWebhookIds = createdSubscriptions - .filter((s) => s.webhookId !== sub.webhookId) - .map((s) => s.webhookId) - if (otherWebhookIds.length > 0) { - await db.delete(webhook).where(inArray(webhook.id, otherWebhookIds)) + if (cleanedWebhookIds.length > 0) { + await db.delete(webhook).where(inArray(webhook.id, cleanedWebhookIds)) } - await restorePreviousSubscriptions() return { success: false, error: pollingError } } } } catch (error: unknown) { await pendingVerificationTracker.clearAll() logger.error(`[${requestId}] Failed to insert webhook records`, error) + let cleanupFailure: unknown for (const sub of createdSubscriptions) { if (sub.externalSubscriptionCreated) { try { @@ -760,21 +831,31 @@ export async function saveTriggerWebhooksForDeploy({ providerConfig: 
sub.updatedProviderConfig, }, workflow, - requestId + requestId, + { throwOnError: strictExternalCleanup } ) } catch (cleanupError) { + cleanupFailure = cleanupError logger.warn( `[${requestId}] Failed to cleanup external subscription for ${sub.block.id}`, cleanupError ) + await persistCreatedWebhookRecordAfterCleanupFailure({ + workflowId, + deploymentVersionId, + sub, + requestId, + }) } } } - await restorePreviousSubscriptions() return { success: false, error: { - message: (error as Error)?.message || 'Failed to save webhook records', + message: + (cleanupFailure as Error)?.message || + (error as Error)?.message || + 'Failed to save webhook records', status: 500, }, } @@ -783,6 +864,45 @@ export async function saveTriggerWebhooksForDeploy({ return { success: true, warnings: collectedWarnings.length > 0 ? collectedWarnings : undefined } } +async function persistCreatedWebhookRecordAfterCleanupFailure({ + workflowId, + deploymentVersionId, + sub, + requestId, +}: { + workflowId: string + deploymentVersionId?: string + sub: { + webhookId: string + block: BlockState + provider: string + triggerPath: string + updatedProviderConfig: Record + } + requestId: string +}): Promise { + try { + await db.insert(webhook).values({ + id: sub.webhookId, + workflowId, + deploymentVersionId: deploymentVersionId || null, + blockId: sub.block.id, + path: sub.triggerPath, + provider: sub.provider, + providerConfig: sub.updatedProviderConfig, + credentialSetId: (sub.updatedProviderConfig.credentialSetId as string | undefined) || null, + isActive: true, + createdAt: new Date(), + updatedAt: new Date(), + }) + } catch (persistError) { + logger.error( + `[${requestId}] Failed to persist webhook record after external cleanup failure`, + persistError + ) + } +} + /** * Clean up all webhooks for a workflow during undeploy. * Removes external subscriptions and deletes webhook records from the database. 
@@ -793,8 +913,10 @@ export async function cleanupWebhooksForWorkflow( workflowId: string, workflow: Record, requestId: string, - deploymentVersionId?: string, - skipExternalCleanup = false + deploymentVersionId?: string | null, + skipExternalCleanup = false, + strictExternalCleanup = false, + shouldDeleteWebhook?: () => Promise ): Promise { const existingWebhooks = await db .select() @@ -806,7 +928,13 @@ export async function cleanupWebhooksForWorkflow( eq(webhook.deploymentVersionId, deploymentVersionId), isNull(webhook.archivedAt) ) - : and(eq(webhook.workflowId, workflowId), isNull(webhook.archivedAt)) + : deploymentVersionId === null + ? and( + eq(webhook.workflowId, workflowId), + isNull(webhook.deploymentVersionId), + isNull(webhook.archivedAt) + ) + : and(eq(webhook.workflowId, workflowId), isNull(webhook.archivedAt)) ) if (existingWebhooks.length === 0) { @@ -824,26 +952,59 @@ export async function cleanupWebhooksForWorkflow( if (!skipExternalCleanup) { for (const wh of existingWebhooks) { + if (shouldDeleteWebhook && !(await shouldDeleteWebhook())) { + logger.info(`[${requestId}] Stopping webhook cleanup because deployment became active`, { + workflowId, + deploymentVersionId, + webhookId: wh.id, + }) + return + } + try { - await cleanupExternalWebhook(wh, workflow, requestId) + await cleanupExternalWebhook(wh, workflow, requestId, { + throwOnError: strictExternalCleanup, + }) } catch (cleanupError) { logger.warn(`[${requestId}] Failed to cleanup external webhook ${wh.id}`, cleanupError) + if (strictExternalCleanup) throw cleanupError // Continue with other webhooks even if one fails } + + const deleted = await deleteWebhookRecordAfterCleanup({ + workflowId, + deploymentVersionId, + webhookId: wh.id, + shouldDeleteWebhook, + }) + if (!deleted) { + logger.info(`[${requestId}] Stopping webhook DB cleanup because deployment became active`, { + workflowId, + deploymentVersionId, + webhookId: wh.id, + }) + return + } + } + } else { + for (const wh of 
existingWebhooks) { + const deleted = await deleteWebhookRecordAfterCleanup({ + workflowId, + deploymentVersionId, + webhookId: wh.id, + shouldDeleteWebhook, + }) + if (!deleted) { + logger.info(`[${requestId}] Stopping webhook DB cleanup because deployment became active`, { + workflowId, + deploymentVersionId, + webhookId: wh.id, + }) + return + } } } - await db - .delete(webhook) - .where( - deploymentVersionId - ? and( - eq(webhook.workflowId, workflowId), - eq(webhook.deploymentVersionId, deploymentVersionId) - ) - : eq(webhook.workflowId, workflowId) - ) - logger.info( deploymentVersionId ? `[${requestId}] Cleaned up webhooks for workflow ${workflowId} deployment ${deploymentVersionId}` @@ -851,60 +1012,50 @@ export async function cleanupWebhooksForWorkflow( ) } -/** - * Restore external subscriptions for a previous deployment version. - * Used when activation/deployment fails after webhooks were created, - * to restore the previous version's external subscriptions. - */ -export async function restorePreviousVersionWebhooks(params: { - request: NextRequest - workflow: Record - userId: string - previousVersionId: string - requestId: string -}): Promise { - const { request, workflow, userId, previousVersionId, requestId } = params - - const previousWebhooks = await db - .select() - .from(webhook) - .where(and(eq(webhook.deploymentVersionId, previousVersionId), isNull(webhook.archivedAt))) - - if (previousWebhooks.length === 0) { - return +async function deleteWebhookRecordAfterCleanup(params: { + workflowId: string + deploymentVersionId?: string | null + webhookId: string + shouldDeleteWebhook?: () => Promise +}): Promise { + if (params.shouldDeleteWebhook && !(await params.shouldDeleteWebhook())) { + return false } - logger.info( - `[${requestId}] Restoring ${previousWebhooks.length} external subscription(s) for previous version ${previousVersionId}` - ) + if (!params.shouldDeleteWebhook || typeof params.deploymentVersionId !== 'string') { + await db + 
.delete(webhook) + .where(and(eq(webhook.workflowId, params.workflowId), eq(webhook.id, params.webhookId))) + return true + } - for (const wh of previousWebhooks) { - try { - const result = await createExternalWebhookSubscription( - request, - { - id: wh.id, - path: wh.path, - provider: wh.provider, - providerConfig: (wh.providerConfig as Record) || {}, - }, - workflow, - userId, - requestId + const deploymentVersionId = params.deploymentVersionId + + return db.transaction(async (tx) => { + const [inactiveVersion] = await tx + .select({ id: workflowDeploymentVersion.id }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, params.workflowId), + eq(workflowDeploymentVersion.id, deploymentVersionId), + eq(workflowDeploymentVersion.isActive, false) + ) ) - await db - .update(webhook) - .set({ - providerConfig: result.updatedProviderConfig, - updatedAt: new Date(), - }) - .where(eq(webhook.id, wh.id)) - logger.info(`[${requestId}] Restored external subscription for webhook ${wh.id}`) - } catch (restoreError) { - logger.error( - `[${requestId}] Failed to restore external subscription for webhook ${wh.id}`, - restoreError + .limit(1) + .for('update') + + if (!inactiveVersion) return false + + await tx + .delete(webhook) + .where( + and( + eq(webhook.workflowId, params.workflowId), + eq(webhook.id, params.webhookId), + eq(webhook.deploymentVersionId, deploymentVersionId) + ) ) - } - } + return true + }) } diff --git a/apps/sim/lib/webhooks/provider-subscriptions.ts b/apps/sim/lib/webhooks/provider-subscriptions.ts index e2e8eceebac..05c3eab64c6 100644 --- a/apps/sim/lib/webhooks/provider-subscriptions.ts +++ b/apps/sim/lib/webhooks/provider-subscriptions.ts @@ -119,12 +119,14 @@ export async function createExternalWebhookSubscription( /** * Clean up external webhook subscriptions for a webhook. - * Errors are swallowed — cleanup failure should not block webhook deletion. 
+ * By default, cleanup failure is logged but non-fatal for legacy best-effort callers. + * Deployment outbox cleanup passes `throwOnError` so provider failures stay retryable. */ export async function cleanupExternalWebhook( webhook: Record, workflow: Record, - requestId: string + requestId: string, + options: { throwOnError?: boolean } = {} ): Promise { const provider = webhook.provider as string const handler = getProviderHandler(provider) @@ -134,12 +136,15 @@ export async function cleanupExternalWebhook( } try { - await handler.deleteSubscription({ webhook, workflow, requestId }) + await handler.deleteSubscription({ webhook, workflow, requestId, strict: options.throwOnError }) } catch (error) { logger.warn(`[${requestId}] Error cleaning up external webhook (non-fatal)`, { provider, webhookId: webhook.id, error: toError(error).message, }) + if (options.throwOnError) { + throw error + } } } diff --git a/apps/sim/lib/webhooks/providers/airtable.ts b/apps/sim/lib/webhooks/providers/airtable.ts index 61daafa4f30..fd0463b4b95 100644 --- a/apps/sim/lib/webhooks/providers/airtable.ts +++ b/apps/sim/lib/webhooks/providers/airtable.ts @@ -572,6 +572,7 @@ export const airtableHandler: WebhookProviderHandler = { webhook: webhookRecord, workflow, requestId, + strict, }: DeleteSubscriptionContext): Promise { try { const config = getProviderConfig(webhookRecord) @@ -584,6 +585,7 @@ export const airtableHandler: WebhookProviderHandler = { logger.warn(`[${requestId}] Missing baseId for Airtable webhook deletion`, { webhookId: webhookRecord.id, }) + if (strict) throw new Error('Missing Airtable baseId for webhook deletion') return } @@ -593,6 +595,7 @@ export const airtableHandler: WebhookProviderHandler = { webhookId: webhookRecord.id, baseId: baseId.substring(0, 20), }) + if (strict) throw new Error('Invalid Airtable baseId for webhook deletion') return } @@ -601,6 +604,7 @@ export const airtableHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Missing 
credentialId for Airtable webhook deletion ${webhookRecord.id}` ) + if (strict) throw new Error('Missing Airtable credentialId for webhook deletion') return } @@ -613,14 +617,22 @@ export const airtableHandler: WebhookProviderHandler = { ) : null if (!accessToken) { - logger.warn( - `[${requestId}] Could not retrieve Airtable access token. Cannot delete webhook in Airtable.`, - { webhookId: webhookRecord.id } - ) + const message = `[${requestId}] Could not retrieve Airtable access token. Cannot delete webhook in Airtable.` + logger.warn(message, { webhookId: webhookRecord.id }) + if (strict) throw new Error(message) return } let resolvedExternalId: string | undefined = externalId + let externalIdLookupFailed = false + + if (!resolvedExternalId && strict) { + logger.warn( + `[${requestId}] Missing Airtable externalId during strict cleanup; skipping unsafe URL-based remote deletion`, + { webhookId: webhookRecord.id, baseId } + ) + throw new Error('Missing Airtable externalId for strict cleanup') + } if (!resolvedExternalId) { try { @@ -656,6 +668,7 @@ export const airtableHandler: WebhookProviderHandler = { }) } } else { + externalIdLookupFailed = true logger.warn(`[${requestId}] Failed to list Airtable webhooks to resolve externalId`, { baseId, status: listResp.status, @@ -663,6 +676,7 @@ export const airtableHandler: WebhookProviderHandler = { }) } } catch (e: unknown) { + externalIdLookupFailed = true logger.warn(`[${requestId}] Error attempting to resolve Airtable externalId`, { error: (e as Error)?.message, }) @@ -672,7 +686,11 @@ export const airtableHandler: WebhookProviderHandler = { if (!resolvedExternalId) { logger.info(`[${requestId}] Airtable externalId not found; skipping remote deletion`, { baseId, + confirmedAbsent: !externalIdLookupFailed, }) + if (strict && externalIdLookupFailed) { + throw new Error('Could not resolve Airtable externalId for strict cleanup') + } return } @@ -682,6 +700,7 @@ export const airtableHandler: WebhookProviderHandler = { 
webhookId: webhookRecord.id, externalId: resolvedExternalId.substring(0, 20), }) + if (strict) throw new Error('Invalid Airtable webhook ID for deletion') return } @@ -693,22 +712,22 @@ export const airtableHandler: WebhookProviderHandler = { }, }) - if (!airtableResponse.ok) { + if (!airtableResponse.ok && airtableResponse.status !== 404) { let responseBody: unknown = null try { responseBody = await airtableResponse.json() - } catch { - // Ignore parse errors - } + } catch {} logger.warn( `[${requestId}] Failed to delete Airtable webhook in Airtable. Status: ${airtableResponse.status}`, { baseId, externalId: resolvedExternalId, response: responseBody } ) + if (strict) throw new Error(`Failed to delete Airtable webhook: ${airtableResponse.status}`) } else { logger.info(`[${requestId}] Successfully deleted Airtable webhook in Airtable`, { baseId, externalId: resolvedExternalId, + alreadyDeleted: airtableResponse.status === 404, }) } } catch (error: unknown) { @@ -718,6 +737,7 @@ export const airtableHandler: WebhookProviderHandler = { error: err.message, stack: err.stack, }) + if (strict) throw error } }, diff --git a/apps/sim/lib/webhooks/providers/ashby.ts b/apps/sim/lib/webhooks/providers/ashby.ts index f75fa58b00a..77fabe6e024 100644 --- a/apps/sim/lib/webhooks/providers/ashby.ts +++ b/apps/sim/lib/webhooks/providers/ashby.ts @@ -225,6 +225,7 @@ export const ashbyHandler: WebhookProviderHandler = { logger.warn( `[${ctx.requestId}] Missing apiKey for Ashby webhook deletion ${ctx.webhook.id}, skipping cleanup` ) + if (ctx.strict) throw new Error('Missing Ashby apiKey for webhook deletion') return } @@ -232,6 +233,7 @@ export const ashbyHandler: WebhookProviderHandler = { logger.warn( `[${ctx.requestId}] Missing externalId for Ashby webhook deletion ${ctx.webhook.id}, skipping cleanup` ) + if (ctx.strict) throw new Error('Missing Ashby externalId for webhook deletion') return } @@ -262,9 +264,11 @@ export const ashbyHandler: WebhookProviderHandler = { 
`[${ctx.requestId}] Failed to delete Ashby webhook (non-fatal): ${ashbyResponse.status}`, { response: responseBody } ) + if (ctx.strict) throw new Error(`Failed to delete Ashby webhook: ${ashbyResponse.status}`) } } catch (error) { logger.warn(`[${ctx.requestId}] Error deleting Ashby webhook (non-fatal)`, error) + if (ctx.strict) throw error } }, } diff --git a/apps/sim/lib/webhooks/providers/attio.ts b/apps/sim/lib/webhooks/providers/attio.ts index ea9debff8f5..1022e885d8f 100644 --- a/apps/sim/lib/webhooks/providers/attio.ts +++ b/apps/sim/lib/webhooks/providers/attio.ts @@ -251,6 +251,7 @@ export const attioHandler: WebhookProviderHandler = { webhook: webhookRecord, workflow, requestId, + strict, }: DeleteSubscriptionContext): Promise { try { const config = getProviderConfig(webhookRecord) @@ -261,6 +262,7 @@ export const attioHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Missing externalId for Attio webhook deletion ${webhookRecord.id}, skipping cleanup` ) + if (strict) throw new Error('Missing Attio externalId for webhook deletion') return } @@ -268,6 +270,7 @@ export const attioHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Missing credentialId for Attio webhook deletion ${webhookRecord.id}, skipping cleanup` ) + if (strict) throw new Error('Missing Attio credentialId for webhook deletion') return } @@ -281,10 +284,9 @@ export const attioHandler: WebhookProviderHandler = { : null if (!accessToken) { - logger.warn( - `[${requestId}] Could not retrieve Attio access token. Cannot delete webhook.`, - { webhookId: webhookRecord.id } - ) + const message = `[${requestId}] Could not retrieve Attio access token. 
Cannot delete webhook.` + logger.warn(message, { webhookId: webhookRecord.id }) + if (strict) throw new Error(message) return } @@ -301,11 +303,13 @@ export const attioHandler: WebhookProviderHandler = { `[${requestId}] Failed to delete Attio webhook (non-fatal): ${attioResponse.status}`, { response: responseBody } ) + if (strict) throw new Error(`Failed to delete Attio webhook: ${attioResponse.status}`) } else { logger.info(`[${requestId}] Successfully deleted Attio webhook ${externalId}`) } } catch (error) { logger.warn(`[${requestId}] Error deleting Attio webhook (non-fatal)`, error) + if (strict) throw error } }, diff --git a/apps/sim/lib/webhooks/providers/calendly.ts b/apps/sim/lib/webhooks/providers/calendly.ts index a85b108c5bf..d574d6e9534 100644 --- a/apps/sim/lib/webhooks/providers/calendly.ts +++ b/apps/sim/lib/webhooks/providers/calendly.ts @@ -174,6 +174,7 @@ export const calendlyHandler: WebhookProviderHandler = { logger.warn( `[${ctx.requestId}] Missing apiKey for Calendly webhook deletion ${ctx.webhook.id}, skipping cleanup` ) + if (ctx.strict) throw new Error('Missing Calendly apiKey for webhook deletion') return } @@ -181,6 +182,7 @@ export const calendlyHandler: WebhookProviderHandler = { logger.warn( `[${ctx.requestId}] Missing externalId for Calendly webhook deletion ${ctx.webhook.id}, skipping cleanup` ) + if (ctx.strict) throw new Error('Missing Calendly externalId for webhook deletion') return } @@ -199,6 +201,9 @@ export const calendlyHandler: WebhookProviderHandler = { `[${ctx.requestId}] Failed to delete Calendly webhook (non-fatal): ${calendlyResponse.status}`, { response: responseBody } ) + if (ctx.strict) { + throw new Error(`Failed to delete Calendly webhook: ${calendlyResponse.status}`) + } } else { logger.info( `[${ctx.requestId}] Successfully deleted Calendly webhook subscription ${externalId}` @@ -206,6 +211,7 @@ export const calendlyHandler: WebhookProviderHandler = { } } catch (error) { logger.warn(`[${ctx.requestId}] Error 
deleting Calendly webhook (non-fatal)`, error) + if (ctx.strict) throw error } }, } diff --git a/apps/sim/lib/webhooks/providers/emailbison.ts b/apps/sim/lib/webhooks/providers/emailbison.ts index 116e9f7337c..1601cb30eb5 100644 --- a/apps/sim/lib/webhooks/providers/emailbison.ts +++ b/apps/sim/lib/webhooks/providers/emailbison.ts @@ -206,6 +206,7 @@ export const emailBisonHandler: WebhookProviderHandler = { hasApiBaseUrl: Boolean(apiBaseUrl), hasExternalId: Boolean(externalId), }) + if (ctx.strict) throw new Error('Missing Email Bison webhook cleanup configuration') return } @@ -223,6 +224,7 @@ export const emailBisonHandler: WebhookProviderHandler = { status: response.status, response: responseBody, }) + if (ctx.strict) throw new Error(`Failed to delete Email Bison webhook: ${response.status}`) return } @@ -235,6 +237,7 @@ export const emailBisonHandler: WebhookProviderHandler = { logger.warn(`[${requestId}] Error deleting Email Bison webhook`, { message: toError(error).message, }) + if (ctx.strict) throw error } }, } diff --git a/apps/sim/lib/webhooks/providers/fathom.ts b/apps/sim/lib/webhooks/providers/fathom.ts index c158c73e369..341f50c4592 100644 --- a/apps/sim/lib/webhooks/providers/fathom.ts +++ b/apps/sim/lib/webhooks/providers/fathom.ts @@ -131,6 +131,7 @@ export const fathomHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Missing apiKey for Fathom webhook deletion ${webhook.id}, skipping cleanup` ) + if (ctx.strict) throw new Error('Missing Fathom apiKey for webhook deletion') return } @@ -138,6 +139,7 @@ export const fathomHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Missing externalId for Fathom webhook deletion ${webhook.id}, skipping cleanup` ) + if (ctx.strict) throw new Error('Missing Fathom externalId for webhook deletion') return } @@ -146,6 +148,7 @@ export const fathomHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Invalid externalId format for Fathom webhook deletion ${webhook.id}, 
skipping cleanup` ) + if (ctx.strict) throw new Error('Invalid Fathom externalId for webhook deletion') return } @@ -163,11 +166,13 @@ export const fathomHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Failed to delete Fathom webhook (non-fatal): ${fathomResponse.status}` ) + if (ctx.strict) throw new Error(`Failed to delete Fathom webhook: ${fathomResponse.status}`) } else { logger.info(`[${requestId}] Successfully deleted Fathom webhook ${externalId}`) } } catch (error) { logger.warn(`[${requestId}] Error deleting Fathom webhook (non-fatal)`, error) + if (ctx.strict) throw error } }, } diff --git a/apps/sim/lib/webhooks/providers/grain.ts b/apps/sim/lib/webhooks/providers/grain.ts index 39be11cab66..8b32fb17db9 100644 --- a/apps/sim/lib/webhooks/providers/grain.ts +++ b/apps/sim/lib/webhooks/providers/grain.ts @@ -215,6 +215,7 @@ export const grainHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Missing apiKey for Grain webhook deletion ${webhook.id}, skipping cleanup` ) + if (ctx.strict) throw new Error('Missing Grain apiKey for webhook deletion') return } @@ -222,6 +223,7 @@ export const grainHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Missing externalId for Grain webhook deletion ${webhook.id}, skipping cleanup` ) + if (ctx.strict) throw new Error('Missing Grain externalId for webhook deletion') return } @@ -241,11 +243,13 @@ export const grainHandler: WebhookProviderHandler = { `[${requestId}] Failed to delete Grain webhook (non-fatal): ${grainResponse.status}`, { response: responseBody } ) + if (ctx.strict) throw new Error(`Failed to delete Grain webhook: ${grainResponse.status}`) } else { logger.info(`[${requestId}] Successfully deleted Grain webhook ${externalId}`) } } catch (error) { logger.warn(`[${requestId}] Error deleting Grain webhook (non-fatal)`, error) + if (ctx.strict) throw error } }, } diff --git a/apps/sim/lib/webhooks/providers/lemlist.ts b/apps/sim/lib/webhooks/providers/lemlist.ts index 
2215839b8de..24759354a36 100644 --- a/apps/sim/lib/webhooks/providers/lemlist.ts +++ b/apps/sim/lib/webhooks/providers/lemlist.ts @@ -131,6 +131,7 @@ export const lemlistHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Missing apiKey for Lemlist webhook deletion ${webhook.id}, skipping cleanup` ) + if (ctx.strict) throw new Error('Missing Lemlist apiKey for webhook deletion') return } @@ -142,6 +143,7 @@ export const lemlistHandler: WebhookProviderHandler = { logger.warn(`[${requestId}] Invalid Lemlist hook ID format, skipping deletion`, { id: id.substring(0, 30), }) + if (ctx.strict) throw new Error('Invalid Lemlist hook ID for deletion') return } @@ -159,6 +161,9 @@ export const lemlistHandler: WebhookProviderHandler = { `[${requestId}] Failed to delete Lemlist webhook (non-fatal): ${lemlistResponse.status}`, { response: responseBody } ) + if (ctx.strict) { + throw new Error(`Failed to delete Lemlist webhook: ${lemlistResponse.status}`) + } } else { logger.info(`[${requestId}] Successfully deleted Lemlist webhook ${id}`) } @@ -169,6 +174,14 @@ export const lemlistHandler: WebhookProviderHandler = { return } + if (ctx.strict) { + logger.warn( + `[${requestId}] Missing Lemlist externalId during strict cleanup; skipping unsafe URL-based remote deletion`, + { webhookId: webhook.id } + ) + throw new Error('Missing Lemlist externalId for strict cleanup') + } + const notificationUrl = getNotificationUrl(webhook) const listResponse = await fetch('https://api.lemlist.com/api/hooks', { method: 'GET', @@ -181,6 +194,7 @@ export const lemlistHandler: WebhookProviderHandler = { logger.warn(`[${requestId}] Failed to list Lemlist webhooks for cleanup ${webhook.id}`, { status: listResponse.status, }) + if (ctx.strict) throw new Error(`Failed to list Lemlist webhooks: ${listResponse.status}`) return } @@ -213,6 +227,7 @@ export const lemlistHandler: WebhookProviderHandler = { } } catch (error) { logger.warn(`[${requestId}] Error deleting Lemlist webhook 
(non-fatal)`, error) + if (ctx.strict) throw error } }, } diff --git a/apps/sim/lib/webhooks/providers/linear.ts b/apps/sim/lib/webhooks/providers/linear.ts index 60fb176c28b..4402659f71d 100644 --- a/apps/sim/lib/webhooks/providers/linear.ts +++ b/apps/sim/lib/webhooks/providers/linear.ts @@ -249,10 +249,16 @@ export const linearHandler: WebhookProviderHandler = { async deleteSubscription(ctx: DeleteSubscriptionContext): Promise { const config = getProviderConfig(ctx.webhook) + const triggerId = config.triggerId as string | undefined + if (!triggerId || !triggerId.endsWith('_v2')) { + return + } + const externalId = config.externalId as string | undefined const apiKey = config.apiKey as string | undefined if (!externalId || !apiKey) { + if (ctx.strict) throw new Error('Missing Linear webhook deletion credentials') return } @@ -275,6 +281,7 @@ export const linearHandler: WebhookProviderHandler = { logger.warn( `[${ctx.requestId}] Linear API returned HTTP ${response.status} during webhook deletion for ${externalId}` ) + if (ctx.strict) throw new Error(`Linear webhook deletion failed: ${response.status}`) return } @@ -284,14 +291,48 @@ export const linearHandler: WebhookProviderHandler = { `[${ctx.requestId}] Deleted Linear webhook ${externalId} for webhook ${ctx.webhook.id}` ) } else { + const errorMessages = getGraphQLErrorMessages(data) + if (errorMessages.some(isAlreadyAbsentWebhookMessage)) { + logger.info( + `[${ctx.requestId}] Linear webhook ${externalId} was already absent during deletion` + ) + return + } + logger.warn( `[${ctx.requestId}] Linear webhook deletion returned unsuccessful for ${externalId}` ) + if (ctx.strict) throw new Error('Linear webhook deletion returned unsuccessful') } } catch (error) { logger.warn(`[${ctx.requestId}] Error deleting Linear webhook ${externalId} (non-fatal)`, { error: toError(error).message, }) + if (ctx.strict) throw error } }, } + +function getGraphQLErrorMessages(data: unknown): string[] { + if (!data || typeof data !== 
'object' || Array.isArray(data)) return [] + const errors = (data as Record).errors + if (!Array.isArray(errors)) return [] + + return errors + .map((error) => { + if (!error || typeof error !== 'object' || Array.isArray(error)) return null + const message = (error as Record).message + return typeof message === 'string' ? message : null + }) + .filter((message): message is string => Boolean(message)) +} + +function isAlreadyAbsentWebhookMessage(message: string): boolean { + const normalized = message.toLowerCase() + return ( + normalized.includes('not found') || + normalized.includes('not_found') || + normalized.includes('does not exist') || + normalized.includes('already deleted') + ) +} diff --git a/apps/sim/lib/webhooks/providers/microsoft-teams.ts b/apps/sim/lib/webhooks/providers/microsoft-teams.ts index 887747c242e..be749f4e990 100644 --- a/apps/sim/lib/webhooks/providers/microsoft-teams.ts +++ b/apps/sim/lib/webhooks/providers/microsoft-teams.ts @@ -675,6 +675,7 @@ export const microsoftTeamsHandler: WebhookProviderHandler = { webhook, workflow, requestId, + strict, }: DeleteSubscriptionContext): Promise { try { const config = getProviderConfig(webhook) @@ -688,6 +689,7 @@ export const microsoftTeamsHandler: WebhookProviderHandler = { if (!externalSubscriptionId || !credentialId) { logger.info(`[${requestId}] No external subscription to delete for webhook ${webhook.id}`) + if (strict) throw new Error('Missing Teams subscription cleanup configuration') return } @@ -703,6 +705,7 @@ export const microsoftTeamsHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Could not get access token to delete Teams subscription for webhook ${webhook.id}` ) + if (strict) throw new Error('Missing Teams access token for subscription deletion') return } @@ -723,12 +726,14 @@ export const microsoftTeamsHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Failed to delete Teams subscription ${externalSubscriptionId} for webhook ${webhook.id}. 
Status: ${res.status}` ) + if (strict) throw new Error(`Failed to delete Teams subscription: ${res.status}`) } } catch (error) { logger.error( `[${requestId}] Error deleting Teams subscription for webhook ${webhook.id}`, error ) + if (strict) throw error } }, diff --git a/apps/sim/lib/webhooks/providers/monday.ts b/apps/sim/lib/webhooks/providers/monday.ts index da7f872c6fa..87c1f49994a 100644 --- a/apps/sim/lib/webhooks/providers/monday.ts +++ b/apps/sim/lib/webhooks/providers/monday.ts @@ -181,6 +181,7 @@ export const mondayHandler: WebhookProviderHandler = { const externalId = config.externalId as string | undefined if (!externalId) { + if (ctx.strict) throw new Error('Missing Monday externalId for webhook deletion') return } @@ -189,6 +190,7 @@ export const mondayHandler: WebhookProviderHandler = { logger.warn( `[${ctx.requestId}] Invalid externalId format for Monday webhook deletion: ${externalId}` ) + if (ctx.strict) throw new Error('Invalid Monday externalId for webhook deletion') return } @@ -210,12 +212,14 @@ export const mondayHandler: WebhookProviderHandler = { `[${ctx.requestId}] Could not resolve credentials for Monday webhook deletion (non-fatal)`, { error: toError(error).message } ) + if (ctx.strict) throw error } if (!accessToken) { logger.warn( `[${ctx.requestId}] No access token available for Monday webhook deletion ${externalId} (non-fatal)` ) + if (ctx.strict) throw new Error('Missing Monday access token for webhook deletion') return } @@ -236,6 +240,7 @@ export const mondayHandler: WebhookProviderHandler = { logger.warn( `[${ctx.requestId}] Monday API returned HTTP ${response.status} during webhook deletion for ${externalId}` ) + if (ctx.strict) throw new Error(`Monday webhook deletion failed: ${response.status}`) return } @@ -246,9 +251,16 @@ export const mondayHandler: WebhookProviderHandler = { data.errors?.map((e: { message: string }) => e.message).join(', ') || data.error_message || 'Unknown error' + if 
(isAlreadyAbsentWebhookMessage(errorMsg)) { + logger.info( + `[${ctx.requestId}] Monday webhook ${externalId} was already absent during deletion` + ) + return + } logger.warn( `[${ctx.requestId}] Monday webhook deletion GraphQL error for ${externalId}: ${errorMsg}` ) + if (ctx.strict) throw new Error(`Monday webhook deletion failed: ${errorMsg}`) return } @@ -258,11 +270,13 @@ export const mondayHandler: WebhookProviderHandler = { ) } else { logger.warn(`[${ctx.requestId}] Monday webhook deletion returned no data for ${externalId}`) + if (ctx.strict) throw new Error('Monday webhook deletion returned no data') } } catch (error) { logger.warn(`[${ctx.requestId}] Error deleting Monday webhook ${externalId} (non-fatal)`, { error: toError(error).message, }) + if (ctx.strict) throw error } }, @@ -331,3 +345,13 @@ export const mondayHandler: WebhookProviderHandler = { return null }, } + +function isAlreadyAbsentWebhookMessage(message: string): boolean { + const normalized = message.toLowerCase() + return ( + normalized.includes('not found') || + normalized.includes('not_found') || + normalized.includes('does not exist') || + normalized.includes('already deleted') + ) +} diff --git a/apps/sim/lib/webhooks/providers/resend.ts b/apps/sim/lib/webhooks/providers/resend.ts index f0da305381e..c04289e3a00 100644 --- a/apps/sim/lib/webhooks/providers/resend.ts +++ b/apps/sim/lib/webhooks/providers/resend.ts @@ -270,6 +270,7 @@ export const resendHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Missing apiKey or externalId for Resend webhook deletion ${webhook.id}, skipping cleanup` ) + if (ctx.strict) throw new Error('Missing Resend webhook deletion credentials') return } @@ -286,11 +287,13 @@ export const resendHandler: WebhookProviderHandler = { `[${requestId}] Failed to delete Resend webhook (non-fatal): ${resendResponse.status}`, { response: responseBody } ) + if (ctx.strict) throw new Error(`Failed to delete Resend webhook: ${resendResponse.status}`) } else { 
logger.info(`[${requestId}] Successfully deleted Resend webhook ${externalId}`) } } catch (error) { logger.warn(`[${requestId}] Error deleting Resend webhook (non-fatal)`, error) + if (ctx.strict) throw error } }, } diff --git a/apps/sim/lib/webhooks/providers/telegram.ts b/apps/sim/lib/webhooks/providers/telegram.ts index 77bb623be16..9720bccd60b 100644 --- a/apps/sim/lib/webhooks/providers/telegram.ts +++ b/apps/sim/lib/webhooks/providers/telegram.ts @@ -1,4 +1,6 @@ +import { db, webhook, workflowDeploymentVersion } from '@sim/db' import { createLogger } from '@sim/logger' +import { and, eq, isNull, ne } from 'drizzle-orm' import { getNotificationUrl, getProviderConfig } from '@/lib/webhooks/provider-subscription-utils' import type { AuthContext, @@ -184,6 +186,15 @@ export const telegramHandler: WebhookProviderHandler = { logger.warn( `[${ctx.requestId}] Missing botToken for Telegram webhook deletion ${ctx.webhook.id}` ) + if (ctx.strict) throw new Error('Missing Telegram botToken for webhook deletion') + return + } + + if (await activeTelegramWebhookUsesBot(ctx.webhook, botToken)) { + logger.info( + `[${ctx.requestId}] Skipping Telegram webhook deletion because an active deployment uses the same bot token`, + { webhookId: ctx.webhook.id } + ) return } @@ -199,6 +210,7 @@ export const telegramHandler: WebhookProviderHandler = { responseBody.description || `Failed to delete Telegram webhook. 
Status: ${telegramResponse.status}` logger.error(`[${ctx.requestId}] ${errorMessage}`, { response: responseBody }) + if (ctx.strict) throw new Error(errorMessage) } else { logger.info( `[${ctx.requestId}] Successfully deleted Telegram webhook for webhook ${ctx.webhook.id}` @@ -209,6 +221,39 @@ export const telegramHandler: WebhookProviderHandler = { `[${ctx.requestId}] Error deleting Telegram webhook for webhook ${ctx.webhook.id}`, error ) + if (ctx.strict) throw error } }, } + +async function activeTelegramWebhookUsesBot( + webhookRecord: Record, + botToken: string +): Promise { + const workflowId = webhookRecord.workflowId + const webhookId = webhookRecord.id + if (typeof workflowId !== 'string' || typeof webhookId !== 'string') return false + + const activeWebhooks = await db + .select({ id: webhook.id, providerConfig: webhook.providerConfig }) + .from(webhook) + .innerJoin( + workflowDeploymentVersion, + eq(webhook.deploymentVersionId, workflowDeploymentVersion.id) + ) + .where( + and( + eq(webhook.workflowId, workflowId), + ne(webhook.id, webhookId), + eq(webhook.provider, 'telegram'), + eq(workflowDeploymentVersion.workflowId, workflowId), + eq(workflowDeploymentVersion.isActive, true), + isNull(webhook.archivedAt) + ) + ) + + return activeWebhooks.some((activeWebhook) => { + const config = getProviderConfig({ providerConfig: activeWebhook.providerConfig }) + return config.botToken === botToken + }) +} diff --git a/apps/sim/lib/webhooks/providers/typeform.ts b/apps/sim/lib/webhooks/providers/typeform.ts index 16df0e6c47d..e8a7384009b 100644 --- a/apps/sim/lib/webhooks/providers/typeform.ts +++ b/apps/sim/lib/webhooks/providers/typeform.ts @@ -186,6 +186,7 @@ export const typeformHandler: WebhookProviderHandler = { logger.warn( `[${ctx.requestId}] Missing formId or apiKey for Typeform webhook deletion ${ctx.webhook.id}, skipping cleanup` ) + if (ctx.strict) throw new Error('Missing Typeform webhook deletion credentials') return } @@ -203,11 +204,15 @@ export 
const typeformHandler: WebhookProviderHandler = { logger.warn( `[${ctx.requestId}] Failed to delete Typeform webhook (non-fatal): ${typeformResponse.status}` ) + if (ctx.strict) { + throw new Error(`Failed to delete Typeform webhook: ${typeformResponse.status}`) + } } else { logger.info(`[${ctx.requestId}] Successfully deleted Typeform webhook with tag ${tag}`) } } catch (error) { logger.warn(`[${ctx.requestId}] Error deleting Typeform webhook (non-fatal)`, error) + if (ctx.strict) throw error } }, } diff --git a/apps/sim/lib/webhooks/providers/types.ts b/apps/sim/lib/webhooks/providers/types.ts index dee3e8aca19..d25037117d9 100644 --- a/apps/sim/lib/webhooks/providers/types.ts +++ b/apps/sim/lib/webhooks/providers/types.ts @@ -75,6 +75,7 @@ export interface DeleteSubscriptionContext { webhook: Record workflow: Record requestId: string + strict?: boolean } /** Context for configuring polling after webhook creation. */ @@ -127,7 +128,7 @@ export interface WebhookProviderHandler { /** Create an external webhook subscription (e.g., register with Telegram, Airtable, etc.). */ createSubscription?(ctx: SubscriptionContext): Promise - /** Delete an external webhook subscription during cleanup. Errors should not throw. */ + /** Delete an external webhook subscription during cleanup. Strict outbox cleanup should throw. */ deleteSubscription?(ctx: DeleteSubscriptionContext): Promise /** Configure polling after webhook creation (gmail, outlook, rss, imap). 
*/ diff --git a/apps/sim/lib/webhooks/providers/vercel.ts b/apps/sim/lib/webhooks/providers/vercel.ts index edf5f9d6220..099931c4ad4 100644 --- a/apps/sim/lib/webhooks/providers/vercel.ts +++ b/apps/sim/lib/webhooks/providers/vercel.ts @@ -219,6 +219,7 @@ export const vercelHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Missing apiKey or externalId for Vercel webhook deletion ${webhook.id}, skipping cleanup` ) + if (ctx.strict) throw new Error('Missing Vercel webhook deletion credentials') return } @@ -237,12 +238,14 @@ export const vercelHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Failed to delete Vercel webhook (non-fatal): ${response.status}` ) + if (ctx.strict) throw new Error(`Failed to delete Vercel webhook: ${response.status}`) } else { await response.body?.cancel() logger.info(`[${requestId}] Successfully deleted Vercel webhook ${externalId}`) } } catch (error) { logger.warn(`[${requestId}] Error deleting Vercel webhook (non-fatal)`, error) + if (ctx.strict) throw error } }, diff --git a/apps/sim/lib/webhooks/providers/webflow.ts b/apps/sim/lib/webhooks/providers/webflow.ts index 9399bcd54e3..7494ae39568 100644 --- a/apps/sim/lib/webhooks/providers/webflow.ts +++ b/apps/sim/lib/webhooks/providers/webflow.ts @@ -150,6 +150,7 @@ export const webflowHandler: WebhookProviderHandler = { webhook: webhookRecord, workflow, requestId, + strict, }: DeleteSubscriptionContext): Promise { try { const config = getProviderConfig(webhookRecord) @@ -160,6 +161,7 @@ export const webflowHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Missing siteId for Webflow webhook deletion ${webhookRecord.id}, skipping cleanup` ) + if (strict) throw new Error('Missing Webflow siteId for webhook deletion') return } @@ -167,6 +169,7 @@ export const webflowHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Missing externalId for Webflow webhook deletion ${webhookRecord.id}, skipping cleanup` ) + if (strict) throw new 
Error('Missing Webflow externalId for webhook deletion') return } @@ -176,6 +179,7 @@ export const webflowHandler: WebhookProviderHandler = { webhookId: webhookRecord.id, siteId: siteId.substring(0, 30), }) + if (strict) throw new Error('Invalid Webflow siteId for webhook deletion') return } @@ -185,6 +189,7 @@ export const webflowHandler: WebhookProviderHandler = { webhookId: webhookRecord.id, externalId: externalId.substring(0, 30), }) + if (strict) throw new Error('Invalid Webflow webhook ID for deletion') return } @@ -193,6 +198,7 @@ export const webflowHandler: WebhookProviderHandler = { logger.warn( `[${requestId}] Missing credentialId for Webflow webhook deletion ${webhookRecord.id}` ) + if (strict) throw new Error('Missing Webflow credentialId for webhook deletion') return } @@ -205,10 +211,9 @@ export const webflowHandler: WebhookProviderHandler = { ) : null if (!accessToken) { - logger.warn( - `[${requestId}] Could not retrieve Webflow access token. Cannot delete webhook.`, - { webhookId: webhookRecord.id } - ) + const message = `[${requestId}] Could not retrieve Webflow access token. 
Cannot delete webhook.` + logger.warn(message, { webhookId: webhookRecord.id }) + if (strict) throw new Error(message) return } @@ -228,11 +233,15 @@ export const webflowHandler: WebhookProviderHandler = { `[${requestId}] Failed to delete Webflow webhook (non-fatal): ${webflowResponse.status}`, { response: responseBody } ) + if (strict) { + throw new Error(`Failed to delete Webflow webhook: ${webflowResponse.status}`) + } } else { logger.info(`[${requestId}] Successfully deleted Webflow webhook ${externalId}`) } } catch (error) { logger.warn(`[${requestId}] Error deleting Webflow webhook (non-fatal)`, error) + if (strict) throw error } }, diff --git a/apps/sim/lib/workflows/comparison/normalize.test.ts b/apps/sim/lib/workflows/comparison/normalize.test.ts index 9aa6c9b1209..0cdac720978 100644 --- a/apps/sim/lib/workflows/comparison/normalize.test.ts +++ b/apps/sim/lib/workflows/comparison/normalize.test.ts @@ -13,6 +13,7 @@ import { normalizeValue, sanitizeInputFormat, sanitizeTools, + sanitizeVariable, sortEdges, } from './normalize' @@ -152,6 +153,26 @@ describe('Workflow Normalization Utilities', () => { }) }) + describe('sanitizeVariable', () => { + it.concurrent('removes UI-only fields without changing persisted values', () => { + expect( + sanitizeVariable({ + id: 'variable-a', + workflowId: 'workflow-a', + name: 'Optional payload', + type: 'object', + value: null, + validationError: 'invalid', + }) + ).toEqual({ + id: 'variable-a', + name: 'Optional payload', + type: 'object', + value: null, + }) + }) + }) + describe('normalizeLoop', () => { it.concurrent('should normalize null/undefined to undefined', () => { // null and undefined are semantically equivalent diff --git a/apps/sim/lib/workflows/comparison/normalize.ts b/apps/sim/lib/workflows/comparison/normalize.ts index 741208e62ed..effb4770a89 100644 --- a/apps/sim/lib/workflows/comparison/normalize.ts +++ b/apps/sim/lib/workflows/comparison/normalize.ts @@ -201,19 +201,19 @@ export function 
sanitizeTools(tools: unknown[] | undefined): Record | null | undefined { + variable: VariableWithUiFields | null | undefined +): Omit | null | undefined { if (!variable || typeof variable !== 'object') return variable - const { validationError, ...rest } = variable + const { validationError: _validationError, workflowId: _workflowId, ...rest } = variable return rest } diff --git a/apps/sim/lib/workflows/deployment-outbox.ts b/apps/sim/lib/workflows/deployment-outbox.ts new file mode 100644 index 00000000000..ef6f003066d --- /dev/null +++ b/apps/sim/lib/workflows/deployment-outbox.ts @@ -0,0 +1,705 @@ +import { db, workflowDeploymentVersion, workflow as workflowTable } from '@sim/db' +import { createLogger } from '@sim/logger' +import { and, eq, ne } from 'drizzle-orm' +import { NextRequest } from 'next/server' +import { + enqueueOutboxEvent, + type OutboxHandlerRegistry, + type ProcessSingleOutboxResult, + processOutboxEventById, +} from '@/lib/core/outbox/service' +import { generateRequestId } from '@/lib/core/utils/request' +import { getBaseUrl } from '@/lib/core/utils/urls' +import { + notifyMcpToolServers, + removeMcpToolsForWorkflow, + syncMcpToolsForWorkflow, +} from '@/lib/mcp/workflow-mcp-sync' +import { cleanupWebhooksForWorkflow, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy' +import { createSchedulesForDeploy, deleteSchedulesForWorkflow } from '@/lib/workflows/schedules' +import type { BlockState } from '@/stores/workflows/workflow/types' + +const logger = createLogger('WorkflowDeploymentOutbox') + +export const WORKFLOW_DEPLOYMENT_OUTBOX_EVENTS = { + SYNC_ACTIVE_SIDE_EFFECTS: 'workflow.deployment.sync-active-side-effects', + CLEANUP_INACTIVE_SIDE_EFFECTS: 'workflow.deployment.cleanup-inactive-side-effects', + CLEANUP_UNDEPLOYED_SIDE_EFFECTS: 'workflow.deployment.cleanup-undeployed-side-effects', +} as const + +interface SyncActiveSideEffectsPayload { + workflowId: string + deploymentVersionId: string + userId: string + requestId?: string + 
forceRecreateSubscriptions?: boolean +} + +interface CleanupUndeployedSideEffectsPayload { + workflowId: string + deploymentVersionIds: string[] + userId: string + requestId?: string +} + +interface CleanupInactiveSideEffectsPayload { + workflowId: string + activeDeploymentVersionId: string + userId: string + requestId?: string +} + +export async function enqueueWorkflowDeploymentSideEffects( + executor: Pick, + payload: SyncActiveSideEffectsPayload +): Promise { + return enqueueOutboxEvent( + executor, + WORKFLOW_DEPLOYMENT_OUTBOX_EVENTS.SYNC_ACTIVE_SIDE_EFFECTS, + payload, + { maxAttempts: 10 } + ) +} + +export async function enqueueWorkflowUndeploySideEffects( + executor: Pick, + payload: CleanupUndeployedSideEffectsPayload +): Promise { + return enqueueOutboxEvent( + executor, + WORKFLOW_DEPLOYMENT_OUTBOX_EVENTS.CLEANUP_UNDEPLOYED_SIDE_EFFECTS, + payload, + { maxAttempts: 10 } + ) +} + +export async function enqueueWorkflowInactiveDeploymentCleanup( + executor: Pick, + payload: CleanupInactiveSideEffectsPayload +): Promise { + return enqueueOutboxEvent( + executor, + WORKFLOW_DEPLOYMENT_OUTBOX_EVENTS.CLEANUP_INACTIVE_SIDE_EFFECTS, + payload, + { maxAttempts: 10 } + ) +} + +export async function processWorkflowDeploymentOutboxEvent( + eventId: string +): Promise { + return processOutboxEventById(eventId, workflowDeploymentOutboxHandlers) +} + +const syncActiveSideEffects = async (rawPayload: unknown): Promise => { + const payload = parseSyncActiveSideEffectsPayload(rawPayload) + const requestId = payload.requestId ?? 
generateRequestId() + const [workflowRecord] = await db + .select() + .from(workflowTable) + .where(eq(workflowTable.id, payload.workflowId)) + .limit(1) + + if (!workflowRecord) { + logger.warn(`[${requestId}] Workflow missing during deployment side-effect sync`, { + workflowId: payload.workflowId, + }) + return + } + + const [versionRow] = await db + .select({ + id: workflowDeploymentVersion.id, + state: workflowDeploymentVersion.state, + isActive: workflowDeploymentVersion.isActive, + }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, payload.workflowId), + eq(workflowDeploymentVersion.id, payload.deploymentVersionId) + ) + ) + .limit(1) + + if (!versionRow?.isActive) { + logger.info(`[${requestId}] Skipping stale deployment side-effect sync`, { + workflowId: payload.workflowId, + deploymentVersionId: payload.deploymentVersionId, + }) + if (versionRow) { + await cleanupDeploymentVersionIfInactive({ + workflowId: payload.workflowId, + deploymentVersionId: payload.deploymentVersionId, + workflow: workflowRecord as Record, + userId: payload.userId, + requestId, + }) + } + return + } + + const state = versionRow.state as { blocks?: Record } + const blocks = state.blocks ?? {} + const workflowData = workflowRecord as Record + + if (!(await cleanupStaleDeploymentIfNeeded({ payload, workflow: workflowData, requestId }))) { + return + } + + const request = new NextRequest(new URL('/api/webhooks', getBaseUrl())) + const triggerSaveResult = await saveTriggerWebhooksForDeploy({ + request, + workflowId: payload.workflowId, + workflow: workflowData, + userId: payload.userId, + blocks, + requestId, + deploymentVersionId: payload.deploymentVersionId, + forceRecreateSubscriptions: payload.forceRecreateSubscriptions ?? 
false, + strictExternalCleanup: true, + }) + + if (!triggerSaveResult.success) { + throw new Error(triggerSaveResult.error?.message || 'Failed to sync trigger configuration') + } + + if (!(await cleanupStaleDeploymentIfNeeded({ payload, workflow: workflowData, requestId }))) { + return + } + + const scheduleResult = await createSchedulesIfStillActive({ + workflowId: payload.workflowId, + deploymentVersionId: payload.deploymentVersionId, + blocks, + }) + if (!scheduleResult.success) { + throw new Error(scheduleResult.error || 'Failed to sync schedules') + } + + if (!(await cleanupStaleDeploymentIfNeeded({ payload, workflow: workflowData, requestId }))) { + return + } + + await syncMcpToolsIfStillActive({ + workflowId: payload.workflowId, + deploymentVersionId: payload.deploymentVersionId, + requestId, + state, + }) + + if (!(await cleanupStaleDeploymentIfNeeded({ payload, workflow: workflowData, requestId }))) { + return + } + + if (workflowRecord.workspaceId) { + await pruneWorkflowGroupOutputsIfStillActive({ + workflowId: payload.workflowId, + deploymentVersionId: payload.deploymentVersionId, + workspaceId: workflowRecord.workspaceId, + validBlockIds: new Set(Object.keys(blocks)), + requestId, + }) + } + + if (!(await cleanupStaleDeploymentIfNeeded({ payload, workflow: workflowData, requestId }))) { + return + } + + await enqueueWorkflowInactiveDeploymentCleanup(db, { + workflowId: payload.workflowId, + activeDeploymentVersionId: payload.deploymentVersionId, + userId: payload.userId, + requestId, + }) +} + +const cleanupInactiveSideEffects = async (rawPayload: unknown): Promise => { + const payload = parseCleanupInactiveSideEffectsPayload(rawPayload) + const requestId = payload.requestId ?? 
generateRequestId() + const [workflowRecord] = await db + .select() + .from(workflowTable) + .where(eq(workflowTable.id, payload.workflowId)) + .limit(1) + + if (!workflowRecord) return + + await cleanupInactiveDeploymentVersions({ + workflowId: payload.workflowId, + activeDeploymentVersionId: payload.activeDeploymentVersionId, + workflow: workflowRecord as Record, + userId: payload.userId, + requestId, + }) +} + +const cleanupUndeployedSideEffects = async (rawPayload: unknown): Promise => { + const payload = parseCleanupUndeployedSideEffectsPayload(rawPayload) + const requestId = payload.requestId ?? generateRequestId() + const [workflowRecord] = await db + .select() + .from(workflowTable) + .where(eq(workflowTable.id, payload.workflowId)) + .limit(1) + + if (!workflowRecord) return + const workflowData = workflowRecord as Record + + for (const deploymentVersionId of payload.deploymentVersionIds) { + const [versionRow] = await db + .select({ isActive: workflowDeploymentVersion.isActive }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, payload.workflowId), + eq(workflowDeploymentVersion.id, deploymentVersionId) + ) + ) + .limit(1) + + if (!versionRow || versionRow.isActive) continue + await cleanupDeploymentVersionIfInactive({ + workflowId: payload.workflowId, + workflow: workflowData, + userId: payload.userId, + requestId, + deploymentVersionId, + }) + } + + await cleanupNullVersionWebhooksIfStillUndeployed({ + workflowId: payload.workflowId, + workflow: workflowData, + requestId, + }) + + await removeMcpToolsIfStillUndeployed(payload.workflowId, requestId) +} + +async function cleanupInactiveDeploymentVersions(params: { + workflowId: string + activeDeploymentVersionId: string + workflow: Record + userId: string + requestId: string +}): Promise { + const inactiveVersions = await db + .select({ id: workflowDeploymentVersion.id }) + .from(workflowDeploymentVersion) + .where( + and( + 
eq(workflowDeploymentVersion.workflowId, params.workflowId), + ne(workflowDeploymentVersion.id, params.activeDeploymentVersionId), + eq(workflowDeploymentVersion.isActive, false) + ) + ) + + for (const version of inactiveVersions) { + await cleanupDeploymentVersionIfInactive({ + workflowId: params.workflowId, + workflow: params.workflow, + userId: params.userId, + requestId: params.requestId, + deploymentVersionId: version.id, + }) + } +} + +async function cleanupDeploymentVersionIfInactive(params: { + workflowId: string + deploymentVersionId: string + workflow: Record + userId: string + requestId: string +}): Promise { + if (await isDeploymentVersionActive(params.workflowId, params.deploymentVersionId)) { + await enqueueWorkflowDeploymentSideEffects(db, { + workflowId: params.workflowId, + deploymentVersionId: params.deploymentVersionId, + userId: params.userId, + requestId: params.requestId, + forceRecreateSubscriptions: true, + }) + return + } + + const isStillInactive = async () => + !(await isDeploymentVersionActive(params.workflowId, params.deploymentVersionId)) + + await cleanupWebhooksForWorkflow( + params.workflowId, + params.workflow, + params.requestId, + params.deploymentVersionId, + false, + true, + isStillInactive + ) + + if (!(await isStillInactive())) { + await enqueueWorkflowDeploymentSideEffects(db, { + workflowId: params.workflowId, + deploymentVersionId: params.deploymentVersionId, + userId: params.userId, + requestId: params.requestId, + forceRecreateSubscriptions: true, + }) + return + } + + const deletedSchedules = await deleteSchedulesForDeploymentIfInactive({ + workflowId: params.workflowId, + deploymentVersionId: params.deploymentVersionId, + }) + if (!deletedSchedules) { + if (await isDeploymentVersionActive(params.workflowId, params.deploymentVersionId)) { + await enqueueWorkflowDeploymentSideEffects(db, { + workflowId: params.workflowId, + deploymentVersionId: params.deploymentVersionId, + userId: params.userId, + requestId: 
params.requestId, + forceRecreateSubscriptions: true, + }) + } + return + } + + if (await isDeploymentVersionActive(params.workflowId, params.deploymentVersionId)) { + await enqueueWorkflowDeploymentSideEffects(db, { + workflowId: params.workflowId, + deploymentVersionId: params.deploymentVersionId, + userId: params.userId, + requestId: params.requestId, + forceRecreateSubscriptions: true, + }) + } +} + +async function deleteSchedulesForDeploymentIfInactive(params: { + workflowId: string + deploymentVersionId: string +}): Promise { + return db.transaction(async (tx) => { + const [versionRow] = await tx + .select({ id: workflowDeploymentVersion.id }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, params.workflowId), + eq(workflowDeploymentVersion.id, params.deploymentVersionId), + eq(workflowDeploymentVersion.isActive, false) + ) + ) + .limit(1) + .for('update') + + if (!versionRow) return false + + await deleteSchedulesForWorkflow(params.workflowId, tx, params.deploymentVersionId) + return true + }) +} + +async function cleanupStaleDeploymentIfNeeded(params: { + payload: SyncActiveSideEffectsPayload + workflow: Record + requestId: string +}): Promise { + if ( + await isDeploymentVersionActive(params.payload.workflowId, params.payload.deploymentVersionId) + ) { + return true + } + + logger.info(`[${params.requestId}] Cleaning up stale deployment side effects`, { + workflowId: params.payload.workflowId, + deploymentVersionId: params.payload.deploymentVersionId, + }) + await cleanupDeploymentVersionIfInactive({ + workflowId: params.payload.workflowId, + workflow: params.workflow, + userId: params.payload.userId, + requestId: params.requestId, + deploymentVersionId: params.payload.deploymentVersionId, + }) + return false +} + +async function isDeploymentVersionActive( + workflowId: string, + deploymentVersionId: string +): Promise { + const [versionRow] = await db + .select({ id: workflowDeploymentVersion.id }) + 
.from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, workflowId), + eq(workflowDeploymentVersion.id, deploymentVersionId), + eq(workflowDeploymentVersion.isActive, true) + ) + ) + .limit(1) + + return Boolean(versionRow) +} + +async function removeMcpToolsIfStillUndeployed( + workflowId: string, + requestId: string +): Promise { + const tools = await db.transaction(async (tx) => { + const [workflowRecord] = await tx + .select({ id: workflowTable.id, isDeployed: workflowTable.isDeployed }) + .from(workflowTable) + .where(eq(workflowTable.id, workflowId)) + .limit(1) + .for('update') + + if (!workflowRecord || workflowRecord.isDeployed) return [] + return removeMcpToolsForWorkflow(workflowId, requestId, tx, false, true) + }) + notifyMcpToolServers(tools) +} + +async function cleanupNullVersionWebhooksIfStillUndeployed(params: { + workflowId: string + workflow: Record + requestId: string +}): Promise { + const isStillUndeployed = async () => { + const [workflowRecord] = await db + .select({ isDeployed: workflowTable.isDeployed }) + .from(workflowTable) + .where(eq(workflowTable.id, params.workflowId)) + .limit(1) + + return Boolean(workflowRecord && !workflowRecord.isDeployed) + } + + if (!(await isStillUndeployed())) return + await cleanupWebhooksForWorkflow( + params.workflowId, + params.workflow, + params.requestId, + null, + false, + true, + isStillUndeployed + ) +} + +async function syncMcpToolsIfStillActive(params: { + workflowId: string + deploymentVersionId: string + requestId: string + state: { blocks?: Record } +}): Promise { + const tools = await db.transaction(async (tx) => { + const [workflowRecord] = await tx + .select({ id: workflowTable.id }) + .from(workflowTable) + .where(eq(workflowTable.id, params.workflowId)) + .limit(1) + .for('update') + + if (!workflowRecord) return [] + + const [versionRow] = await tx + .select({ id: workflowDeploymentVersion.id }) + .from(workflowDeploymentVersion) + .where( + and( + 
eq(workflowDeploymentVersion.workflowId, params.workflowId), + eq(workflowDeploymentVersion.id, params.deploymentVersionId), + eq(workflowDeploymentVersion.isActive, true) + ) + ) + .limit(1) + + if (!versionRow) return [] + + return syncMcpToolsForWorkflow({ + workflowId: params.workflowId, + requestId: params.requestId, + state: params.state, + context: 'deployment-outbox', + tx, + notify: false, + throwOnError: true, + }) + }) + notifyMcpToolServers(tools) +} + +async function createSchedulesIfStillActive(params: { + workflowId: string + deploymentVersionId: string + blocks: Record +}) { + return db.transaction(async (tx) => { + const [workflowRecord] = await tx + .select({ id: workflowTable.id }) + .from(workflowTable) + .where(eq(workflowTable.id, params.workflowId)) + .limit(1) + .for('update') + + if (!workflowRecord) { + return { success: true as const } + } + + const [versionRow] = await tx + .select({ id: workflowDeploymentVersion.id }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, params.workflowId), + eq(workflowDeploymentVersion.id, params.deploymentVersionId), + eq(workflowDeploymentVersion.isActive, true) + ) + ) + .limit(1) + + if (!versionRow) { + return { success: true as const } + } + + const result = await createSchedulesForDeploy( + params.workflowId, + params.blocks, + tx, + params.deploymentVersionId + ) + if (!result.success) { + throw new Error(result.error || 'Failed to sync schedules') + } + return result + }) +} + +async function pruneWorkflowGroupOutputsIfStillActive(params: { + workflowId: string + deploymentVersionId: string + workspaceId: string + validBlockIds: Set + requestId: string +}): Promise { + await db.transaction(async (tx) => { + const [workflowRecord] = await tx + .select({ id: workflowTable.id }) + .from(workflowTable) + .where(eq(workflowTable.id, params.workflowId)) + .limit(1) + .for('update') + + if (!workflowRecord) return + + const [versionRow] = await tx + .select({ 
id: workflowDeploymentVersion.id }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, params.workflowId), + eq(workflowDeploymentVersion.id, params.deploymentVersionId), + eq(workflowDeploymentVersion.isActive, true) + ) + ) + .limit(1) + + if (!versionRow) return + + const { pruneStaleWorkflowGroupOutputs } = await import('@/lib/table/service') + await pruneStaleWorkflowGroupOutputs({ + workflowId: params.workflowId, + workspaceId: params.workspaceId, + validBlockIds: params.validBlockIds, + requestId: params.requestId, + tx, + }) + }) +} + +function parseSyncActiveSideEffectsPayload(payload: unknown): SyncActiveSideEffectsPayload { + const record = parsePayloadRecord(payload) + const workflowId = parseRequiredString(record.workflowId, 'workflowId') + const deploymentVersionId = parseRequiredString(record.deploymentVersionId, 'deploymentVersionId') + const userId = parseRequiredString(record.userId, 'userId') + const requestId = + typeof record.requestId === 'string' && record.requestId.length > 0 + ? record.requestId + : undefined + const forceRecreateSubscriptions = + typeof record.forceRecreateSubscriptions === 'boolean' + ? record.forceRecreateSubscriptions + : undefined + + return { workflowId, deploymentVersionId, userId, requestId, forceRecreateSubscriptions } +} + +function parseCleanupUndeployedSideEffectsPayload( + payload: unknown +): CleanupUndeployedSideEffectsPayload { + const record = parsePayloadRecord(payload) + const workflowId = parseRequiredString(record.workflowId, 'workflowId') + const userId = parseRequiredString(record.userId, 'userId') + const deploymentVersionIds = parseRequiredStringArray( + record.deploymentVersionIds, + 'deploymentVersionIds' + ) + const requestId = + typeof record.requestId === 'string' && record.requestId.length > 0 + ? 
record.requestId + : undefined + + return { workflowId, deploymentVersionIds, userId, requestId } +} + +function parseCleanupInactiveSideEffectsPayload( + payload: unknown +): CleanupInactiveSideEffectsPayload { + const record = parsePayloadRecord(payload) + const workflowId = parseRequiredString(record.workflowId, 'workflowId') + const activeDeploymentVersionId = parseRequiredString( + record.activeDeploymentVersionId, + 'activeDeploymentVersionId' + ) + const userId = parseRequiredString(record.userId, 'userId') + const requestId = + typeof record.requestId === 'string' && record.requestId.length > 0 + ? record.requestId + : undefined + + return { workflowId, activeDeploymentVersionId, userId, requestId } +} + +function parsePayloadRecord(payload: unknown): Record { + if (!payload || typeof payload !== 'object' || Array.isArray(payload)) { + throw new Error('Deployment outbox payload must be an object') + } + return payload as Record +} + +function parseRequiredString(value: unknown, fieldName: string): string { + if (typeof value !== 'string' || value.length === 0) { + throw new Error(`Deployment outbox payload is missing ${fieldName}`) + } + return value +} + +function parseRequiredStringArray(value: unknown, fieldName: string): string[] { + if ( + !Array.isArray(value) || + value.some((item) => typeof item !== 'string' || item.length === 0) + ) { + throw new Error(`Deployment outbox payload is missing ${fieldName}`) + } + return value +} + +export const workflowDeploymentOutboxHandlers: OutboxHandlerRegistry = { + [WORKFLOW_DEPLOYMENT_OUTBOX_EVENTS.SYNC_ACTIVE_SIDE_EFFECTS]: syncActiveSideEffects, + [WORKFLOW_DEPLOYMENT_OUTBOX_EVENTS.CLEANUP_INACTIVE_SIDE_EFFECTS]: cleanupInactiveSideEffects, + [WORKFLOW_DEPLOYMENT_OUTBOX_EVENTS.CLEANUP_UNDEPLOYED_SIDE_EFFECTS]: cleanupUndeployedSideEffects, +} diff --git a/apps/sim/lib/workflows/orchestration/deploy.test.ts b/apps/sim/lib/workflows/orchestration/deploy.test.ts new file mode 100644 index 
00000000000..2fff78e9122 --- /dev/null +++ b/apps/sim/lib/workflows/orchestration/deploy.test.ts @@ -0,0 +1,211 @@ +/** + * @vitest-environment node + */ +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { + mockLimit, + mockUpdateSet, + mockSaveWorkflowToNormalizedTables, + mockRecordAudit, + mockCaptureServerEvent, + mockTransaction, + mockTx, +} = vi.hoisted(() => ({ + mockLimit: vi.fn(), + mockUpdateSet: vi.fn(), + mockSaveWorkflowToNormalizedTables: vi.fn(), + mockRecordAudit: vi.fn(), + mockCaptureServerEvent: vi.fn(), + mockTransaction: vi.fn(), + mockTx: { + select: vi.fn(() => ({ + from: vi.fn(() => ({ + where: vi.fn(() => ({ + limit: vi.fn(() => ({ + for: vi.fn().mockResolvedValue([{ id: 'workflow-1' }]), + })), + })), + })), + })), + update: vi.fn(() => ({ + set: vi.fn(() => ({ where: vi.fn().mockResolvedValue(undefined) })), + })), + execute: vi.fn().mockResolvedValue(undefined), + }, +})) + +vi.mock('@sim/db', () => ({ + db: { + select: vi.fn(() => ({ + from: vi.fn(() => ({ + where: vi.fn(() => ({ + limit: mockLimit, + })), + })), + })), + update: vi.fn(() => ({ + set: mockUpdateSet, + })), + transaction: mockTransaction, + }, + workflow: { id: 'workflow.id' }, + workflowDeploymentVersion: { + workflowId: 'workflowDeploymentVersion.workflowId', + version: 'workflowDeploymentVersion.version', + isActive: 'workflowDeploymentVersion.isActive', + state: 'workflowDeploymentVersion.state', + }, +})) + +vi.mock('@sim/audit', () => ({ + AuditAction: { WORKFLOW_DEPLOYMENT_REVERTED: 'WORKFLOW_DEPLOYMENT_REVERTED' }, + AuditResourceType: { WORKFLOW: 'WORKFLOW' }, + recordAudit: mockRecordAudit, +})) + +vi.mock('@/lib/core/config/env', () => ({ + env: { INTERNAL_API_SECRET: 'secret' }, +})) + +vi.mock('@/lib/core/utils/urls', () => ({ + getBaseUrl: () => 'http://localhost:3000', + getSocketServerUrl: () => 'http://localhost:3002', +})) + +vi.mock('@/lib/posthog/server', () => ({ + captureServerEvent: mockCaptureServerEvent, +})) + 
+vi.mock('@/lib/workflows/persistence/utils', () => ({ + activateWorkflowVersion: vi.fn(), + activateWorkflowVersionById: vi.fn(), + deployWorkflow: vi.fn(), + loadWorkflowDeploymentSnapshot: vi.fn(), + saveWorkflowToNormalizedTables: mockSaveWorkflowToNormalizedTables, + undeployWorkflow: vi.fn(), +})) + +vi.mock('@/lib/mcp/workflow-mcp-sync', () => ({ + removeMcpToolsForWorkflow: vi.fn(), + syncMcpToolsForWorkflow: vi.fn(), +})) + +vi.mock('@/lib/webhooks/deploy', () => ({ + cleanupWebhooksForWorkflow: vi.fn(), + restorePreviousVersionWebhooks: vi.fn(), + saveTriggerWebhooksForDeploy: vi.fn(), +})) + +vi.mock('@/lib/workflows/schedules', () => ({ + cleanupDeploymentVersion: vi.fn(), + createSchedulesForDeploy: vi.fn(), + validateWorkflowSchedules: vi.fn(), +})) + +import { performRevertToVersion } from '@/lib/workflows/orchestration/deploy' + +describe('performRevertToVersion', () => { + beforeEach(() => { + vi.clearAllMocks() + vi.stubGlobal('fetch', vi.fn().mockResolvedValue(new Response(null, { status: 200 }))) + mockTransaction.mockImplementation(async (callback) => callback(mockTx)) + mockTx.select.mockImplementation((selection?: Record) => ({ + from: vi.fn(() => ({ + where: vi.fn(() => ({ + limit: + selection && Object.hasOwn(selection, 'state') + ? 
mockLimit + : vi.fn(() => ({ + for: vi.fn().mockResolvedValue([{ id: 'workflow-1' }]), + })), + })), + })), + })) + mockTx.update.mockReturnValue({ set: mockUpdateSet }) + mockUpdateSet.mockReturnValue({ where: vi.fn().mockResolvedValue(undefined) }) + mockSaveWorkflowToNormalizedTables.mockResolvedValue({ success: true }) + }) + + it('restores variables when the deployment snapshot includes them', async () => { + mockLimit.mockResolvedValue([ + { + state: { + blocks: {}, + edges: [], + loops: {}, + parallels: {}, + variables: { + variableA: { + id: 'variableA', + name: 'API_KEY', + type: 'plain', + value: 'deployed-value', + }, + }, + }, + }, + ]) + + const result = await performRevertToVersion({ + workflowId: 'workflow-1', + version: 3, + userId: 'user-1', + workflow: { id: 'workflow-1', name: 'Workflow', workspaceId: 'workspace-1' }, + }) + + expect(result.success).toBe(true) + expect(mockSaveWorkflowToNormalizedTables).toHaveBeenCalledWith( + 'workflow-1', + expect.objectContaining({ + variables: { + variableA: { + id: 'variableA', + name: 'API_KEY', + type: 'plain', + value: 'deployed-value', + }, + }, + }), + mockTx + ) + expect(mockUpdateSet).toHaveBeenCalledWith( + expect.objectContaining({ + variables: { + variableA: { + id: 'variableA', + name: 'API_KEY', + type: 'plain', + value: 'deployed-value', + }, + }, + }) + ) + }) + + it('preserves existing variables when reverting a legacy snapshot without variables', async () => { + mockLimit.mockResolvedValue([ + { + state: { + blocks: {}, + edges: [], + loops: {}, + parallels: {}, + }, + }, + ]) + + const result = await performRevertToVersion({ + workflowId: 'workflow-1', + version: 2, + userId: 'user-1', + workflow: { id: 'workflow-1', name: 'Workflow', workspaceId: 'workspace-1' }, + }) + + expect(result.success).toBe(true) + const savedState = mockSaveWorkflowToNormalizedTables.mock.calls[0][1] + expect(Object.hasOwn(savedState, 'variables')).toBe(false) + const workflowUpdate = 
mockUpdateSet.mock.calls[0][0] + expect(Object.hasOwn(workflowUpdate, 'variables')).toBe(false) + }) +}) diff --git a/apps/sim/lib/workflows/orchestration/deploy.ts b/apps/sim/lib/workflows/orchestration/deploy.ts index 926ed2eeeb4..78b4db442bf 100644 --- a/apps/sim/lib/workflows/orchestration/deploy.ts +++ b/apps/sim/lib/workflows/orchestration/deploy.ts @@ -2,31 +2,25 @@ import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit' import { db, workflowDeploymentVersion, workflow as workflowTable } from '@sim/db' import { createLogger } from '@sim/logger' import { and, eq } from 'drizzle-orm' -import { NextRequest } from 'next/server' +import type { NextRequest } from 'next/server' import { env } from '@/lib/core/config/env' import { generateRequestId } from '@/lib/core/utils/request' -import { getBaseUrl, getSocketServerUrl } from '@/lib/core/utils/urls' -import { removeMcpToolsForWorkflow, syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync' +import { getSocketServerUrl } from '@/lib/core/utils/urls' import { captureServerEvent } from '@/lib/posthog/server' +import { validateTriggerWebhookConfigForDeploy } from '@/lib/webhooks/deploy' import { - cleanupWebhooksForWorkflow, - restorePreviousVersionWebhooks, - saveTriggerWebhooksForDeploy, -} from '@/lib/webhooks/deploy' + enqueueWorkflowDeploymentSideEffects, + enqueueWorkflowUndeploySideEffects, + processWorkflowDeploymentOutboxEvent, +} from '@/lib/workflows/deployment-outbox' import type { OrchestrationErrorCode } from '@/lib/workflows/orchestration/types' import { activateWorkflowVersion, - activateWorkflowVersionById, deployWorkflow, - loadWorkflowFromNormalizedTables, saveWorkflowToNormalizedTables, undeployWorkflow, } from '@/lib/workflows/persistence/utils' -import { - cleanupDeploymentVersion, - createSchedulesForDeploy, - validateWorkflowSchedules, -} from '@/lib/workflows/schedules' +import { validateWorkflowSchedules } from '@/lib/workflows/schedules' import type { BlockState, 
WorkflowState } from '@/stores/workflows/workflow/types' const logger = createLogger('DeployOrchestration') @@ -83,10 +77,10 @@ export interface PerformFullDeployResult { } /** - * Performs a full workflow deployment: creates a deployment version, syncs - * trigger webhooks, creates schedules, cleans up the previous version, and - * syncs MCP tools. Both the deploy API route and the copilot deploy tools - * must use this single function so behaviour stays consistent. + * Performs a full workflow deployment: creates a deployment version, queues + * external side effects transactionally, processes that outbox event after + * commit, and notifies clients. Both the deploy API route and the copilot + * deploy tools must use this single function so behaviour stays consistent. */ export async function performFullDeploy( params: PerformFullDeployParams @@ -94,21 +88,6 @@ export async function performFullDeploy( const { workflowId, userId, workflowName } = params const actorId = params.actorId ?? userId const requestId = params.requestId ?? generateRequestId() - const request = params.request ?? 
new NextRequest(new URL('/api/webhooks', getBaseUrl())) - - const normalizedData = await loadWorkflowFromNormalizedTables(workflowId) - if (!normalizedData) { - return { success: false, error: 'Failed to load workflow state', errorCode: 'not_found' } - } - - const scheduleValidation = validateWorkflowSchedules(normalizedData.blocks) - if (!scheduleValidation.isValid) { - return { - success: false, - error: `Invalid schedule configuration: ${scheduleValidation.error}`, - errorCode: 'validation', - } - } const [workflowRecord] = await db .select() @@ -121,135 +100,60 @@ export async function performFullDeploy( } const workflowData = workflowRecord as Record - - const [currentActiveVersion] = await db - .select({ id: workflowDeploymentVersion.id }) - .from(workflowDeploymentVersion) - .where( - and( - eq(workflowDeploymentVersion.workflowId, workflowId), - eq(workflowDeploymentVersion.isActive, true) - ) - ) - .limit(1) - const previousVersionId = currentActiveVersion?.id - - const rollbackDeployment = async () => { - if (previousVersionId) { - await restorePreviousVersionWebhooks({ - request, - workflow: workflowData, - userId, - previousVersionId, - requestId, - }) - const reactivateResult = await activateWorkflowVersionById({ - workflowId, - deploymentVersionId: previousVersionId, - }) - if (reactivateResult.success) return - } - await undeployWorkflow({ workflowId }) - } + let outboxEventId: string | undefined const deployResult = await deployWorkflow({ workflowId, deployedBy: actorId, workflowName: workflowName || workflowRecord.name || undefined, + validateWorkflowState: async (workflowState) => { + const scheduleValidation = validateWorkflowSchedules(workflowState.blocks) + if (!scheduleValidation.isValid) { + return { + success: false, + error: `Invalid schedule configuration: ${scheduleValidation.error}`, + errorCode: 'validation', + } + } + const triggerValidation = await validateTriggerWebhookConfigForDeploy(workflowState.blocks) + if 
(!triggerValidation.success) { + return { + success: false, + error: triggerValidation.error?.message || 'Invalid trigger configuration', + errorCode: 'validation', + } + } + return { success: true } + }, + onDeployTransaction: async (tx, result) => { + outboxEventId = await enqueueWorkflowDeploymentSideEffects(tx, { + workflowId, + deploymentVersionId: result.deploymentVersionId, + userId, + requestId, + }) + }, }) if (!deployResult.success) { - return { success: false, error: deployResult.error || 'Failed to deploy workflow' } + const error = deployResult.error || 'Failed to deploy workflow' + return { + success: false, + error, + errorCode: deployResult.errorCode, + } } const deployedAt = deployResult.deployedAt! const deploymentVersionId = deployResult.deploymentVersionId + const previousVersionId = deployResult.previousVersionId + const deploymentSnapshot = deployResult.currentState - if (!deploymentVersionId) { + if (!deploymentVersionId || !deploymentSnapshot) { await undeployWorkflow({ workflowId }) return { success: false, error: 'Failed to resolve deployment version' } } - const triggerSaveResult = await saveTriggerWebhooksForDeploy({ - request, - workflowId, - workflow: workflowData, - userId, - blocks: normalizedData.blocks, - requestId, - deploymentVersionId, - previousVersionId, - }) - - if (!triggerSaveResult.success) { - await cleanupDeploymentVersion({ - workflowId, - workflow: workflowData, - requestId, - deploymentVersionId, - }) - await rollbackDeployment() - return { - success: false, - error: triggerSaveResult.error?.message || 'Failed to save trigger configuration', - } - } - - const scheduleResult = await createSchedulesForDeploy( - workflowId, - normalizedData.blocks, - db, - deploymentVersionId - ) - if (!scheduleResult.success) { - logger.error(`[${requestId}] Failed to create schedule: ${scheduleResult.error}`) - await cleanupDeploymentVersion({ - workflowId, - workflow: workflowData, - requestId, - deploymentVersionId, - }) - await 
rollbackDeployment() - return { success: false, error: scheduleResult.error || 'Failed to create schedule' } - } - - if (previousVersionId && previousVersionId !== deploymentVersionId) { - try { - await cleanupDeploymentVersion({ - workflowId, - workflow: workflowData, - requestId, - deploymentVersionId: previousVersionId, - skipExternalCleanup: true, - }) - } catch (cleanupError) { - logger.error(`[${requestId}] Failed to clean up previous version`, cleanupError) - } - } - - await syncMcpToolsForWorkflow({ workflowId, requestId, context: 'deploy' }) - - // Drop stale block refs from any table workflow column targeting this workflow, - // so columns that referenced just-removed blocks collapse cleanly instead of - // showing perpetual "Waiting" indicators on future row runs. - if (workflowData.workspaceId) { - try { - const { pruneStaleWorkflowGroupOutputs } = await import('@/lib/table/service') - const validBlockIds = new Set(Object.keys(normalizedData.blocks)) - await pruneStaleWorkflowGroupOutputs({ - workflowId, - workspaceId: workflowData.workspaceId as string, - validBlockIds, - requestId, - }) - } catch (pruneError) { - logger.warn( - `[${requestId}] Failed to prune stale workflow column outputs for ${workflowId}`, - pruneError - ) - } - } - recordAudit({ workspaceId: (workflowData.workspaceId as string) || null, actorId: actorId, @@ -262,11 +166,11 @@ export async function performFullDeploy( deploymentVersionId, version: deployResult.version, previousVersionId: previousVersionId || undefined, - triggerWarnings: triggerSaveResult.warnings?.length ? 
triggerSaveResult.warnings : undefined, }, - request, + request: params.request, }) + const sideEffectWarning = await processDeploymentSideEffectsNow(outboxEventId, requestId) await notifySocketDeploymentChanged(workflowId) return { @@ -274,7 +178,7 @@ export async function performFullDeploy( deployedAt, version: deployResult.version, deploymentVersionId, - warnings: triggerSaveResult.warnings, + warnings: sideEffectWarning ? [sideEffectWarning] : undefined, } } @@ -289,13 +193,14 @@ export interface PerformFullUndeployParams { export interface PerformFullUndeployResult { success: boolean error?: string + warnings?: string[] } /** - * Performs a full workflow undeploy: marks the workflow as undeployed, cleans up - * webhook records and external subscriptions, removes MCP tools, emits a - * telemetry event, and records an audit log entry. Both the deploy API DELETE - * handler and the copilot undeploy tools must use this single function. + * Performs a full workflow undeploy: marks the workflow as undeployed, queues + * external cleanup transactionally, emits a telemetry event, and records an + * audit log entry. Both the deploy API DELETE handler and the copilot undeploy + * tools must use this single function. 
*/ export async function performFullUndeploy( params: PerformFullUndeployParams @@ -315,15 +220,23 @@ export async function performFullUndeploy( } const workflowData = workflowRecord as Record + let outboxEventId: string | undefined - const result = await undeployWorkflow({ workflowId }) + const result = await undeployWorkflow({ + workflowId, + onUndeployTransaction: async (tx, undeploy) => { + outboxEventId = await enqueueWorkflowUndeploySideEffects(tx, { + workflowId, + deploymentVersionIds: undeploy.deploymentVersionIds, + userId, + requestId, + }) + }, + }) if (!result.success) { return { success: false, error: result.error || 'Failed to undeploy workflow' } } - await cleanupWebhooksForWorkflow(workflowId, workflowData, requestId) - await removeMcpToolsForWorkflow(workflowId, requestId) - logger.info(`[${requestId}] Workflow undeployed successfully: ${workflowId}`) try { @@ -344,8 +257,9 @@ export async function performFullUndeploy( }) await notifySocketDeploymentChanged(workflowId) + const sideEffectWarning = await processDeploymentSideEffectsNow(outboxEventId, requestId) - return { success: true } + return { success: true, warnings: sideEffectWarning ? [sideEffectWarning] : undefined } } export interface PerformActivateVersionParams { @@ -387,11 +301,10 @@ export interface PerformRevertToVersionResult { } /** - * Activates an existing deployment version: validates schedules, syncs trigger - * webhooks (with forced subscription recreation), creates schedules, activates - * the version, cleans up the previous version, syncs MCP tools, and records - * an audit entry. Both the deployment version PATCH handler and the admin - * activate route must use this function. + * Activates an existing deployment version: validates schedules, activates the + * version, queues external side effects transactionally, processes that outbox + * event after commit, and records an audit entry. 
Both the deployment version + * PATCH handler and the admin activate route must use this function. */ export async function performActivateVersion( params: PerformActivateVersionParams @@ -399,12 +312,12 @@ export async function performActivateVersion( const { workflowId, version, userId, workflow } = params const actorId = params.actorId ?? userId const requestId = params.requestId ?? generateRequestId() - const request = params.request ?? new NextRequest(new URL('/api/webhooks', getBaseUrl())) const [versionRow] = await db .select({ id: workflowDeploymentVersion.id, state: workflowDeploymentVersion.state, + isActive: workflowDeploymentVersion.isActive, }) .from(workflowDeploymentVersion) .where( @@ -419,24 +332,22 @@ export async function performActivateVersion( return { success: false, error: 'Deployment version not found', errorCode: 'not_found' } } + if (versionRow.isActive) { + const [workflowDeployment] = await db + .select({ deployedAt: workflowTable.deployedAt }) + .from(workflowTable) + .where(eq(workflowTable.id, workflowId)) + .limit(1) + + return { success: true, deployedAt: workflowDeployment?.deployedAt ?? 
new Date(), warnings: [] } + } + const deployedState = versionRow.state as { blocks?: Record } const blocks = deployedState.blocks if (!blocks || typeof blocks !== 'object') { return { success: false, error: 'Invalid deployed state structure', errorCode: 'validation' } } - const [currentActiveVersion] = await db - .select({ id: workflowDeploymentVersion.id }) - .from(workflowDeploymentVersion) - .where( - and( - eq(workflowDeploymentVersion.workflowId, workflowId), - eq(workflowDeploymentVersion.isActive, true) - ) - ) - .limit(1) - const previousVersionId = currentActiveVersion?.id - const scheduleValidation = validateWorkflowSchedules(blocks as Record) if (!scheduleValidation.isValid) { return { @@ -446,101 +357,35 @@ export async function performActivateVersion( } } - const triggerSaveResult = await saveTriggerWebhooksForDeploy({ - request, - workflowId, - workflow, - userId, - blocks: blocks as Record, - requestId, - deploymentVersionId: versionRow.id, - previousVersionId, - forceRecreateSubscriptions: true, - }) - - if (!triggerSaveResult.success) { - if (previousVersionId) { - await restorePreviousVersionWebhooks({ - request, - workflow, - userId, - previousVersionId, - requestId, - }) - } + const triggerValidation = await validateTriggerWebhookConfigForDeploy( + blocks as Record + ) + if (!triggerValidation.success) { return { success: false, - error: triggerSaveResult.error?.message || 'Failed to sync trigger configuration', + error: triggerValidation.error?.message || 'Invalid trigger configuration', + errorCode: 'validation', } } - const scheduleResult = await createSchedulesForDeploy( + let outboxEventId: string | undefined + const result = await activateWorkflowVersion({ workflowId, - blocks as Record, - db, - versionRow.id - ) - - if (!scheduleResult.success) { - await cleanupDeploymentVersion({ - workflowId, - workflow, - requestId, - deploymentVersionId: versionRow.id, - }) - if (previousVersionId) { - await restorePreviousVersionWebhooks({ - 
request, - workflow, + version, + onActivateTransaction: async (tx, activation) => { + outboxEventId = await enqueueWorkflowDeploymentSideEffects(tx, { + workflowId, + deploymentVersionId: activation.deploymentVersionId, userId, - previousVersionId, requestId, + forceRecreateSubscriptions: true, }) - } - return { success: false, error: scheduleResult.error || 'Failed to sync schedules' } - } - - const result = await activateWorkflowVersion({ workflowId, version }) + }, + }) if (!result.success) { - await cleanupDeploymentVersion({ - workflowId, - workflow, - requestId, - deploymentVersionId: versionRow.id, - }) - if (previousVersionId) { - await restorePreviousVersionWebhooks({ - request, - workflow, - userId, - previousVersionId, - requestId, - }) - } return { success: false, error: result.error || 'Failed to activate version' } } - if (previousVersionId && previousVersionId !== versionRow.id) { - try { - await cleanupDeploymentVersion({ - workflowId, - workflow, - requestId, - deploymentVersionId: previousVersionId, - skipExternalCleanup: true, - }) - } catch (cleanupError) { - logger.error(`[${requestId}] Failed to clean up previous version`, cleanupError) - } - } - - await syncMcpToolsForWorkflow({ - workflowId, - requestId, - state: versionRow.state as { blocks?: Record }, - context: 'activate', - }) - recordAudit({ workspaceId: (workflow.workspaceId as string) || null, actorId: actorId, @@ -552,16 +397,50 @@ export async function performActivateVersion( metadata: { version, deploymentVersionId: versionRow.id, - previousVersionId: previousVersionId || undefined, + previousVersionId: result.previousVersionId || undefined, }, }) + const sideEffectWarning = await processDeploymentSideEffectsNow(outboxEventId, requestId) await notifySocketDeploymentChanged(workflowId) return { success: true, deployedAt: result.deployedAt, - warnings: triggerSaveResult.warnings, + warnings: sideEffectWarning ? 
[sideEffectWarning] : undefined, + } +} + +async function processDeploymentSideEffectsNow( + outboxEventId: string | undefined, + requestId: string +): Promise { + if (!outboxEventId) { + return 'Deployment state changed, but side-effect sync was not queued. Redeploy if triggers or schedules look stale.' + } + + try { + const result = await processWorkflowDeploymentOutboxEvent(outboxEventId) + if (result === 'completed') return undefined + if (result === 'dead_letter' || result === 'not_found') { + logger.error(`[${requestId}] Deployment side-effect sync cannot be retried automatically`, { + outboxEventId, + result, + }) + return 'Deployment saved, but trigger, schedule, and MCP sync could not be queued. Redeploy if triggers or schedules look stale.' + } + + logger.warn(`[${requestId}] Deployment side-effect sync queued for retry`, { + outboxEventId, + result, + }) + return 'Deployment saved. Trigger, schedule, and MCP sync is queued and may finish shortly.' + } catch (error) { + logger.warn(`[${requestId}] Deployment side-effect sync queued for retry`, { + outboxEventId, + error, + }) + return 'Deployment saved. Trigger, schedule, and MCP sync is queued and may finish shortly.' } } @@ -577,73 +456,93 @@ export async function performRevertToVersion( const actorId = params.actorId ?? 
userId const versionLabel = String(version) - let stateRow: { state: unknown } | null = null - if (version === 'active') { - const [row] = await db - .select({ state: workflowDeploymentVersion.state }) - .from(workflowDeploymentVersion) - .where( - and( - eq(workflowDeploymentVersion.workflowId, workflowId), - eq(workflowDeploymentVersion.isActive, true) - ) - ) - .limit(1) - stateRow = row || null - } else { - const [row] = await db - .select({ state: workflowDeploymentVersion.state }) - .from(workflowDeploymentVersion) - .where( - and( - eq(workflowDeploymentVersion.workflowId, workflowId), - eq(workflowDeploymentVersion.version, version) - ) - ) + const lastSaved = Date.now() + const saveResult = await db.transaction(async (tx) => { + await tx + .select({ id: workflowTable.id }) + .from(workflowTable) + .where(eq(workflowTable.id, workflowId)) .limit(1) - stateRow = row || null - } + .for('update') + + const [stateRow] = + version === 'active' + ? await tx + .select({ state: workflowDeploymentVersion.state }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, workflowId), + eq(workflowDeploymentVersion.isActive, true) + ) + ) + .limit(1) + : await tx + .select({ state: workflowDeploymentVersion.state }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, workflowId), + eq(workflowDeploymentVersion.version, version) + ) + ) + .limit(1) + + if (!stateRow?.state) { + return { success: false, error: 'Deployment version not found' } + } - if (!stateRow?.state) { - return { success: false, error: 'Deployment version not found', errorCode: 'not_found' } - } + const deployedState = stateRow.state as { + blocks?: Record + edges?: unknown[] + loops?: Record + parallels?: Record + variables?: WorkflowState['variables'] + } + if (!deployedState.blocks || !deployedState.edges) { + return { success: false, error: 'Invalid deployed state structure' } + } - const deployedState = 
stateRow.state as { - blocks?: Record - edges?: unknown[] - loops?: Record - parallels?: Record - } - if (!deployedState.blocks || !deployedState.edges) { - return { - success: false, - error: 'Invalid deployed state structure', - errorCode: 'internal', + const hasDeploymentVariables = Object.hasOwn(deployedState, 'variables') + const restoredState: WorkflowState = { + blocks: deployedState.blocks, + edges: deployedState.edges, + loops: deployedState.loops || {}, + parallels: deployedState.parallels || {}, + lastSaved, + } as WorkflowState + if (hasDeploymentVariables) { + restoredState.variables = deployedState.variables || {} } - } - const lastSaved = Date.now() - const saveResult = await saveWorkflowToNormalizedTables(workflowId, { - blocks: deployedState.blocks, - edges: deployedState.edges, - loops: deployedState.loops || {}, - parallels: deployedState.parallels || {}, - lastSaved, - } as WorkflowState) + const result = await saveWorkflowToNormalizedTables(workflowId, restoredState, tx) + if (!result.success) return result + + await tx + .update(workflowTable) + .set({ + ...(hasDeploymentVariables ? { variables: deployedState.variables || {} } : {}), + lastSynced: new Date(), + updatedAt: new Date(), + }) + .where(eq(workflowTable.id, workflowId)) + + return result + }) if (!saveResult.success) { return { success: false, error: saveResult.error || 'Failed to save deployed state', - errorCode: 'internal', + errorCode: + saveResult.error === 'Deployment version not found' + ? 'not_found' + : saveResult.error === 'Invalid deployed state structure' + ? 
'internal' + : 'internal', } } - await db - .update(workflowTable) - .set({ lastSynced: new Date(), updatedAt: new Date() }) - .where(eq(workflowTable.id, workflowId)) - try { await fetch(`${getSocketServerUrl()}/api/workflow-reverted`, { method: 'POST', diff --git a/apps/sim/lib/workflows/persistence/utils.test.ts b/apps/sim/lib/workflows/persistence/utils.test.ts index 3a8f3c47cba..82997c4f518 100644 --- a/apps/sim/lib/workflows/persistence/utils.test.ts +++ b/apps/sim/lib/workflows/persistence/utils.test.ts @@ -305,6 +305,42 @@ describe('Database Helpers', () => { vi.resetAllMocks() }) + describe('buildWorkflowDeploymentSnapshot', () => { + it('combines normalized workflow state with persisted variables', () => { + const snapshot = dbHelpers.buildWorkflowDeploymentSnapshot( + { + blocks: asAppBlocks({ block: createStarterBlock({ id: 'block' }) }), + edges: [], + loops: {}, + parallels: {}, + isFromNormalizedTables: true, + }, + { + variable: { + id: 'variable', + name: 'threshold', + type: 'number', + value: 5, + }, + } + ) + + expect(snapshot.blocks.block).toBeDefined() + expect(snapshot.edges).toEqual([]) + expect(snapshot.loops).toEqual({}) + expect(snapshot.parallels).toEqual({}) + expect(snapshot.variables).toEqual({ + variable: { + id: 'variable', + name: 'threshold', + type: 'number', + value: 5, + }, + }) + expect(snapshot.lastSaved).toEqual(expect.any(Number)) + }) + }) + describe('loadWorkflowFromNormalizedTables', () => { it('should successfully load workflow data from normalized tables', async () => { vi.clearAllMocks() @@ -762,6 +798,58 @@ describe('Database Helpers', () => { }) }) + describe('workflow row locking', () => { + function createMissingWorkflowTx() { + const lockFor = vi.fn().mockResolvedValue([]) + const limit = vi.fn(() => ({ for: lockFor })) + const where = vi.fn(() => ({ limit })) + const from = vi.fn(() => ({ where })) + const select = vi.fn(() => ({ from })) + const update = vi.fn() + + return { + tx: { + execute: 
vi.fn().mockResolvedValue([{ id: mockWorkflowId }]), + select, + update, + }, + lockFor, + update, + } + } + + it('returns not_found when deploy cannot lock a workflow row', async () => { + const { tx, lockFor } = createMissingWorkflowTx() + mockDb.transaction = vi.fn().mockImplementation(async (callback) => callback(tx)) + + const result = await dbHelpers.deployWorkflow({ + workflowId: mockWorkflowId, + deployedBy: 'user-123', + }) + + expect(result).toEqual({ + success: false, + error: 'Workflow not found', + errorCode: 'not_found', + }) + expect(lockFor).toHaveBeenCalledWith('update') + expect(tx.execute).not.toHaveBeenCalled() + }) + + it('returns an error when undeploy cannot lock a workflow row', async () => { + const { tx, update } = createMissingWorkflowTx() + mockDb.transaction = vi.fn().mockImplementation(async (callback) => callback(tx)) + + const result = await dbHelpers.undeployWorkflow({ workflowId: mockWorkflowId }) + + expect(result).toEqual({ + success: false, + error: 'Workflow not found', + }) + expect(update).not.toHaveBeenCalled() + }) + }) + describe('error handling and edge cases', () => { it('should handle very large workflow data', async () => { const blocks: Record> = {} diff --git a/apps/sim/lib/workflows/persistence/utils.ts b/apps/sim/lib/workflows/persistence/utils.ts index c8f4d883c38..e9b409c8c67 100644 --- a/apps/sim/lib/workflows/persistence/utils.ts +++ b/apps/sim/lib/workflows/persistence/utils.ts @@ -25,6 +25,34 @@ const logger = createLogger('WorkflowDBHelpers') export type { DbOrTx, NormalizedWorkflowData } from '@sim/workflow-persistence/types' export type WorkflowDeploymentVersion = InferSelectModel +function hasReturnedRows(result: unknown): boolean { + if (Array.isArray(result)) return result.length > 0 + + if (result && typeof result === 'object') { + const rows = 'rows' in result ? 
result.rows : undefined + if (Array.isArray(rows)) return rows.length > 0 + } + + return Boolean(result) +} + +async function lockWorkflowForUpdate(tx: DbOrTx, workflowId: string): Promise { + const query = tx.select({ id: workflow.id }).from(workflow).where(eq(workflow.id, workflowId)) + + if ('limit' in query && typeof query.limit === 'function') { + const limited = query.limit(1) + const rows = + 'for' in limited && typeof limited.for === 'function' + ? await limited.for('update') + : await limited + return hasReturnedRows(rows) + } + + const rows = await query + + return hasReturnedRows(rows) +} + export interface WorkflowDeploymentVersionResponse { id: string version: number @@ -343,16 +371,17 @@ async function migrateCredentialIds( * has not been migrated to normalized tables yet. */ export async function loadWorkflowFromNormalizedTables( - workflowId: string + workflowId: string, + externalTx?: DbOrTx ): Promise { - const raw = await loadWorkflowFromNormalizedTablesRaw(workflowId) + const raw = await loadWorkflowFromNormalizedTablesRaw(workflowId, externalTx) if (!raw) return null const { blocks: finalBlocks, migrated } = await applyBlockMigrations(raw.blocks, raw.workspaceId) if (migrated) { Promise.resolve().then(() => - persistMigratedBlocks(workflowId, raw.blocks, finalBlocks, raw.blockUpdatedAt) + persistMigratedBlocks(workflowId, raw.blocks, finalBlocks, raw.blockUpdatedAtById) ) } @@ -382,12 +411,68 @@ export async function loadWorkflowFromNormalizedTables( } } +export async function loadWorkflowDeploymentSnapshot( + workflowId: string, + externalTx?: DbOrTx +): Promise { + const loadSnapshot = async (tx: DbOrTx) => { + const [normalizedData, [workflowRecord]] = await Promise.all([ + loadWorkflowFromNormalizedTables(workflowId, tx), + tx + .select({ variables: workflow.variables }) + .from(workflow) + .where(eq(workflow.id, workflowId)) + .limit(1), + ]) + + if (!normalizedData) return null + + return buildWorkflowDeploymentSnapshot(normalizedData, 
workflowRecord?.variables) + } + + if (externalTx) { + return loadSnapshot(externalTx) + } + + return db.transaction(async (tx) => { + await tx.execute(sql`SET TRANSACTION ISOLATION LEVEL REPEATABLE READ`) + return loadSnapshot(tx) + }) +} + +export function buildWorkflowDeploymentSnapshot( + normalizedData: NormalizedWorkflowData, + variables: unknown +): WorkflowState { + return { + blocks: normalizedData.blocks, + edges: normalizedData.edges, + loops: normalizedData.loops, + parallels: normalizedData.parallels, + variables: (variables as WorkflowState['variables']) || {}, + lastSaved: Date.now(), + } +} + export async function saveWorkflowToNormalizedTables( workflowId: string, state: WorkflowState, externalTx?: DbOrTx ): Promise<{ success: boolean; error?: string }> { - return saveWorkflowToNormalizedTablesRaw(workflowId, state, externalTx) + if (externalTx) { + return saveWorkflowToNormalizedTablesRaw(workflowId, state, externalTx) + } + + try { + return await db.transaction(async (tx) => { + await lockWorkflowForUpdate(tx, workflowId) + return saveWorkflowToNormalizedTablesRaw(workflowId, state, tx) + }) + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Failed to save workflow state' + logger.error(`Error saving workflow ${workflowId} to normalized tables:`, error) + return { success: false, error: message } + } } export async function workflowExistsInNormalizedTables(workflowId: string): Promise { @@ -406,44 +491,77 @@ export async function workflowExistsInNormalizedTables(workflowId: string): Prom } } +type DeployWorkflowValidationResult = + | { success: true } + | { success: false; error: string; errorCode?: 'validation' } + export async function deployWorkflow(params: { workflowId: string deployedBy: string workflowName?: string + workflowState?: WorkflowState + validateWorkflowState?: ( + workflowState: WorkflowState + ) => DeployWorkflowValidationResult | Promise + onDeployTransaction?: ( + tx: DbOrTx, + result: { deploymentVersionId: string; version: number; previousVersionId?: string } + ) => Promise }): Promise<{ success: boolean version?: number deploymentVersionId?: string deployedAt?: Date - currentState?: any + previousVersionId?: string + currentState?: WorkflowState error?: string + errorCode?: 'validation' | 'not_found' }> { const { workflowId, deployedBy, workflowName } = params try { - const normalizedData = await loadWorkflowFromNormalizedTables(workflowId) - if (!normalizedData) { - return { success: false, error: 'Failed to load workflow state' } - } + const now = new Date() + let currentState: WorkflowState | null = null - const [workflowRecord] = await db - .select({ variables: workflow.variables }) - .from(workflow) - .where(eq(workflow.id, workflowId)) - .limit(1) + const deployedVersion = await db.transaction(async (tx) => { + if (!(await lockWorkflowForUpdate(tx, workflowId))) { + return { + success: false as const, + error: 'Workflow not found', + errorCode: 'not_found' as const, + } + } - const currentState = { - blocks: normalizedData.blocks, - edges: normalizedData.edges, - loops: normalizedData.loops, - parallels: normalizedData.parallels, - variables: 
workflowRecord?.variables || undefined, - lastSaved: Date.now(), - } + currentState = params.workflowState ?? (await loadWorkflowDeploymentSnapshot(workflowId, tx)) + if (!currentState) { + return { + success: false as const, + error: 'Failed to load workflow state', + errorCode: 'validation' as const, + } + } - const now = new Date() + const validationError = await params.validateWorkflowState?.(currentState) + if (validationError && !validationError.success) { + return { + success: false as const, + error: validationError.error, + errorCode: validationError.errorCode, + } + } + + const [currentActiveVersion] = await tx + .select({ id: workflowDeploymentVersion.id }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, workflowId), + eq(workflowDeploymentVersion.isActive, true) + ) + ) + .limit(1) + const previousVersionId = currentActiveVersion?.id - const deployedVersion = await db.transaction(async (tx) => { const [{ maxVersion }] = await tx .select({ maxVersion: sql`COALESCE(MAX("version"), 0)` }) .from(workflowDeploymentVersion) @@ -474,9 +592,33 @@ export async function deployWorkflow(params: { await tx.update(workflow).set(updateData).where(eq(workflow.id, workflowId)) - return { version: nextVersion, deploymentVersionId } + await params.onDeployTransaction?.(tx, { + deploymentVersionId, + version: nextVersion, + previousVersionId, + }) + + return { + success: true as const, + version: nextVersion, + deploymentVersionId, + previousVersionId, + currentState, + } }) + if (!deployedVersion.success) { + return { + success: false, + error: deployedVersion.error, + errorCode: deployedVersion.errorCode, + } + } + const deployedState = deployedVersion.currentState + if (!deployedState) { + return { success: false, error: 'Failed to load workflow state' } + } + logger.info(`Deployed workflow ${workflowId} as v${deployedVersion.version}`) if (workflowName) { @@ -484,7 +626,7 @@ export async function deployWorkflow(params: { 
const { PlatformEvents } = await import('@/lib/core/telemetry') const blockTypeCounts: Record = {} - for (const block of Object.values(currentState.blocks)) { + for (const block of Object.values(deployedState.blocks)) { const blockType = block.type || 'unknown' blockTypeCounts[blockType] = (blockTypeCounts[blockType] || 0) + 1 } @@ -492,11 +634,11 @@ export async function deployWorkflow(params: { PlatformEvents.workflowDeployed({ workflowId, workflowName, - blocksCount: Object.keys(currentState.blocks).length, - edgesCount: currentState.edges.length, + blocksCount: Object.keys(deployedState.blocks).length, + edgesCount: deployedState.edges.length, version: deployedVersion.version, - loopsCount: Object.keys(currentState.loops).length, - parallelsCount: Object.keys(currentState.parallels).length, + loopsCount: Object.keys(deployedState.loops).length, + parallelsCount: Object.keys(deployedState.parallels).length, blockTypes: JSON.stringify(blockTypeCounts), }) } catch (telemetryError) { @@ -508,8 +650,9 @@ export async function deployWorkflow(params: { success: true, version: deployedVersion.version, deploymentVersionId: deployedVersion.deploymentVersionId, + previousVersionId: deployedVersion.previousVersionId, deployedAt: now, - currentState, + currentState: deployedState, } } catch (error) { logger.error(`Error deploying workflow ${workflowId}:`, error) @@ -668,13 +811,27 @@ export function regenerateWorkflowStateIds(state: RegenerateStateInput): Regener } } -export async function undeployWorkflow(params: { workflowId: string; tx?: DbOrTx }): Promise<{ +export async function undeployWorkflow(params: { + workflowId: string + tx?: DbOrTx + onUndeployTransaction?: (tx: DbOrTx, result: { deploymentVersionIds: string[] }) => Promise +}): Promise<{ success: boolean error?: string }> { const { workflowId, tx } = params const executeUndeploy = async (dbCtx: DbOrTx) => { + if (!(await lockWorkflowForUpdate(dbCtx, workflowId))) { + throw new Error('Workflow not found') + } + 
+ const deploymentVersions = await dbCtx + .select({ id: workflowDeploymentVersion.id }) + .from(workflowDeploymentVersion) + .where(eq(workflowDeploymentVersion.workflowId, workflowId)) + const deploymentVersionIds = deploymentVersions.map((version) => version.id) + const { deleteSchedulesForWorkflow } = await import('@/lib/workflows/schedules/deploy') await deleteSchedulesForWorkflow(workflowId, dbCtx) @@ -687,6 +844,8 @@ export async function undeployWorkflow(params: { workflowId: string; tx?: DbOrTx .update(workflow) .set({ isDeployed: false, deployedAt: null }) .where(eq(workflow.id, workflowId)) + + await params.onUndeployTransaction?.(dbCtx, { deploymentVersionIds }) } try { @@ -712,33 +871,55 @@ export async function undeployWorkflow(params: { workflowId: string; tx?: DbOrTx export async function activateWorkflowVersion(params: { workflowId: string version: number + onActivateTransaction?: ( + tx: DbOrTx, + result: { deploymentVersionId: string; previousVersionId?: string } + ) => Promise }): Promise<{ success: boolean deployedAt?: Date state?: unknown + previousVersionId?: string error?: string }> { const { workflowId, version } = params try { - const [versionData] = await db - .select({ id: workflowDeploymentVersion.id, state: workflowDeploymentVersion.state }) - .from(workflowDeploymentVersion) - .where( - and( - eq(workflowDeploymentVersion.workflowId, workflowId), - eq(workflowDeploymentVersion.version, version) + const now = new Date() + let versionState: unknown + + const result = await db.transaction(async (tx) => { + if (!(await lockWorkflowForUpdate(tx, workflowId))) { + return { success: false as const, error: 'Workflow not found' } + } + + const [currentActiveVersion] = await tx + .select({ id: workflowDeploymentVersion.id }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, workflowId), + eq(workflowDeploymentVersion.isActive, true) + ) ) - ) - .limit(1) + .limit(1) - if (!versionData) { - return { 
success: false, error: 'Deployment version not found' } - } + const [versionData] = await tx + .select({ id: workflowDeploymentVersion.id, state: workflowDeploymentVersion.state }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, workflowId), + eq(workflowDeploymentVersion.version, version) + ) + ) + .limit(1) - const now = new Date() + if (!versionData) { + return { success: false as const, error: 'Deployment version not found' } + } + versionState = versionData.state - await db.transaction(async (tx) => { await tx .update(workflowDeploymentVersion) .set({ isActive: false }) @@ -763,14 +944,26 @@ export async function activateWorkflowVersion(params: { .update(workflow) .set({ isDeployed: true, deployedAt: now }) .where(eq(workflow.id, workflowId)) + + await params.onActivateTransaction?.(tx, { + deploymentVersionId: versionData.id, + previousVersionId: currentActiveVersion?.id, + }) + + return { success: true as const, previousVersionId: currentActiveVersion?.id } }) + if (!result.success) { + return { success: false, error: result.error } + } + logger.info(`Activated version ${version} for workflow ${workflowId}`) return { success: true, deployedAt: now, - state: versionData.state, + state: versionState, + previousVersionId: result.previousVersionId, } } catch (error) { logger.error(`Error activating version ${version} for workflow ${workflowId}:`, error) @@ -788,29 +981,47 @@ export async function activateWorkflowVersionById(params: { success: boolean deployedAt?: Date state?: unknown + previousVersionId?: string error?: string }> { const { workflowId, deploymentVersionId } = params try { - const [versionData] = await db - .select({ id: workflowDeploymentVersion.id, state: workflowDeploymentVersion.state }) - .from(workflowDeploymentVersion) - .where( - and( - eq(workflowDeploymentVersion.workflowId, workflowId), - eq(workflowDeploymentVersion.id, deploymentVersionId) + const now = new Date() + let versionState: 
unknown + + const result = await db.transaction(async (tx) => { + if (!(await lockWorkflowForUpdate(tx, workflowId))) { + return { success: false as const, error: 'Workflow not found' } + } + + const [currentActiveVersion] = await tx + .select({ id: workflowDeploymentVersion.id }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, workflowId), + eq(workflowDeploymentVersion.isActive, true) + ) ) - ) - .limit(1) + .limit(1) - if (!versionData) { - return { success: false, error: 'Deployment version not found' } - } + const [versionData] = await tx + .select({ id: workflowDeploymentVersion.id, state: workflowDeploymentVersion.state }) + .from(workflowDeploymentVersion) + .where( + and( + eq(workflowDeploymentVersion.workflowId, workflowId), + eq(workflowDeploymentVersion.id, deploymentVersionId) + ) + ) + .limit(1) - const now = new Date() + if (!versionData) { + return { success: false as const, error: 'Deployment version not found' } + } + versionState = versionData.state - await db.transaction(async (tx) => { await tx .update(workflowDeploymentVersion) .set({ isActive: false }) @@ -830,14 +1041,21 @@ export async function activateWorkflowVersionById(params: { .update(workflow) .set({ isDeployed: true, deployedAt: now }) .where(eq(workflow.id, workflowId)) + + return { success: true as const, previousVersionId: currentActiveVersion?.id } }) + if (!result.success) { + return { success: false, error: result.error } + } + logger.info(`Activated deployment version ${deploymentVersionId} for workflow ${workflowId}`) return { success: true, deployedAt: now, - state: versionData.state, + state: versionState, + previousVersionId: result.previousVersionId, } } catch (error) { logger.error( diff --git a/apps/sim/lib/workflows/schedules/deploy.ts b/apps/sim/lib/workflows/schedules/deploy.ts index f413b2665e6..466c4ee171e 100644 --- a/apps/sim/lib/workflows/schedules/deploy.ts +++ b/apps/sim/lib/workflows/schedules/deploy.ts @@ -28,7 
+28,7 @@ export interface ScheduleDeployResult { export async function createSchedulesForDeploy( workflowId: string, blocks: Record, - _tx: DbOrTx, + dbCtx: DbOrTx, deploymentVersionId?: string ): Promise { const scheduleBlocks = findScheduleBlocks(blocks) @@ -72,7 +72,7 @@ export async function createSchedulesForDeploy( } | null = null try { - await db.transaction(async (tx) => { + const writeSchedules = async (tx: DbOrTx) => { const currentBlockIds = new Set(validatedBlocks.map((b) => b.blockId)) const existingSchedules = await tx @@ -151,7 +151,13 @@ export async function createSchedulesForDeploy( lastScheduleInfo = { scheduleId: values.id, cronExpression, nextRunAt, timezone } } - }) + } + + if (dbCtx === db || !hasScheduleWriteMethods(dbCtx)) { + await db.transaction(writeSchedules) + } else { + await writeSchedules(dbCtx) + } } catch (error) { logger.error(`Failed to create schedules for workflow ${workflowId}`, error) return { @@ -166,6 +172,15 @@ export async function createSchedulesForDeploy( } } +function hasScheduleWriteMethods(value: DbOrTx): boolean { + const candidate = value as Partial> + return ( + typeof candidate.select === 'function' && + typeof candidate.insert === 'function' && + typeof candidate.delete === 'function' + ) +} + /** * Delete all schedules for a workflow * This should be called within a database transaction during undeploy @@ -204,6 +219,7 @@ export async function cleanupDeploymentVersion(params: { * Only deletes DB records. 
*/ skipExternalCleanup?: boolean + strictExternalCleanup?: boolean }): Promise { const { workflowId, @@ -211,13 +227,15 @@ export async function cleanupDeploymentVersion(params: { requestId, deploymentVersionId, skipExternalCleanup = false, + strictExternalCleanup = false, } = params await cleanupWebhooksForWorkflow( workflowId, workflow, requestId, deploymentVersionId, - skipExternalCleanup + skipExternalCleanup, + strictExternalCleanup ) await deleteSchedulesForWorkflow(workflowId, db, deploymentVersionId) } diff --git a/apps/sim/stores/operation-queue/store.test.ts b/apps/sim/stores/operation-queue/store.test.ts index b86a3da2561..b18439f60cc 100644 --- a/apps/sim/stores/operation-queue/store.test.ts +++ b/apps/sim/stores/operation-queue/store.test.ts @@ -9,6 +9,7 @@ describe('operation queue room gating', () => { vi.clearAllMocks() useOperationQueueStore.setState({ operations: [], + workflowOperationVersions: {}, isProcessing: false, hasOperationError: false, }) @@ -18,6 +19,7 @@ describe('operation queue room gating', () => { afterEach(() => { useOperationQueueStore.setState({ operations: [], + workflowOperationVersions: {}, isProcessing: false, hasOperationError: false, }) @@ -71,4 +73,298 @@ describe('operation queue room gating', () => { useOperationQueueStore.getState().confirmOperation('op-1') }) + + it('reports pending operations per workflow', () => { + useOperationQueueStore.getState().addToQueue({ + id: 'op-1', + workflowId: 'workflow-a', + userId: 'user-1', + operation: { + operation: 'replace-state', + target: 'workflow', + payload: { state: {} }, + }, + }) + + expect(useOperationQueueStore.getState().hasPendingOperations('workflow-a')).toBe(true) + expect(useOperationQueueStore.getState().hasPendingOperations('workflow-b')).toBe(false) + }) + + it('tracks local operation activity per workflow', () => { + useOperationQueueStore.getState().addToQueue({ + id: 'op-1', + workflowId: 'workflow-a', + userId: 'user-1', + operation: { + operation: 
'replace-state', + target: 'workflow', + payload: { state: {} }, + }, + }) + + expect(useOperationQueueStore.getState().workflowOperationVersions['workflow-a']).toBe(1) + expect( + useOperationQueueStore.getState().workflowOperationVersions['workflow-b'] + ).toBeUndefined() + }) + + it('coalesces pending subblock updates to the latest value for the same field', () => { + useOperationQueueStore.getState().addToQueue({ + id: 'op-1', + workflowId: 'workflow-a', + userId: 'user-1', + operation: { + operation: 'subblock-update', + target: 'subblock', + payload: { + blockId: 'block-1', + subblockId: 'prompt', + value: 'old value', + }, + }, + }) + useOperationQueueStore.getState().addToQueue({ + id: 'op-2', + workflowId: 'workflow-a', + userId: 'user-1', + operation: { + operation: 'subblock-update', + target: 'subblock', + payload: { + blockId: 'block-1', + subblockId: 'prompt', + value: 'new value', + }, + }, + }) + + expect(useOperationQueueStore.getState().operations).toEqual([ + expect.objectContaining({ + id: 'op-2', + operation: expect.objectContaining({ + payload: expect.objectContaining({ value: 'new value' }), + }), + }), + ]) + }) + + it('does not coalesce matching subblock updates across workflows', () => { + useOperationQueueStore.getState().addToQueue({ + id: 'op-1', + workflowId: 'workflow-a', + userId: 'user-1', + operation: { + operation: 'subblock-update', + target: 'subblock', + payload: { + blockId: 'block-1', + subblockId: 'prompt', + value: 'workflow a value', + }, + }, + }) + useOperationQueueStore.getState().addToQueue({ + id: 'op-2', + workflowId: 'workflow-b', + userId: 'user-1', + operation: { + operation: 'subblock-update', + target: 'subblock', + payload: { + blockId: 'block-1', + subblockId: 'prompt', + value: 'workflow b value', + }, + }, + }) + + expect(useOperationQueueStore.getState().operations).toEqual([ + expect.objectContaining({ + id: 'op-1', + workflowId: 'workflow-a', + }), + expect.objectContaining({ + id: 'op-2', + workflowId: 
'workflow-b', + }), + ]) + }) + + it('coalesces variable field updates without dropping unrelated fields', () => { + useOperationQueueStore.getState().addToQueue({ + id: 'op-1', + workflowId: 'workflow-a', + userId: 'user-1', + operation: { + operation: 'variable-update', + target: 'variable', + payload: { + variableId: 'variable-1', + field: 'value', + value: 'old value', + }, + }, + }) + useOperationQueueStore.getState().addToQueue({ + id: 'op-2', + workflowId: 'workflow-a', + userId: 'user-1', + operation: { + operation: 'variable-update', + target: 'variable', + payload: { + variableId: 'variable-1', + field: 'name', + value: 'Variable Name', + }, + }, + }) + useOperationQueueStore.getState().addToQueue({ + id: 'op-3', + workflowId: 'workflow-a', + userId: 'user-1', + operation: { + operation: 'variable-update', + target: 'variable', + payload: { + variableId: 'variable-1', + field: 'value', + value: 'new value', + }, + }, + }) + + expect(useOperationQueueStore.getState().operations).toEqual([ + expect.objectContaining({ + id: 'op-2', + operation: expect.objectContaining({ + payload: expect.objectContaining({ field: 'name', value: 'Variable Name' }), + }), + }), + expect.objectContaining({ + id: 'op-3', + operation: expect.objectContaining({ + payload: expect.objectContaining({ field: 'value', value: 'new value' }), + }), + }), + ]) + }) + + it('does not coalesce matching variable updates across workflows', () => { + useOperationQueueStore.getState().addToQueue({ + id: 'op-1', + workflowId: 'workflow-a', + userId: 'user-1', + operation: { + operation: 'variable-update', + target: 'variable', + payload: { + variableId: 'variable-1', + field: 'value', + value: 'workflow a value', + }, + }, + }) + useOperationQueueStore.getState().addToQueue({ + id: 'op-2', + workflowId: 'workflow-b', + userId: 'user-1', + operation: { + operation: 'variable-update', + target: 'variable', + payload: { + variableId: 'variable-1', + field: 'value', + value: 'workflow b value', + }, 
+ }, + }) + + expect(useOperationQueueStore.getState().operations).toEqual([ + expect.objectContaining({ + id: 'op-1', + workflowId: 'workflow-a', + }), + expect.objectContaining({ + id: 'op-2', + workflowId: 'workflow-b', + }), + ]) + }) + + it('waits for matching workflow operations to drain', async () => { + useOperationQueueStore.getState().addToQueue({ + id: 'op-1', + workflowId: 'workflow-a', + userId: 'user-1', + operation: { + operation: 'replace-state', + target: 'workflow', + payload: { state: {} }, + }, + }) + + const drained = useOperationQueueStore.getState().waitForWorkflowOperations('workflow-a') + useOperationQueueStore.getState().confirmOperation('op-1') + + await expect(drained).resolves.toBe(true) + }) + + it('does not wait on operations from other workflows', async () => { + useOperationQueueStore.getState().addToQueue({ + id: 'op-1', + workflowId: 'workflow-a', + userId: 'user-1', + operation: { + operation: 'replace-state', + target: 'workflow', + payload: { state: {} }, + }, + }) + + await expect( + useOperationQueueStore.getState().waitForWorkflowOperations('workflow-b') + ).resolves.toBe(true) + }) + + it('stops waiting when an operation error is reported', async () => { + useOperationQueueStore.getState().addToQueue({ + id: 'op-1', + workflowId: 'workflow-a', + userId: 'user-1', + operation: { + operation: 'replace-state', + target: 'workflow', + payload: { state: {} }, + }, + }) + + const drained = useOperationQueueStore.getState().waitForWorkflowOperations('workflow-a') + useOperationQueueStore.setState({ hasOperationError: true }) + + await expect(drained).resolves.toBe(false) + }) + + it('stops waiting when matching workflow operations do not drain before timeout', async () => { + vi.useFakeTimers() + try { + useOperationQueueStore.getState().addToQueue({ + id: 'op-1', + workflowId: 'workflow-a', + userId: 'user-1', + operation: { + operation: 'replace-state', + target: 'workflow', + payload: { state: {} }, + }, + }) + + const drained 
= useOperationQueueStore.getState().waitForWorkflowOperations('workflow-a', 100) + await vi.advanceTimersByTimeAsync(100) + + await expect(drained).resolves.toBe(false) + } finally { + vi.useRealTimers() + } + }) }) diff --git a/apps/sim/stores/operation-queue/store.ts b/apps/sim/stores/operation-queue/store.ts index 052bf9c1213..bf0d9533e85 100644 --- a/apps/sim/stores/operation-queue/store.ts +++ b/apps/sim/stores/operation-queue/store.ts @@ -29,6 +29,7 @@ const RETRY_DELAY_BASE_MS = 1000 const retryTimeouts = new Map() const operationTimeouts = new Map() +const DEFAULT_WORKFLOW_DRAIN_TIMEOUT_MS = 20000 let emitWorkflowOperation: | (( @@ -95,29 +96,32 @@ let currentRegisteredWorkflowId: string | null = null export const useOperationQueueStore = create((set, get) => ({ operations: [], + workflowOperationVersions: {}, isProcessing: false, hasOperationError: false, addToQueue: (operation) => { + set((state) => ({ + workflowOperationVersions: { + ...state.workflowOperationVersions, + [operation.workflowId]: (state.workflowOperationVersions[operation.workflowId] ?? 
0) + 1, + }, + })) + + let shouldDropPendingOperation = (_op: QueuedOperation) => false + if ( operation.operation.operation === 'subblock-update' && operation.operation.target === 'subblock' ) { const { blockId, subblockId } = operation.operation.payload - set((state) => ({ - operations: [ - ...state.operations.filter( - (op) => - !( - op.status === 'pending' && - op.operation.operation === 'subblock-update' && - op.operation.target === 'subblock' && - op.operation.payload?.blockId === blockId && - op.operation.payload?.subblockId === subblockId - ) - ), - ], - })) + shouldDropPendingOperation = (op) => + op.status === 'pending' && + op.workflowId === operation.workflowId && + op.operation.operation === 'subblock-update' && + op.operation.target === 'subblock' && + op.operation.payload?.blockId === blockId && + op.operation.payload?.subblockId === subblockId } if ( @@ -125,20 +129,13 @@ export const useOperationQueueStore = create((set, get) => operation.operation.target === 'variable' ) { const { variableId, field } = operation.operation.payload - set((state) => ({ - operations: [ - ...state.operations.filter( - (op) => - !( - op.status === 'pending' && - op.operation.operation === 'variable-update' && - op.operation.target === 'variable' && - op.operation.payload?.variableId === variableId && - op.operation.payload?.field === field - ) - ), - ], - })) + shouldDropPendingOperation = (op) => + op.status === 'pending' && + op.workflowId === operation.workflowId && + op.operation.operation === 'variable-update' && + op.operation.target === 'variable' && + op.operation.payload?.variableId === variableId && + op.operation.payload?.field === field } const state = get() @@ -154,6 +151,7 @@ export const useOperationQueueStore = create((set, get) => const duplicateContent = state.operations.find( (op) => + !shouldDropPendingOperation(op) && op.operation.operation === operation.operation.operation && op.operation.target === operation.operation.target && op.workflowId === 
operation.workflowId && @@ -194,7 +192,7 @@ export const useOperationQueueStore = create((set, get) => }) set((state) => ({ - operations: [...state.operations, queuedOp], + operations: [...state.operations.filter((op) => !shouldDropPendingOperation(op)), queuedOp], })) get().processNextOperation() @@ -412,6 +410,42 @@ export const useOperationQueueStore = create((set, get) => operationTimeouts.set(nextOperation.id, timeoutId) }, + hasPendingOperations: (workflowId: string) => { + return get().operations.some((op) => op.workflowId === workflowId) + }, + + waitForWorkflowOperations: ( + workflowId: string, + timeoutMs = DEFAULT_WORKFLOW_DRAIN_TIMEOUT_MS + ) => { + if (!get().hasPendingOperations(workflowId)) { + return Promise.resolve(true) + } + + return new Promise((resolve) => { + let unsubscribe = () => {} + const timeout = setTimeout(() => { + unsubscribe() + resolve(false) + }, timeoutMs) + + unsubscribe = useOperationQueueStore.subscribe((state) => { + if (state.hasOperationError) { + clearTimeout(timeout) + unsubscribe() + resolve(false) + return + } + + if (!state.operations.some((op) => op.workflowId === workflowId)) { + clearTimeout(timeout) + unsubscribe() + resolve(true) + } + }) + }) + }, + cancelOperationsForBlock: (blockId: string) => { logger.debug('Canceling all operations for block', { blockId }) @@ -598,6 +632,8 @@ export function useOperationQueue() { confirmOperation: actions.confirmOperation, failOperation: actions.failOperation, processNextOperation: actions.processNextOperation, + hasPendingOperations: actions.hasPendingOperations, + waitForWorkflowOperations: actions.waitForWorkflowOperations, cancelOperationsForBlock: actions.cancelOperationsForBlock, cancelOperationsForVariable: actions.cancelOperationsForVariable, triggerOfflineMode: actions.triggerOfflineMode, diff --git a/apps/sim/stores/operation-queue/types.ts b/apps/sim/stores/operation-queue/types.ts index 7122e6a40a6..e59731a82b5 100644 --- 
a/apps/sim/stores/operation-queue/types.ts +++ b/apps/sim/stores/operation-queue/types.ts @@ -14,6 +14,7 @@ export interface QueuedOperation { export interface OperationQueueState { operations: QueuedOperation[] + workflowOperationVersions: Record isProcessing: boolean hasOperationError: boolean @@ -22,6 +23,8 @@ export interface OperationQueueState { failOperation: (operationId: string, retryable?: boolean) => void handleOperationTimeout: (operationId: string) => void processNextOperation: () => void + hasPendingOperations: (workflowId: string) => boolean + waitForWorkflowOperations: (workflowId: string, timeoutMs?: number) => Promise cancelOperationsForBlock: (blockId: string) => void cancelOperationsForVariable: (variableId: string) => void diff --git a/apps/sim/stores/workflow-diff/store.ts b/apps/sim/stores/workflow-diff/store.ts index b97f2951894..7f5ebce96d1 100644 --- a/apps/sim/stores/workflow-diff/store.ts +++ b/apps/sim/stores/workflow-diff/store.ts @@ -18,6 +18,7 @@ import { createBatchedUpdater, getLatestUserMessageId, persistWorkflowStateToServer, + WORKFLOW_DIFF_SETTLED_EVENT, } from './utils' const logger = createLogger('WorkflowDiffStore') @@ -60,6 +61,11 @@ function isEmptyDiffAnalysis( return !hasBlockChanges && !hasEdgeChanges && !hasFieldChanges } +function notifyDiffSettled(workflowId: string | null | undefined): void { + if (!workflowId || typeof window === 'undefined') return + window.dispatchEvent(new CustomEvent(WORKFLOW_DIFF_SETTLED_EVENT, { detail: { workflowId } })) +} + export const useWorkflowDiffStore = create()( devtools( (set, get) => { @@ -74,6 +80,10 @@ export const useWorkflowDiffStore = create { @@ -340,6 +351,7 @@ export const useWorkflowDiffStore = create { @@ -372,7 +384,8 @@ export const useWorkflowDiffStore = create { - logger.error('Failed to persist baseline workflow state:', error) - }) + const pendingGenerationBeforePersist = + get().pendingExternalUpdates[baselineWorkflowId] ?? 
0 + const persisted = await persistWorkflowStateToServer(baselineWorkflowId, baselineWorkflow) + if (!persisted) { + logger.error('Failed to persist baseline workflow state') + if ( + (get().pendingExternalUpdates[baselineWorkflowId] ?? 0) <= + pendingGenerationBeforePersist + ) { + get().clearExternalUpdatePending(baselineWorkflowId) + } + get().setWorkflowReconciliationInProgress(baselineWorkflowId, false) + get().setWorkflowReconciliationError( + baselineWorkflowId, + 'Failed to save rejected copilot changes. Refresh and try again.' + ) + if ( + (get().pendingExternalUpdates[baselineWorkflowId] ?? 0) > + pendingGenerationBeforePersist + ) { + notifyDiffSettled(baselineWorkflowId) + } + return + } + if ( + (get().pendingExternalUpdates[baselineWorkflowId] ?? 0) <= + pendingGenerationBeforePersist + ) { + get().clearExternalUpdatePending(baselineWorkflowId) + } if (_triggerMessageId) { fetch(COPILOT_STATS_API_PATH, { method: 'POST', @@ -433,6 +470,9 @@ export const useWorkflowDiffStore = create { @@ -505,6 +545,65 @@ export const useWorkflowDiffStore = create { + set((state) => ({ + remoteUpdateVersions: { + ...state.remoteUpdateVersions, + [workflowId]: (state.remoteUpdateVersions[workflowId] ?? 0) + 1, + }, + reconciliationErrors: Object.fromEntries( + Object.entries(state.reconciliationErrors).filter(([id]) => id !== workflowId) + ), + })) + }, + + markExternalUpdatePending: (workflowId) => { + const current = get() + set({ + pendingExternalUpdates: { + ...current.pendingExternalUpdates, + [workflowId]: (current.pendingExternalUpdates[workflowId] ?? 0) + 1, + }, + remoteUpdateVersions: { + ...current.remoteUpdateVersions, + [workflowId]: (current.remoteUpdateVersions[workflowId] ?? 
0) + 1, + }, + reconciliationErrors: Object.fromEntries( + Object.entries(current.reconciliationErrors).filter(([id]) => id !== workflowId) + ), + }) + }, + + clearExternalUpdatePending: (workflowId) => { + set((state) => { + const { [workflowId]: _removed, ...pendingExternalUpdates } = + state.pendingExternalUpdates + return { pendingExternalUpdates } + }) + }, + + setWorkflowReconciliationInProgress: (workflowId, isReconciling) => { + set((state) => { + const { [workflowId]: _removed, ...reconcilingWorkflows } = state.reconcilingWorkflows + return { + reconcilingWorkflows: isReconciling + ? { ...reconcilingWorkflows, [workflowId]: true } + : reconcilingWorkflows, + } + }) + }, + + setWorkflowReconciliationError: (workflowId, error) => { + set((state) => { + const { [workflowId]: _removed, ...reconciliationErrors } = state.reconciliationErrors + return { + reconciliationErrors: error + ? { ...reconciliationErrors, [workflowId]: error } + : reconciliationErrors, + } + }) + }, } }, { name: 'workflow-diff-store' } diff --git a/apps/sim/stores/workflow-diff/types.ts b/apps/sim/stores/workflow-diff/types.ts index b6cc01203fd..2beb4253c76 100644 --- a/apps/sim/stores/workflow-diff/types.ts +++ b/apps/sim/stores/workflow-diff/types.ts @@ -10,6 +10,10 @@ export interface WorkflowDiffState { diffAnalysis: DiffAnalysis | null diffMetadata: WorkflowDiff['metadata'] | null diffError?: string | null + pendingExternalUpdates: Record + remoteUpdateVersions: Record + reconcilingWorkflows: Record + reconciliationErrors: Record _triggerMessageId?: string | null } @@ -37,5 +41,10 @@ export interface WorkflowDiffActions { acceptChanges: (options?: DiffActionOptions) => Promise rejectChanges: (options?: DiffActionOptions) => Promise reapplyDiffMarkers: () => void + markRemoteUpdateSeen: (workflowId: string) => void + markExternalUpdatePending: (workflowId: string) => void + clearExternalUpdatePending: (workflowId: string) => void + setWorkflowReconciliationInProgress: (workflowId: 
string, isReconciling: boolean) => void + setWorkflowReconciliationError: (workflowId: string, error: string | null) => void _batchedStateUpdate: (updates: Partial) => void } diff --git a/apps/sim/stores/workflow-diff/utils.test.ts b/apps/sim/stores/workflow-diff/utils.test.ts new file mode 100644 index 00000000000..c40787ce678 --- /dev/null +++ b/apps/sim/stores/workflow-diff/utils.test.ts @@ -0,0 +1,77 @@ +/** + * @vitest-environment node + */ +import { beforeEach, describe, expect, it } from 'vitest' +import { useVariablesStore } from '@/stores/variables/store' +import { applyWorkflowVariablesToStore } from '@/stores/workflow-diff/utils' + +describe('applyWorkflowVariablesToStore', () => { + beforeEach(() => { + useVariablesStore.setState({ + variables: {}, + isLoading: false, + error: null, + isEditing: null, + }) + }) + + it('hydrates variables for the target workflow and preserves other workflows', () => { + useVariablesStore.setState({ + variables: { + old: { + id: 'old', + workflowId: 'workflow-a', + name: 'oldValue', + type: 'plain', + value: 'stale', + }, + other: { + id: 'other', + workflowId: 'workflow-b', + name: 'otherValue', + type: 'plain', + value: 'kept', + }, + }, + }) + + applyWorkflowVariablesToStore('workflow-a', { + next: { + id: 'next', + name: 'nextValue', + type: 'number', + value: 42, + }, + }) + + expect(useVariablesStore.getState().variables).toEqual({ + other: { + id: 'other', + workflowId: 'workflow-b', + name: 'otherValue', + type: 'plain', + value: 'kept', + }, + next: { + id: 'next', + workflowId: 'workflow-a', + name: 'nextValue', + type: 'number', + value: 42, + }, + }) + }) + + it('preserves null variable values from persisted workflow state', () => { + applyWorkflowVariablesToStore('workflow-a', { + next: { + id: 'next', + name: 'nullableValue', + type: 'object', + value: null, + }, + }) + + expect(useVariablesStore.getState().variables.next.value).toBeNull() + }) +}) diff --git a/apps/sim/stores/workflow-diff/utils.ts 
b/apps/sim/stores/workflow-diff/utils.ts index f7c8ab7c393..68d93bc1451 100644 --- a/apps/sim/stores/workflow-diff/utils.ts +++ b/apps/sim/stores/workflow-diff/utils.ts @@ -7,6 +7,8 @@ import { type WorkflowStateContractInput, } from '@/lib/api/contracts/workflows' import { stripWorkflowDiffMarkers } from '@/lib/workflows/diff' +import { useVariablesStore } from '@/stores/variables/store' +import type { Variable } from '@/stores/variables/types' import { useWorkflowRegistry } from '../workflows/registry/store' import { useSubBlockStore } from '../workflows/subblock/store' import { mergeSubblockState } from '../workflows/utils' @@ -15,6 +17,7 @@ import type { WorkflowState } from '../workflows/workflow/types' import type { WorkflowDiffState } from './types' const logger = createLogger('WorkflowDiffStore') +export const WORKFLOW_DIFF_SETTLED_EVENT = 'workflow-diff-settled' export function cloneWorkflowState(state: WorkflowState): WorkflowState { return { @@ -58,6 +61,9 @@ export function applyWorkflowStateToStores( workflowStore.replaceWorkflowState(cloned, options) const subBlockValues = extractSubBlockValues(workflowState) useSubBlockStore.getState().setWorkflowValues(workflowId, subBlockValues) + if (Object.hasOwn(workflowState, 'variables')) { + applyWorkflowVariablesToStore(workflowId, workflowState.variables) + } // Verify what's in the store after apply const afterState = workflowStore.getWorkflowState() @@ -67,6 +73,33 @@ export function applyWorkflowStateToStores( }) } +export function applyWorkflowVariablesToStore( + workflowId: string, + variables?: WorkflowState['variables'] | null +) { + const stampedVariables: Record = {} + + Object.entries(variables || {}).forEach(([id, variable]) => { + if (!variable?.name) return + stampedVariables[id] = { + id: variable.id || id, + workflowId, + name: variable.name, + type: variable.type || 'plain', + value: Object.hasOwn(variable, 'value') ? 
variable.value : '', + } + }) + + useVariablesStore.setState((state) => ({ + variables: { + ...Object.fromEntries( + Object.entries(state.variables).filter(([, variable]) => variable.workflowId !== workflowId) + ), + ...stampedVariables, + }, + })) +} + export function captureBaselineSnapshot(workflowId: string): WorkflowState { const workflowStore = useWorkflowStore.getState() const currentState = workflowStore.getWorkflowState() diff --git a/packages/workflow-persistence/src/load.ts b/packages/workflow-persistence/src/load.ts index 2e6d864b9f7..3f6f8d2de39 100644 --- a/packages/workflow-persistence/src/load.ts +++ b/packages/workflow-persistence/src/load.ts @@ -2,15 +2,15 @@ import { db, workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@ import { createLogger } from '@sim/logger' import type { BlockState, Loop, Parallel } from '@sim/workflow-types/workflow' import { SUBFLOW_TYPES } from '@sim/workflow-types/workflow' -import { and, eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { Edge } from 'reactflow' -import type { NormalizedWorkflowData } from './types' +import type { DbOrTx, NormalizedWorkflowData } from './types' const logger = createLogger('WorkflowPersistenceLoad') export interface RawNormalizedWorkflow extends NormalizedWorkflowData { workspaceId: string - blockUpdatedAt: Record + blockUpdatedAtById: Record } /** @@ -28,14 +28,16 @@ export interface RawNormalizedWorkflow extends NormalizedWorkflowData { * config on the returned object will silently diverge from the migrated block. */ export async function loadWorkflowFromNormalizedTablesRaw( - workflowId: string + workflowId: string, + externalTx?: DbOrTx ): Promise { try { + const tx = externalTx ?? 
db const [blocks, edges, subflows, [workflowRow]] = await Promise.all([ - db.select().from(workflowBlocks).where(eq(workflowBlocks.workflowId, workflowId)), - db.select().from(workflowEdges).where(eq(workflowEdges.workflowId, workflowId)), - db.select().from(workflowSubflows).where(eq(workflowSubflows.workflowId, workflowId)), - db + tx.select().from(workflowBlocks).where(eq(workflowBlocks.workflowId, workflowId)), + tx.select().from(workflowEdges).where(eq(workflowEdges.workflowId, workflowId)), + tx.select().from(workflowSubflows).where(eq(workflowSubflows.workflowId, workflowId)), + tx .select({ workspaceId: workflow.workspaceId }) .from(workflow) .where(eq(workflow.id, workflowId)) @@ -51,7 +53,7 @@ export async function loadWorkflowFromNormalizedTablesRaw( } const blocksMap: Record = {} - const blockUpdatedAt: Record = {} + const blockUpdatedAtById: Record = {} blocks.forEach((block) => { const blockData = (block.data ?? {}) as BlockState['data'] @@ -75,7 +77,7 @@ export async function loadWorkflowFromNormalizedTablesRaw( } blocksMap[block.id] = assembled - blockUpdatedAt[block.id] = block.updatedAt + blockUpdatedAtById[block.id] = block.updatedAt ?? 
null }) const edgesArray: Edge[] = edges.map((edge) => ({ @@ -154,7 +156,7 @@ export async function loadWorkflowFromNormalizedTablesRaw( parallels, isFromNormalizedTables: true, workspaceId: workflowRow.workspaceId, - blockUpdatedAt, + blockUpdatedAtById, } } catch (error) { logger.error(`Error loading workflow ${workflowId} from normalized tables:`, error) @@ -166,11 +168,23 @@ export async function persistMigratedBlocks( workflowId: string, originalBlocks: Record, migratedBlocks: Record, - originalBlockUpdatedAt: Record = {} + blockUpdatedAtById: Record = {} ): Promise { try { for (const [blockId, block] of Object.entries(migratedBlocks)) { if (block !== originalBlocks[blockId]) { + const hasExpectedUpdatedAt = Object.hasOwn(blockUpdatedAtById, blockId) + const expectedUpdatedAt = blockUpdatedAtById[blockId] + const whereClause = hasExpectedUpdatedAt + ? and( + eq(workflowBlocks.id, blockId), + eq(workflowBlocks.workflowId, workflowId), + expectedUpdatedAt === null + ? isNull(workflowBlocks.updatedAt) + : eq(workflowBlocks.updatedAt, expectedUpdatedAt) + ) + : and(eq(workflowBlocks.id, blockId), eq(workflowBlocks.workflowId, workflowId)) + await db .update(workflowBlocks) .set({ @@ -178,15 +192,7 @@ export async function persistMigratedBlocks( data: block.data, updatedAt: new Date(), }) - .where( - originalBlockUpdatedAt[blockId] - ? 
and( - eq(workflowBlocks.id, blockId), - eq(workflowBlocks.workflowId, workflowId), - eq(workflowBlocks.updatedAt, originalBlockUpdatedAt[blockId]) - ) - : and(eq(workflowBlocks.id, blockId), eq(workflowBlocks.workflowId, workflowId)) - ) + .where(whereClause) } } } catch (err) { From 408669dd852b7d76a716e503a7874e8f84ccaf0e Mon Sep 17 00:00:00 2001 From: Waleed Date: Thu, 7 May 2026 17:51:11 -0700 Subject: [PATCH 10/33] fix(md-render): inline code inherits heading size in mothership/templates/changelog (#4504) --- apps/sim/app/changelog/components/timeline-list.tsx | 2 +- apps/sim/app/templates/[id]/template.tsx | 2 +- .../message-content/components/chat-content/chat-content.tsx | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/apps/sim/app/changelog/components/timeline-list.tsx b/apps/sim/app/changelog/components/timeline-list.tsx index 26c91197345..a506c61f394 100644 --- a/apps/sim/app/changelog/components/timeline-list.tsx +++ b/apps/sim/app/changelog/components/timeline-list.tsx @@ -195,7 +195,7 @@ export default function ChangelogList({ initialEntries }: Props) { ), inlineCode: ({ children }) => ( - + {children} ), diff --git a/apps/sim/app/templates/[id]/template.tsx b/apps/sim/app/templates/[id]/template.tsx index 38e9f746bc2..9bac9d9fc74 100644 --- a/apps/sim/app/templates/[id]/template.tsx +++ b/apps/sim/app/templates/[id]/template.tsx @@ -887,7 +887,7 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template ), li: ({ children }) =>
  • {children}
  • , inlineCode: ({ children }) => ( - + {children} ), diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/message-content/components/chat-content/chat-content.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/message-content/components/chat-content/chat-content.tsx index 2c6fa99d5e2..7ff6fa6330b 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/components/message-content/components/chat-content/chat-content.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/components/message-content/components/chat-content/chat-content.tsx @@ -217,7 +217,7 @@ const MARKDOWN_COMPONENTS = { }, inlineCode({ children }: { children?: React.ReactNode }) { return ( - + {children} ) From 6a006851fa588cb092cc66615653fb72e47a2b82 Mon Sep 17 00:00:00 2001 From: Waleed Date: Thu, 7 May 2026 19:39:48 -0700 Subject: [PATCH 11/33] improvement(apollo): align tools and block with Apollo API docs (#4487) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * improvement(apollo): align tools and block with Apollo API docs * improvement(apollo): fix tool outputs to match Apollo API response shapes * chore(apollo): regenerate docs for output changes * fix(apollo): address PR review comments * fix(apollo): allow skipped_contact_ids as hash per Apollo docs * docs * fix(apollo): add runtime guard for account_bulk_update empty body * fix(apollo): require contact_attributes for bulk_update * fix(apollo): add subblock id migrations for renamed opportunity fields * fix(apollo): tighten account_bulk_update guard and accept object attrs * fix(apollo): require contact_ids with object-form contact_attributes * docs(apollo): clarify contact_bulk_update parameter requirements * fix(apollo): handle flat and wrapped contact response shapes * validate * fix(apollo): mirror bulk_update guard, preserve update fields in migration, expose account_bulk_create options * fix(apollo): don't clobber user contact_attributes in migration; simplify task_create 
created flag * fix(apollo): drop undocumented task type, preserve mixed-array IDs, migrate note→task_notes * fix(apollo): align tools and block with live API docs Final pass over the Apollo integration after a per-tool forensic audit against Apollo.io docs. Notable fixes: - organization_enrich: GET+querystring -> POST+JSON body (canonical, non master-key) - organization_bulk_enrich: ?domains[]= -> JSON body { organizations } - people_search: declare/forward organization_num_employees_ranges; fix contact_email_status placeholder ("likely to engage", with spaces) - account_bulk_create: surface failed_accounts and failed count - contact_bulk_create: expand documented per-contact fields (CRM IDs, phone_numbers, contact_emails, typed_custom_fields, etc.) - sequence_add_contacts: surface remaining documented filter params - task_create: confirm wire field name (note) and remap from task_notes - types: tighten params/responses for the above Co-Authored-By: Claude Opus 4.7 * docs * fix(apollo): add _removed_* migrations for retired opportunity subblocks * fix(apollo): expose webhook_url subblock for people enrich phone reveal * fix(apollo): drop colliding account_ids migration, enforce contact bulk limit, expose async toggle for accounts * fix(apollo): cap account_attributes at 1000 in bulk update * fix(apollo): drop bare-id merging in bulk update migration to avoid empty attribute objects * fix(apollo): reject ambiguous account/contact_ids + array-form attributes --------- Co-authored-by: Claude Opus 4.7 --- apps/docs/content/docs/en/tools/apollo.mdx | 213 +++-- .../integrations/data/integrations.json | 4 +- apps/sim/blocks/blocks/apollo.ts | 751 +++++++++++++----- .../migrations/subblock-migrations.ts | 10 + apps/sim/tools/apollo/account_bulk_create.ts | 63 +- apps/sim/tools/apollo/account_bulk_update.ts | 108 ++- apps/sim/tools/apollo/account_create.ts | 40 +- apps/sim/tools/apollo/account_search.ts | 40 +- apps/sim/tools/apollo/account_update.ts | 38 +- 
apps/sim/tools/apollo/contact_bulk_create.ts | 25 +- apps/sim/tools/apollo/contact_bulk_update.ts | 90 ++- apps/sim/tools/apollo/contact_create.ts | 96 ++- apps/sim/tools/apollo/contact_search.ts | 29 +- apps/sim/tools/apollo/contact_update.ts | 92 ++- apps/sim/tools/apollo/email_accounts.ts | 5 +- apps/sim/tools/apollo/opportunity_create.ts | 41 +- apps/sim/tools/apollo/opportunity_get.ts | 10 +- apps/sim/tools/apollo/opportunity_search.ts | 52 +- apps/sim/tools/apollo/opportunity_update.ts | 39 +- .../tools/apollo/organization_bulk_enrich.ts | 23 +- apps/sim/tools/apollo/organization_enrich.ts | 34 +- apps/sim/tools/apollo/organization_search.ts | 34 +- apps/sim/tools/apollo/people_bulk_enrich.ts | 64 +- apps/sim/tools/apollo/people_enrich.ts | 61 +- apps/sim/tools/apollo/people_search.ts | 68 +- .../sim/tools/apollo/sequence_add_contacts.ts | 184 ++++- apps/sim/tools/apollo/sequence_search.ts | 9 +- apps/sim/tools/apollo/task_create.ts | 71 +- apps/sim/tools/apollo/task_search.ts | 27 +- apps/sim/tools/apollo/types.ts | 326 +++++--- 30 files changed, 1955 insertions(+), 692 deletions(-) diff --git a/apps/docs/content/docs/en/tools/apollo.mdx b/apps/docs/content/docs/en/tools/apollo.mdx index d063d017123..355f62d9642 100644 --- a/apps/docs/content/docs/en/tools/apollo.mdx +++ b/apps/docs/content/docs/en/tools/apollo.mdx @@ -49,9 +49,15 @@ Search Apollo | --------- | ---- | -------- | ----------- | | `apiKey` | string | Yes | Apollo API key | | `person_titles` | array | No | Job titles to search for \(e.g., \["CEO", "VP of Sales"\]\) | +| `include_similar_titles` | boolean | No | Whether to return people with job titles similar to person_titles | | `person_locations` | array | No | Locations to search in \(e.g., \["San Francisco, CA", "New York, NY"\]\) | -| `person_seniorities` | array | No | Seniority levels \(e.g., \["senior", "executive", "manager"\]\) | -| `organization_names` | array | No | Company names to search within | +| `person_seniorities` | array | 
No | Seniority levels \(one of: owner, founder, c_suite, partner, vp, head, director, manager, senior, entry, intern\) | +| `organization_ids` | array | No | Apollo organization IDs to filter by \(e.g., \["5e66b6381e05b4008c8331b8"\]\) | +| `organization_names` | array | No | Company names to search within \(legacy filter\) | +| `organization_locations` | array | No | Headquarters locations of the people's current employer \(e.g., \['texas', 'tokyo', 'spain'\]\) | +| `q_organization_domains_list` | array | No | Employer domain names \(e.g., \["apollo.io", "microsoft.com"\]\) — up to 1,000, no www. or @ | +| `organization_num_employees_ranges` | array | No | Employee count ranges for the person\'s current employer. Each entry is "min,max" \(e.g., \["1,10", "250,500", "10000,20000"\]\) | +| `contact_email_status` | array | No | Email statuses to filter by: "verified", "unverified", "likely to engage", "unavailable" | | `q_keywords` | string | No | Keywords to search for | | `page` | number | No | Page number for pagination, default 1 \(e.g., 1, 2, 3\) | | `per_page` | number | No | Results per page, default 25, max 100 \(e.g., 25, 50, 100\) | @@ -76,12 +82,16 @@ Enrich data for a single person using Apollo | `apiKey` | string | Yes | Apollo API key | | `first_name` | string | No | First name of the person | | `last_name` | string | No | Last name of the person | +| `name` | string | No | Full name of the person \(alternative to first_name/last_name\) | +| `id` | string | No | Apollo ID for the person | +| `hashed_email` | string | No | MD5 or SHA-256 hashed email | | `email` | string | No | Email address of the person | | `organization_name` | string | No | Company name where the person works | | `domain` | string | No | Company domain \(e.g., "apollo.io", "acme.com"\) | | `linkedin_url` | string | No | LinkedIn profile URL | | `reveal_personal_emails` | boolean | No | Reveal personal email addresses \(uses credits\) | -| `reveal_phone_number` | boolean | No | Reveal 
phone numbers \(uses credits\) | +| `reveal_phone_number` | boolean | No | Reveal phone numbers \(uses credits, requires webhook_url\) | +| `webhook_url` | string | No | Webhook URL for async phone number delivery \(required when reveal_phone_number is true\) | #### Output @@ -101,15 +111,18 @@ Enrich data for up to 10 people at once using Apollo | `apiKey` | string | Yes | Apollo API key | | `people` | array | Yes | Array of people to enrich \(max 10\) | | `reveal_personal_emails` | boolean | No | Reveal personal email addresses \(uses credits\) | -| `reveal_phone_number` | boolean | No | Reveal phone numbers \(uses credits\) | +| `reveal_phone_number` | boolean | No | Reveal phone numbers \(uses credits, requires webhook_url\) | +| `webhook_url` | string | No | Webhook URL for async phone number delivery \(required when reveal_phone_number is true\) | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `people` | json | Array of enriched people data | -| `total` | number | Total number of people processed | -| `enriched` | number | Number of people successfully enriched | +| `matches` | json | Array of enriched people \(null entries indicate no match\) | +| `total_requested_enrichments` | number | Total number of records submitted for enrichment | +| `unique_enriched_records` | number | Number of records successfully enriched | +| `missing_records` | number | Number of records that could not be enriched | +| `credits_consumed` | number | Number of Apollo credits consumed by this request | ### `apollo_organization_search` @@ -120,10 +133,13 @@ Search Apollo | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | | `apiKey` | string | Yes | Apollo API key | -| `organization_locations` | array | No | Company locations to search | -| `organization_num_employees_ranges` | array | No | Employee count ranges \(e.g., \["1-10", "11-50"\]\) | +| `organization_locations` | array | No | Company HQ locations 
\(cities, US states, or countries\) | +| `organization_not_locations` | array | No | Exclude companies whose HQ is in these locations | +| `organization_num_employees_ranges` | array | No | Employee count ranges as "min,max" strings \(e.g., \["1,10", "250,500", "10000,20000"\]\) | | `q_organization_keyword_tags` | array | No | Industry or keyword tags | | `q_organization_name` | string | No | Organization name to search for \(e.g., "Acme", "TechCorp"\) | +| `organization_ids` | array | No | Apollo organization IDs to include \(e.g., \["5e66b6381e05b4008c8331b8"\]\) | +| `q_organization_domains_list` | array | No | Domain names to filter by \(no www. or @, up to 1,000\) | | `page` | number | No | Page number for pagination \(e.g., 1, 2, 3\) | | `per_page` | number | No | Results per page, max 100 \(e.g., 25, 50, 100\) | @@ -145,8 +161,7 @@ Enrich data for a single organization using Apollo | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | | `apiKey` | string | Yes | Apollo API key | -| `organization_name` | string | No | Name of the organization \(e.g., "Acme Corporation"\) - at least one of organization_name or domain is required | -| `domain` | string | No | Company domain \(e.g., "apollo.io", "acme.com"\) - at least one of domain or organization_name is required | +| `domain` | string | Yes | Company domain \(e.g., "apollo.io", "acme.com"\) | #### Output @@ -164,15 +179,17 @@ Enrich data for up to 10 organizations at once using Apollo | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | | `apiKey` | string | Yes | Apollo API key | -| `organizations` | array | Yes | Array of organizations to enrich \(max 10\) | +| `organizations` | array | Yes | Array of organizations to enrich \(max 10\). 
Each item requires `name` and may include `domain` \(e.g., \[\{"name": "Example Corp", "domain": "example.com"\}\]\) | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | | `organizations` | json | Array of enriched organization data | -| `total` | number | Total number of organizations processed | -| `enriched` | number | Number of organizations successfully enriched | +| `total` | number | Total number of domains requested | +| `enriched` | number | Number of unique enriched records | +| `missing_records` | number | Number of domains that could not be enriched | +| `unique_domains` | number | Number of unique domains processed | ### `apollo_contact_create` @@ -188,7 +205,19 @@ Create a new contact in your Apollo database | `email` | string | No | Email address of the contact | | `title` | string | No | Job title \(e.g., "VP of Sales", "Software Engineer"\) | | `account_id` | string | No | Apollo account ID to associate with \(e.g., "acc_abc123"\) | -| `owner_id` | string | No | User ID of the contact owner | +| `owner_id` | string | No | User ID of the contact owner \(accepted by Apollo but not officially documented for POST /contacts\) | +| `organization_name` | string | No | Name of the contact\'s employer \(e.g., "Apollo"\) | +| `website_url` | string | No | Corporate website URL \(e.g., "https://www.apollo.io/"\) | +| `label_names` | array | No | Lists/labels to add the contact to \(e.g., \["Prospects"\]\) | +| `contact_stage_id` | string | No | Apollo ID for the contact stage | +| `present_raw_address` | string | No | Personal location for the contact \(e.g., "Atlanta, United States"\) | +| `direct_phone` | string | No | Primary phone number | +| `corporate_phone` | string | No | Work/office phone number | +| `mobile_phone` | string | No | Mobile phone number | +| `home_phone` | string | No | Home phone number | +| `other_phone` | string | No | Alternative phone number | +| `typed_custom_fields` | json | No | Custom field values 
keyed by custom field ID | +| `run_dedupe` | boolean | No | When true, Apollo deduplicates against existing contacts | #### Output @@ -212,7 +241,18 @@ Update an existing contact in your Apollo database | `email` | string | No | Email address | | `title` | string | No | Job title \(e.g., "VP of Sales", "Software Engineer"\) | | `account_id` | string | No | Apollo account ID \(e.g., "acc_abc123"\) | -| `owner_id` | string | No | User ID of the contact owner | +| `owner_id` | string | No | User ID of the contact owner \(accepted by Apollo but not officially documented for PATCH /contacts/\{id\}\) | +| `organization_name` | string | No | Name of the contact\'s employer \(e.g., "Apollo"\) | +| `website_url` | string | No | Corporate website URL \(e.g., "https://www.apollo.io/"\) | +| `label_names` | array | No | Lists/labels to add the contact to \(e.g., \["Prospects"\]\) | +| `contact_stage_id` | string | No | Apollo ID for the contact stage | +| `present_raw_address` | string | No | Personal location for the contact \(e.g., "Atlanta, United States"\) | +| `direct_phone` | string | No | Primary phone number | +| `corporate_phone` | string | No | Work/office phone number | +| `mobile_phone` | string | No | Mobile phone number | +| `home_phone` | string | No | Home phone number | +| `other_phone` | string | No | Alternative phone number | +| `typed_custom_fields` | json | No | Custom field values keyed by custom field ID \(accepted by Apollo but not officially documented for PATCH /contacts/\{id\}\) | #### Output @@ -232,6 +272,9 @@ Search your team | `apiKey` | string | Yes | Apollo API key | | `q_keywords` | string | No | Keywords to search for | | `contact_stage_ids` | array | No | Filter by contact stage IDs | +| `contact_label_ids` | array | No | Filter by Apollo label IDs \(lists\) | +| `sort_by_field` | string | No | Sort field: contact_last_activity_date, contact_email_last_opened_at, contact_email_last_clicked_at, contact_created_at, or contact_updated_at | +| 
`sort_ascending` | boolean | No | When true, sort ascending. Must be used together with sort_by_field | | `page` | number | No | Page number for pagination \(e.g., 1, 2, 3\) | | `per_page` | number | No | Results per page, max 100 \(e.g., 25, 50, 100\) | @@ -251,7 +294,8 @@ Create up to 100 contacts at once in your Apollo database. Supports deduplicatio | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | | `apiKey` | string | Yes | Apollo API key \(master key required\) | -| `contacts` | array | Yes | Array of contacts to create \(max 100\). Each contact should include first_name, last_name, and optionally email, title, account_id, owner_id | +| `contacts` | array | Yes | Array of contacts to create \(max 100\). Each contact may include first_name, last_name, email, title, organization_name, account_id, owner_id, contact_stage_id, linkedin_url, phone \(single string\) or phone_numbers \(array of \{raw_number, position\}\), contact_emails, typed_custom_fields, and CRM IDs \(salesforce_contact_id, hubspot_id, team_id\) for cross-system matching | +| `append_label_names` | array | No | Label names to add to all contacts in this request \(e.g., \["Hot Lead"\]\) | | `run_dedupe` | boolean | No | Enable deduplication to prevent creating duplicate contacts. When true, existing contacts are returned without modification | #### Output @@ -273,17 +317,16 @@ Update up to 100 existing contacts at once in your Apollo database. Each contact | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | | `apiKey` | string | Yes | Apollo API key \(master key required\) | -| `contacts` | array | Yes | Array of contacts to update \(max 100\). Each contact must include id field, and optionally first_name, last_name, email, title, account_id, owner_id | +| `contact_ids` | array | No | Array of contact IDs to update. 
Must be paired with an object-form contact_attributes specifying the fields to apply uniformly to all listed contacts. | +| `contact_attributes` | json | No | Required. Either an array of per-contact updates \(each with id\) — used standalone — or a single object of attributes to apply to all contact_ids. Supported fields: owner_id, email, organization_name, title, first_name, last_name, account_id, present_raw_address, linkedin_url, typed_custom_fields | +| `async` | boolean | No | Force asynchronous processing. Automatically enabled for >100 contacts | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `updated_contacts` | json | Array of successfully updated contacts | -| `failed_contacts` | json | Array of contacts that failed to update | -| `total_submitted` | number | Total number of contacts submitted | -| `updated` | number | Number of contacts successfully updated | -| `failed` | number | Number of contacts that failed to update | +| `message` | string | Confirmation message from Apollo | +| `job_id` | string | Async job ID \(returned for >100 contacts\) | ### `apollo_account_create` @@ -293,11 +336,14 @@ Create a new account (company) in your Apollo database | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `apiKey` | string | Yes | Apollo API key | +| `apiKey` | string | Yes | Apollo API key \(master key required\) | | `name` | string | Yes | Company name \(e.g., "Acme Corporation"\) | -| `website_url` | string | No | Company website URL | -| `phone` | string | No | Company phone number | -| `owner_id` | string | No | User ID of the account owner | +| `domain` | string | No | Company domain without www. 
prefix \(e.g., "acme.com"\) | +| `phone` | string | No | Primary phone number for the account | +| `owner_id` | string | No | Apollo user ID of the account owner | +| `account_stage_id` | string | No | Apollo ID for the account stage to assign this account to | +| `raw_address` | string | No | Corporate location \(e.g., "San Francisco, CA, USA"\) | +| `typed_custom_fields` | json | No | Custom field values as \{ custom_field_id: value \} map | #### Output @@ -317,9 +363,12 @@ Update an existing account in your Apollo database | `apiKey` | string | Yes | Apollo API key | | `account_id` | string | Yes | ID of the account to update \(e.g., "acc_abc123"\) | | `name` | string | No | Company name \(e.g., "Acme Corporation"\) | -| `website_url` | string | No | Company website URL | +| `domain` | string | No | Company domain \(e.g., "acme.com"\) | | `phone` | string | No | Company phone number | -| `owner_id` | string | No | User ID of the account owner | +| `owner_id` | string | No | Apollo user ID of the account owner | +| `account_stage_id` | string | No | Apollo ID for the account stage to assign this account to | +| `raw_address` | string | No | Corporate location \(e.g., "San Francisco, CA, USA"\) | +| `typed_custom_fields` | json | No | Custom field values as \{ custom_field_id: value \} map | #### Output @@ -337,9 +386,11 @@ Search your team | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | | `apiKey` | string | Yes | Apollo API key \(master key required\) | -| `q_keywords` | string | No | Keywords to search for in account data | -| `owner_id` | string | No | Filter by account owner user ID | +| `q_organization_name` | string | No | Filter accounts by organization name \(partial-match search\) | | `account_stage_ids` | array | No | Filter by account stage IDs | +| `account_label_ids` | array | No | Filter by account label IDs | +| `sort_by_field` | string | No | Sort field: "account_last_activity_date", 
"account_created_at", or "account_updated_at" | +| `sort_ascending` | boolean | No | Sort ascending when true. Defaults to descending. | | `page` | number | No | Page number for pagination \(e.g., 1, 2, 3\) | | `per_page` | number | No | Results per page, max 100 \(e.g., 25, 50, 100\) | @@ -352,24 +403,28 @@ Search your team ### `apollo_account_bulk_create` -Create up to 100 accounts at once in your Apollo database. Note: Apollo does not apply deduplication - duplicate accounts may be created if entries share similar names or domains. Master key required. +Create up to 100 accounts at once in your Apollo database. Set run_dedupe=true to deduplicate by domain, organization_id, and name. Master key required. #### Input | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | | `apiKey` | string | Yes | Apollo API key \(master key required\) | -| `accounts` | array | Yes | Array of accounts to create \(max 100\). Each account should include name \(required\), and optionally website_url, phone, owner_id | +| `accounts` | array | Yes | Array of accounts to create \(max 100\). Each account should include a name, and may optionally include domain, phone, phone_status_cd, raw_address, owner_id, linkedin_url, facebook_url, twitter_url, salesforce_id, and hubspot_id. 
| +| `append_label_names` | array | No | Array of label names to add to ALL accounts in this request | +| `run_dedupe` | boolean | No | When true, performs aggressive deduplication by domain, organization_id, and name \(defaults to false\) | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | | `created_accounts` | json | Array of newly created accounts | -| `failed_accounts` | json | Array of accounts that failed to create | -| `total_submitted` | number | Total number of accounts submitted | +| `existing_accounts` | json | Array of existing accounts returned by Apollo \(when duplicates are detected\) | +| `failed_accounts` | json | Array of accounts that failed to be created, with reasons for failure | +| `total_submitted` | number | Total number of accounts in the response \(created + existing + failed\) | | `created` | number | Number of accounts successfully created | -| `failed` | number | Number of accounts that failed to create | +| `existing` | number | Number of existing accounts found | +| `failed` | number | Number of accounts that failed to be created | ### `apollo_account_bulk_update` @@ -380,17 +435,18 @@ Update up to 1000 existing accounts at once in your Apollo database (higher limi | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | | `apiKey` | string | Yes | Apollo API key \(master key required\) | -| `accounts` | array | Yes | Array of accounts to update \(max 1000\). Each account must include id field, and optionally name, website_url, phone, owner_id | +| `account_ids` | array | No | Array of account IDs to update with the same values \(max 1000\). Use with name/owner_id for uniform updates. Use either this OR account_attributes. 
| +| `name` | string | No | When using account_ids, apply this name to all accounts | +| `owner_id` | string | No | When using account_ids, apply this owner to all accounts | +| `account_attributes` | json | No | Array of account objects with individual updates \(each must include id\). Example: \[\{"id": "acc1", "name": "Acme", "owner_id": "u1", "account_stage_id": "s1", "typed_custom_fields": \{"field_id": "value"\}\}\] | +| `async` | boolean | No | When true, processes the update asynchronously. Only supported when using account_ids; returns 422 if used with account_attributes. | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `updated_accounts` | json | Array of successfully updated accounts | -| `failed_accounts` | json | Array of accounts that failed to update | -| `total_submitted` | number | Total number of accounts submitted | -| `updated` | number | Number of accounts successfully updated | -| `failed` | number | Number of accounts that failed to update | +| `message` | string | Confirmation message from Apollo | +| `account_ids` | json | IDs of accounts that were updated | ### `apollo_opportunity_create` @@ -402,12 +458,12 @@ Create a new deal for an account in your Apollo database (master key required) | --------- | ---- | -------- | ----------- | | `apiKey` | string | Yes | Apollo API key \(master key required\) | | `name` | string | Yes | Name of the opportunity/deal \(e.g., "Enterprise License - Q1"\) | -| `account_id` | string | Yes | ID of the account this opportunity belongs to \(e.g., "acc_abc123"\) | -| `amount` | number | No | Monetary value of the opportunity | -| `stage_id` | string | No | ID of the deal stage | +| `account_id` | string | No | ID of the account this opportunity belongs to \(e.g., "acc_abc123"\) | +| `amount` | string | No | Monetary value as a plain number string with no commas or currency symbols | +| `opportunity_stage_id` | string | No | ID of the opportunity stage | | `owner_id` | 
string | No | User ID of the opportunity owner | -| `close_date` | string | No | Expected close date \(ISO 8601 format\) | -| `description` | string | No | Description or notes about the opportunity | +| `closed_date` | string | No | Expected close date in YYYY-MM-DD format | +| `typed_custom_fields` | json | No | Custom field values as \{ custom_field_id: value \} map | #### Output @@ -425,10 +481,7 @@ Search and list all deals/opportunities in your team | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | | `apiKey` | string | Yes | Apollo API key | -| `q_keywords` | string | No | Keywords to search for in opportunity names | -| `account_ids` | array | No | Filter by specific account IDs \(e.g., \["acc_123", "acc_456"\]\) | -| `stage_ids` | array | No | Filter by deal stage IDs | -| `owner_ids` | array | No | Filter by opportunity owner IDs | +| `sort_by_field` | string | No | Sort field: "amount", "is_closed", or "is_won" | | `page` | number | No | Page number for pagination \(e.g., 1, 2, 3\) | | `per_page` | number | No | Results per page, max 100 \(e.g., 25, 50, 100\) | @@ -470,11 +523,11 @@ Update an existing deal/opportunity in your Apollo database | `apiKey` | string | Yes | Apollo API key | | `opportunity_id` | string | Yes | ID of the opportunity to update \(e.g., "opp_abc123"\) | | `name` | string | No | Name of the opportunity/deal \(e.g., "Enterprise License - Q1"\) | -| `amount` | number | No | Monetary value of the opportunity | -| `stage_id` | string | No | ID of the deal stage | +| `amount` | string | No | Monetary value as a plain number string with no commas or currency symbols | +| `opportunity_stage_id` | string | No | ID of the opportunity stage | | `owner_id` | string | No | User ID of the opportunity owner | -| `close_date` | string | No | Expected close date \(ISO 8601 format\) | -| `description` | string | No | Description or notes about the opportunity | +| `closed_date` | string | No | Expected 
close date in YYYY-MM-DD format | +| `typed_custom_fields` | json | No | Custom field values as \{ custom_field_id: value \} map | #### Output @@ -493,7 +546,6 @@ Search for sequences/campaigns in your team | --------- | ---- | -------- | ----------- | | `apiKey` | string | Yes | Apollo API key \(master key required\) | | `q_name` | string | No | Search sequences by name \(e.g., "Outbound Q1", "Follow-up"\) | -| `active` | boolean | No | Filter by active status \(true for active sequences, false for inactive\) | | `page` | number | No | Page number for pagination \(e.g., 1, 2, 3\) | | `per_page` | number | No | Results per page, max 100 \(e.g., 25, 50, 100\) | @@ -516,40 +568,58 @@ Add contacts to an Apollo sequence | --------- | ---- | -------- | ----------- | | `apiKey` | string | Yes | Apollo API key \(master key required\) | | `sequence_id` | string | Yes | ID of the sequence to add contacts to \(e.g., "seq_abc123"\) | -| `contact_ids` | array | Yes | Array of contact IDs to add to the sequence \(e.g., \["con_abc123", "con_def456"\]\) | -| `emailer_campaign_id` | string | No | Optional emailer campaign ID | -| `send_email_from_user_id` | string | No | User ID to send emails from | +| `contact_ids` | array | No | Array of contact IDs to add to the sequence \(e.g., \["con_abc123", "con_def456"\]\). Either contact_ids or label_names must be provided. | +| `label_names` | array | No | Array of label names to identify contacts to add to the sequence. Either contact_ids or label_names must be provided. | +| `send_email_from_email_account_id` | string | Yes | ID of the email account to send from. Use the Get Email Accounts operation to look this up. | +| `send_email_from_email_address` | string | No | Specific email address to send from within the email account. 
| +| `sequence_no_email` | boolean | No | Add contacts even if they have no email address | +| `sequence_unverified_email` | boolean | No | Add contacts with unverified email addresses | +| `sequence_job_change` | boolean | No | Add contacts who recently changed jobs | +| `sequence_active_in_other_campaigns` | boolean | No | Add contacts active in other campaigns | +| `sequence_finished_in_other_campaigns` | boolean | No | Add contacts who finished other campaigns | +| `sequence_same_company_in_same_campaign` | boolean | No | Add contacts even if others from the same company are in the sequence | +| `contacts_without_ownership_permission` | boolean | No | Add contacts without ownership permission | +| `add_if_in_queue` | boolean | No | Add contacts even if they are in the queue | +| `contact_verification_skipped` | boolean | No | Skip contact verification when adding | +| `user_id` | string | No | ID of the user performing the action | +| `status` | string | No | Initial status for added contacts: "active" or "paused" | +| `auto_unpause_at` | string | No | ISO 8601 datetime to automatically unpause contacts | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `contacts_added` | json | Array of contact IDs added to the sequence | +| `added` | json | Array of contact objects successfully added to the sequence | +| `skipped` | json | Array of contact objects that were skipped, with reasons | +| `skipped_contact_ids` | json | Skipped contact IDs — either an array of IDs or a hash mapping ID → reason code | +| `emailer_campaign` | json | Details of the emailer campaign \(id, name\) | | `sequence_id` | string | ID of the sequence contacts were added to | | `total_added` | number | Total number of contacts added | +| `total_skipped` | number | Total number of contacts skipped | ### `apollo_task_create` -Create a new task in Apollo +Create one or more tasks in Apollo (one task per contact_id, master key required) #### Input | Parameter | 
Type | Required | Description | | --------- | ---- | -------- | ----------- | | `apiKey` | string | Yes | Apollo API key \(master key required\) | -| `note` | string | Yes | Task note/description | -| `contact_id` | string | No | Contact ID to associate with \(e.g., "con_abc123"\) | -| `account_id` | string | No | Account ID to associate with \(e.g., "acc_abc123"\) | -| `due_at` | string | No | Due date in ISO format | -| `priority` | string | No | Task priority | -| `type` | string | No | Task type | +| `user_id` | string | Yes | ID of the Apollo user the task is assigned to | +| `contact_ids` | array | Yes | Array of contact IDs. One task is created per contact. | +| `priority` | string | No | Task priority: "high", "medium", or "low" \(defaults to "medium"\) | +| `due_at` | string | Yes | Due date/time in ISO 8601 format \(e.g., "2024-12-31T23:59:59Z"\) | +| `type` | string | Yes | Task type: "call", "outreach_manual_email", "linkedin_step_connect", "linkedin_step_message", "linkedin_step_view_profile", "linkedin_step_interact_post", or "action_item" | +| `status` | string | Yes | Task status: "scheduled", "completed", or "skipped" | +| `note` | string | No | Free-form note providing context for the task | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `task` | json | Created task data from Apollo | -| `created` | boolean | Whether the task was successfully created | +| `tasks` | json | Array of created tasks \(when returned by Apollo\) | +| `created` | boolean | Whether the request succeeded | ### `apollo_task_search` @@ -560,9 +630,8 @@ Search for tasks in Apollo | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | | `apiKey` | string | Yes | Apollo API key \(master key required\) | -| `contact_id` | string | No | Filter by contact ID \(e.g., "con_abc123"\) | -| `account_id` | string | No | Filter by account ID \(e.g., "acc_abc123"\) | -| `completed` | boolean | No | Filter by 
completion status | +| `sort_by_field` | string | No | Sort field: "task_due_at" or "task_priority" | +| `open_factor_names` | array | No | Filter by status. Common values: \["task_types"\] for open tasks, \["task_completed_at"\] for completed tasks. | | `page` | number | No | Page number for pagination \(e.g., 1, 2, 3\) | | `per_page` | number | No | Results per page, max 100 \(e.g., 25, 50, 100\) | diff --git a/apps/sim/app/(landing)/integrations/data/integrations.json b/apps/sim/app/(landing)/integrations/data/integrations.json index 08fa8bb80d7..9de36c28909 100644 --- a/apps/sim/app/(landing)/integrations/data/integrations.json +++ b/apps/sim/app/(landing)/integrations/data/integrations.json @@ -862,7 +862,7 @@ }, { "name": "Bulk Create Accounts", - "description": "Create up to 100 accounts at once in your Apollo database. Note: Apollo does not apply deduplication - duplicate accounts may be created if entries share similar names or domains. Master key required." + "description": "Create up to 100 accounts at once in your Apollo database. Set run_dedupe=true to deduplicate by domain, organization_id, and name. Master key required." 
}, { "name": "Bulk Update Accounts", @@ -894,7 +894,7 @@ }, { "name": "Create Task", - "description": "Create a new task in Apollo" + "description": "Create one or more tasks in Apollo (one task per contact_id, master key required)" }, { "name": "Search Tasks", diff --git a/apps/sim/blocks/blocks/apollo.ts b/apps/sim/blocks/blocks/apollo.ts index 64bd9448d61..3ee33e90356 100644 --- a/apps/sim/blocks/blocks/apollo.ts +++ b/apps/sim/blocks/blocks/apollo.ts @@ -92,6 +92,21 @@ export const ApolloBlock: BlockConfig = { condition: { field: 'operation', value: 'people_search' }, mode: 'advanced', }, + { + id: 'include_similar_titles', + title: 'Include Similar Titles', + type: 'switch', + condition: { field: 'operation', value: 'people_search' }, + mode: 'advanced', + }, + { + id: 'contact_email_status', + title: 'Contact Email Status', + type: 'code', + placeholder: '["verified", "unverified", "likely to engage"]', + condition: { field: 'operation', value: 'people_search' }, + mode: 'advanced', + }, { id: 'contact_stage_ids', title: 'Contact Stage IDs', @@ -100,6 +115,14 @@ export const ApolloBlock: BlockConfig = { condition: { field: 'operation', value: 'contact_search' }, mode: 'advanced', }, + { + id: 'contact_label_ids', + title: 'Contact Label IDs', + type: 'code', + placeholder: '["label_id_1", "label_id_2"]', + condition: { field: 'operation', value: 'contact_search' }, + mode: 'advanced', + }, // People Enrich Fields { @@ -111,10 +134,7 @@ export const ApolloBlock: BlockConfig = { field: 'operation', value: ['people_enrich', 'contact_create', 'contact_update'], }, - required: { - field: 'operation', - value: 'contact_create', - }, + required: { field: 'operation', value: 'contact_create' }, }, { id: 'last_name', @@ -125,10 +145,7 @@ export const ApolloBlock: BlockConfig = { field: 'operation', value: ['people_enrich', 'contact_create', 'contact_update'], }, - required: { - field: 'operation', - value: 'contact_create', - }, + required: { field: 'operation', 
value: 'contact_create' }, }, { id: 'email', @@ -147,7 +164,7 @@ export const ApolloBlock: BlockConfig = { placeholder: 'Company name', condition: { field: 'operation', - value: ['people_enrich', 'organization_enrich'], + value: ['people_enrich', 'contact_create', 'contact_update'], }, }, { @@ -157,7 +174,11 @@ export const ApolloBlock: BlockConfig = { placeholder: 'example.com', condition: { field: 'operation', - value: ['people_enrich', 'organization_enrich'], + value: ['people_enrich', 'organization_enrich', 'account_create', 'account_update'], + }, + required: { + field: 'operation', + value: 'organization_enrich', }, }, { @@ -180,6 +201,17 @@ export const ApolloBlock: BlockConfig = { }, mode: 'advanced', }, + { + id: 'webhook_url', + title: 'Phone Reveal Webhook URL', + type: 'short-input', + placeholder: 'https://your-app.com/apollo-phone-webhook', + condition: { + field: 'operation', + value: ['people_enrich', 'people_bulk_enrich'], + }, + mode: 'advanced', + }, // Bulk Enrich Fields { @@ -194,7 +226,8 @@ export const ApolloBlock: BlockConfig = { id: 'organizations', title: 'Organizations (JSON Array)', type: 'code', - placeholder: '[{"organization_name": "Company A", "domain": "companya.com"}]', + placeholder: + '[{"name": "Company A", "domain": "companya.com"}, {"name": "Company B", "domain": "companyb.com"}]', condition: { field: 'operation', value: 'organization_bulk_enrich' }, required: true, }, @@ -205,15 +238,39 @@ export const ApolloBlock: BlockConfig = { title: 'Organization Locations', type: 'code', placeholder: '["San Francisco, CA"]', + condition: { field: 'operation', value: ['organization_search', 'people_search'] }, + mode: 'advanced', + }, + { + id: 'organization_not_locations', + title: 'Excluded Organization Locations', + type: 'code', + placeholder: '["Ireland", "Minnesota"]', condition: { field: 'operation', value: 'organization_search' }, mode: 'advanced', }, + { + id: 'organization_ids', + title: 'Organization IDs', + type: 'code', + 
placeholder: '["org_id_1", "org_id_2"]', + condition: { field: 'operation', value: ['organization_search', 'people_search'] }, + mode: 'advanced', + }, + { + id: 'q_organization_domains_list', + title: 'Organization Domains', + type: 'code', + placeholder: '["apollo.io", "stripe.com"]', + condition: { field: 'operation', value: ['organization_search', 'people_search'] }, + mode: 'advanced', + }, { id: 'organization_num_employees_ranges', title: 'Employee Count Ranges', type: 'code', - placeholder: '["1-10", "11-50", "51-200"]', - condition: { field: 'operation', value: 'organization_search' }, + placeholder: '["1,10", "11,50", "51,200"]', + condition: { field: 'operation', value: ['organization_search', 'people_search'] }, mode: 'advanced', }, { @@ -229,7 +286,7 @@ export const ApolloBlock: BlockConfig = { title: 'Organization Name', type: 'short-input', placeholder: 'Company name to search', - condition: { field: 'operation', value: 'organization_search' }, + condition: { field: 'operation', value: ['organization_search', 'account_search'] }, }, // Contact Fields @@ -246,10 +303,7 @@ export const ApolloBlock: BlockConfig = { title: 'Job Title', type: 'short-input', placeholder: 'Job title', - condition: { - field: 'operation', - value: ['contact_create', 'contact_update'], - }, + condition: { field: 'operation', value: ['contact_create', 'contact_update'] }, mode: 'advanced', }, { @@ -259,17 +313,11 @@ export const ApolloBlock: BlockConfig = { placeholder: 'Apollo account ID', condition: { field: 'operation', - value: [ - 'contact_create', - 'contact_update', - 'account_update', - 'task_create', - 'opportunity_create', - ], + value: ['contact_create', 'contact_update', 'account_update', 'opportunity_create'], }, required: { field: 'operation', - value: ['account_update', 'opportunity_create'], + value: 'account_update', }, }, { @@ -284,7 +332,6 @@ export const ApolloBlock: BlockConfig = { 'contact_update', 'account_create', 'account_update', - 'account_search', 
'opportunity_create', 'opportunity_update', ], @@ -292,6 +339,104 @@ export const ApolloBlock: BlockConfig = { mode: 'advanced', }, + { + id: 'website_url', + title: 'Corporate Website URL', + type: 'short-input', + placeholder: 'https://www.apollo.io/', + condition: { field: 'operation', value: ['contact_create', 'contact_update'] }, + mode: 'advanced', + }, + { + id: 'label_names', + title: 'Label Names (JSON Array)', + type: 'code', + placeholder: '["Prospects", "VIP"]', + condition: { field: 'operation', value: ['contact_create', 'contact_update'] }, + mode: 'advanced', + }, + { + id: 'contact_stage_id', + title: 'Contact Stage ID', + type: 'short-input', + placeholder: 'Apollo contact stage ID', + condition: { field: 'operation', value: ['contact_create', 'contact_update'] }, + mode: 'advanced', + }, + { + id: 'present_raw_address', + title: 'Personal Location', + type: 'short-input', + placeholder: 'Atlanta, United States', + condition: { field: 'operation', value: ['contact_create', 'contact_update'] }, + mode: 'advanced', + }, + { + id: 'direct_phone', + title: 'Direct Phone', + type: 'short-input', + placeholder: '+1 555 123 4567', + condition: { field: 'operation', value: ['contact_create', 'contact_update'] }, + mode: 'advanced', + }, + { + id: 'corporate_phone', + title: 'Corporate Phone', + type: 'short-input', + placeholder: '+1 555 123 4567', + condition: { field: 'operation', value: ['contact_create', 'contact_update'] }, + mode: 'advanced', + }, + { + id: 'mobile_phone', + title: 'Mobile Phone', + type: 'short-input', + placeholder: '+1 555 123 4567', + condition: { field: 'operation', value: ['contact_create', 'contact_update'] }, + mode: 'advanced', + }, + { + id: 'home_phone', + title: 'Home Phone', + type: 'short-input', + placeholder: '+1 555 123 4567', + condition: { field: 'operation', value: ['contact_create', 'contact_update'] }, + mode: 'advanced', + }, + { + id: 'other_phone', + title: 'Other Phone', + type: 'short-input', + placeholder: 
'+1 555 123 4567', + condition: { field: 'operation', value: ['contact_create', 'contact_update'] }, + mode: 'advanced', + }, + { + id: 'typed_custom_fields', + title: 'Custom Fields (JSON Object)', + type: 'code', + placeholder: '{"custom_field_id": "value"}', + condition: { + field: 'operation', + value: [ + 'contact_create', + 'contact_update', + 'account_create', + 'account_update', + 'opportunity_create', + 'opportunity_update', + ], + }, + mode: 'advanced', + }, + { + id: 'contact_run_dedupe', + title: 'Run Deduplication', + type: 'switch', + condition: { field: 'operation', value: 'contact_create' }, + mode: 'advanced', + }, + // Contact Bulk Operations { id: 'contacts', @@ -304,55 +449,82 @@ export const ApolloBlock: BlockConfig = { }, { id: 'contacts', - title: 'Contacts (JSON Array)', + title: 'Contact IDs (JSON Array)', type: 'code', - placeholder: '[{"id": "contact_id_1", "first_name": "John", "last_name": "Doe"}]', + placeholder: '["contact_id_1", "contact_id_2"]', condition: { field: 'operation', value: 'contact_bulk_update' }, - required: true, }, { - id: 'run_dedupe', - title: 'Run Deduplication', - type: 'switch', - condition: { field: 'operation', value: 'contact_bulk_create' }, - mode: 'advanced', + id: 'contact_attributes', + title: 'Contact Attributes (JSON Array of Objects)', + type: 'code', + placeholder: + '[{"id": "contact_id_1", "first_name": "John", "title": "VP Sales", "owner_id": "user_id"}]', + condition: { field: 'operation', value: 'contact_bulk_update' }, }, - - // Account Fields { - id: 'account_name', - title: 'Account Name', - type: 'short-input', - placeholder: 'Company name', + id: 'async', + title: 'Force Asynchronous Processing', + type: 'switch', condition: { field: 'operation', - value: ['account_create', 'account_update'], + value: ['contact_bulk_update', 'account_bulk_update'], }, - required: { + mode: 'advanced', + }, + { + id: 'run_dedupe', + title: 'Run Deduplication', + type: 'switch', + condition: { field: 
'operation', - value: 'account_create', + value: ['contact_bulk_create', 'account_bulk_create'], }, + mode: 'advanced', }, { - id: 'website_url', - title: 'Website URL', - type: 'short-input', - placeholder: 'https://example.com', + id: 'append_label_names', + title: 'Append Label Names (JSON Array)', + type: 'code', + placeholder: '["Hot Lead", "Q4 Outreach"]', condition: { field: 'operation', - value: ['account_create', 'account_update'], + value: ['contact_bulk_create', 'account_bulk_create'], }, mode: 'advanced', }, + + // Account Fields + { + id: 'account_name', + title: 'Account Name', + type: 'short-input', + placeholder: 'Company name', + condition: { field: 'operation', value: ['account_create', 'account_update'] }, + required: { field: 'operation', value: 'account_create' }, + }, { id: 'phone', title: 'Phone Number', type: 'short-input', placeholder: 'Company phone', - condition: { - field: 'operation', - value: ['account_create', 'account_update'], - }, + condition: { field: 'operation', value: ['account_create', 'account_update'] }, + mode: 'advanced', + }, + { + id: 'account_stage_id', + title: 'Account Stage ID', + type: 'short-input', + placeholder: 'Apollo account stage ID', + condition: { field: 'operation', value: ['account_create', 'account_update'] }, + mode: 'advanced', + }, + { + id: 'raw_address', + title: 'Account Address', + type: 'short-input', + placeholder: '123 Main St, San Francisco, CA', + condition: { field: 'operation', value: ['account_create', 'account_update'] }, mode: 'advanced', }, @@ -364,7 +536,7 @@ export const ApolloBlock: BlockConfig = { placeholder: 'Search keywords', condition: { field: 'operation', - value: ['people_search', 'contact_search', 'account_search', 'opportunity_search'], + value: ['people_search', 'contact_search'], }, }, { @@ -375,24 +547,54 @@ export const ApolloBlock: BlockConfig = { condition: { field: 'operation', value: 'account_search' }, mode: 'advanced', }, + { + id: 'account_label_ids', + title: 
'Account Label IDs', + type: 'code', + placeholder: '["label_id_1", "label_id_2"]', + condition: { field: 'operation', value: 'account_search' }, + mode: 'advanced', + }, // Account Bulk Operations { id: 'accounts', title: 'Accounts (JSON Array)', type: 'code', - placeholder: - '[{"name": "Company A", "website_url": "https://companya.com", "phone": "+1234567890"}]', + placeholder: '[{"name": "Company A", "domain": "companya.com", "phone": "+1234567890"}]', condition: { field: 'operation', value: 'account_bulk_create' }, required: true, }, { id: 'accounts', - title: 'Accounts (JSON Array)', + title: 'Account IDs (JSON Array)', type: 'code', - placeholder: '[{"id": "account_id_1", "name": "Updated Company Name"}]', + placeholder: '["account_id_1", "account_id_2"]', + condition: { field: 'operation', value: 'account_bulk_update' }, + }, + { + id: 'account_bulk_update_name', + title: 'Uniform Name (used with Account IDs)', + type: 'short-input', + placeholder: 'Updated Account Name', + condition: { field: 'operation', value: 'account_bulk_update' }, + mode: 'advanced', + }, + { + id: 'account_bulk_update_owner_id', + title: 'Uniform Owner ID (used with Account IDs)', + type: 'short-input', + placeholder: 'Apollo user ID', + condition: { field: 'operation', value: 'account_bulk_update' }, + mode: 'advanced', + }, + { + id: 'account_attributes', + title: 'Account Attributes (JSON Array of Objects)', + type: 'code', + placeholder: + '[{"id": "account_id_1", "name": "Acme", "owner_id": "user_id", "account_stage_id": "stage_id"}]', condition: { field: 'operation', value: 'account_bulk_update' }, - required: true, }, // Opportunity Fields @@ -401,46 +603,31 @@ export const ApolloBlock: BlockConfig = { title: 'Opportunity Name', type: 'short-input', placeholder: 'Opportunity name', - condition: { - field: 'operation', - value: ['opportunity_create', 'opportunity_update'], - }, - required: { - field: 'operation', - value: 'opportunity_create', - }, + condition: { field: 
'operation', value: ['opportunity_create', 'opportunity_update'] }, + required: { field: 'operation', value: 'opportunity_create' }, }, { id: 'amount', title: 'Amount', type: 'short-input', - placeholder: 'Deal amount (e.g., 50000)', - condition: { - field: 'operation', - value: ['opportunity_create', 'opportunity_update'], - }, + placeholder: 'Plain number, no commas (e.g., 50000)', + condition: { field: 'operation', value: ['opportunity_create', 'opportunity_update'] }, mode: 'advanced', }, { - id: 'stage_id', - title: 'Stage ID', + id: 'opportunity_stage_id', + title: 'Opportunity Stage ID', type: 'short-input', - placeholder: 'Opportunity stage ID', - condition: { - field: 'operation', - value: ['opportunity_create', 'opportunity_update'], - }, + placeholder: 'Apollo opportunity_stage_id', + condition: { field: 'operation', value: ['opportunity_create', 'opportunity_update'] }, mode: 'advanced', }, { - id: 'close_date', + id: 'closed_date', title: 'Close Date', type: 'short-input', - placeholder: 'ISO date (e.g., 2024-12-31)', - condition: { - field: 'operation', - value: ['opportunity_create', 'opportunity_update'], - }, + placeholder: 'YYYY-MM-DD (e.g., 2024-12-31)', + condition: { field: 'operation', value: ['opportunity_create', 'opportunity_update'] }, mode: 'advanced', wandConfig: { enabled: true, @@ -456,17 +643,6 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n generationType: 'timestamp', }, }, - { - id: 'description', - title: 'Description', - type: 'long-input', - placeholder: 'Opportunity description', - condition: { - field: 'operation', - value: ['opportunity_create', 'opportunity_update'], - }, - mode: 'advanced', - }, // Opportunity Get { @@ -474,36 +650,27 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n title: 'Opportunity ID', type: 'short-input', placeholder: 'Apollo opportunity ID', - condition: { - field: 'operation', - value: ['opportunity_get', 'opportunity_update'], - 
}, + condition: { field: 'operation', value: ['opportunity_get', 'opportunity_update'] }, required: true, }, - // Opportunity Search Fields + // Opportunity / Account / Task Search shared sort { - id: 'account_ids', - title: 'Account IDs', - type: 'code', - placeholder: '["account_id_1", "account_id_2"]', - condition: { field: 'operation', value: 'opportunity_search' }, - mode: 'advanced', - }, - { - id: 'stage_ids', - title: 'Stage IDs', - type: 'code', - placeholder: '["stage_id_1", "stage_id_2"]', - condition: { field: 'operation', value: 'opportunity_search' }, + id: 'sort_by_field', + title: 'Sort By Field', + type: 'short-input', + placeholder: 'Sort field name', + condition: { + field: 'operation', + value: ['opportunity_search', 'account_search', 'task_search', 'contact_search'], + }, mode: 'advanced', }, { - id: 'owner_ids', - title: 'Owner IDs', - type: 'code', - placeholder: '["user_id_1", "user_id_2"]', - condition: { field: 'operation', value: 'opportunity_search' }, + id: 'sort_ascending', + title: 'Sort Ascending', + type: 'switch', + condition: { field: 'operation', value: ['account_search', 'contact_search'] }, mode: 'advanced', }, @@ -515,15 +682,8 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n placeholder: 'Search by sequence name', condition: { field: 'operation', value: 'sequence_search' }, }, - { - id: 'active', - title: 'Active Only', - type: 'switch', - condition: { field: 'operation', value: 'sequence_search' }, - mode: 'advanced', - }, - // Sequence Fields + // Sequence Add Fields { id: 'sequence_id', title: 'Sequence ID', @@ -537,16 +697,137 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n title: 'Contact IDs (JSON Array)', type: 'code', placeholder: '["contact_id_1", "contact_id_2"]', + condition: { field: 'operation', value: ['sequence_add', 'task_create'] }, + required: { field: 'operation', value: 'task_create' }, + }, + { + id: 'sequence_add_label_names', + 
title: 'Label Names (JSON Array)', + type: 'code', + placeholder: '["Hot Lead", "Q4 Outreach"]', + condition: { field: 'operation', value: 'sequence_add' }, + mode: 'advanced', + }, + { + id: 'send_email_from_email_account_id', + title: 'Send Email From (Email Account ID)', + type: 'short-input', + placeholder: 'Apollo email account ID', condition: { field: 'operation', value: 'sequence_add' }, required: true, }, + { + id: 'send_email_from_email_address', + title: 'Send Email From (Email Address)', + type: 'short-input', + placeholder: 'sender@example.com', + condition: { field: 'operation', value: 'sequence_add' }, + mode: 'advanced', + }, + { + id: 'sequence_same_company_in_same_campaign', + title: 'Allow Same Company in Same Campaign', + type: 'switch', + condition: { field: 'operation', value: 'sequence_add' }, + mode: 'advanced', + }, + { + id: 'contacts_without_ownership_permission', + title: 'Add Contacts Without Ownership Permission', + type: 'switch', + condition: { field: 'operation', value: 'sequence_add' }, + mode: 'advanced', + }, + { + id: 'add_if_in_queue', + title: 'Add If In Queue', + type: 'switch', + condition: { field: 'operation', value: 'sequence_add' }, + mode: 'advanced', + }, + { + id: 'contact_verification_skipped', + title: 'Skip Contact Verification', + type: 'switch', + condition: { field: 'operation', value: 'sequence_add' }, + mode: 'advanced', + }, + { + id: 'sequence_user_id', + title: 'Acting User ID', + type: 'short-input', + placeholder: 'Apollo user ID', + condition: { field: 'operation', value: 'sequence_add' }, + mode: 'advanced', + }, + { + id: 'sequence_status', + title: 'Initial Status', + type: 'dropdown', + options: [ + { label: 'Active', id: 'active' }, + { label: 'Paused', id: 'paused' }, + ], + condition: { field: 'operation', value: 'sequence_add' }, + mode: 'advanced', + }, + { + id: 'auto_unpause_at', + title: 'Auto Unpause At', + type: 'short-input', + placeholder: 'ISO 8601 (e.g., 2024-12-31T23:59:59Z)', + 
condition: { field: 'operation', value: 'sequence_add' }, + mode: 'advanced', + }, - // Task Fields + // Task Create Fields { - id: 'note', - title: 'Task Note', - type: 'long-input', - placeholder: 'Task description', + id: 'user_id', + title: 'Assigned User ID', + type: 'short-input', + placeholder: 'Apollo user ID', + condition: { field: 'operation', value: 'task_create' }, + required: true, + }, + { + id: 'priority', + title: 'Priority', + type: 'dropdown', + options: [ + { label: 'High', id: 'high' }, + { label: 'Medium', id: 'medium' }, + { label: 'Low', id: 'low' }, + ], + value: () => 'medium', + condition: { field: 'operation', value: 'task_create' }, + }, + { + id: 'type', + title: 'Task Type', + type: 'dropdown', + options: [ + { label: 'Call', id: 'call' }, + { label: 'Outreach Manual Email', id: 'outreach_manual_email' }, + { label: 'LinkedIn — Connect', id: 'linkedin_step_connect' }, + { label: 'LinkedIn — Message', id: 'linkedin_step_message' }, + { label: 'LinkedIn — View Profile', id: 'linkedin_step_view_profile' }, + { label: 'LinkedIn — Interact with Post', id: 'linkedin_step_interact_post' }, + { label: 'Action Item', id: 'action_item' }, + ], + value: () => 'action_item', + condition: { field: 'operation', value: 'task_create' }, + required: true, + }, + { + id: 'status', + title: 'Status', + type: 'dropdown', + options: [ + { label: 'Scheduled', id: 'scheduled' }, + { label: 'Completed', id: 'completed' }, + { label: 'Skipped', id: 'skipped' }, + ], + value: () => 'scheduled', condition: { field: 'operation', value: 'task_create' }, required: true, }, @@ -554,9 +835,9 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n id: 'due_at', title: 'Due Date', type: 'short-input', - placeholder: 'ISO date (e.g., 2024-12-31T23:59:59Z)', + placeholder: 'ISO 8601 (e.g., 2024-12-31T23:59:59Z)', condition: { field: 'operation', value: 'task_create' }, - mode: 'advanced', + required: true, wandConfig: { enabled: true, prompt: 
`Generate an ISO 8601 timestamp based on the user's description. @@ -573,9 +854,20 @@ Return ONLY the timestamp string in ISO 8601 format - no explanations, no quotes }, }, { - id: 'completed', - title: 'Completed', - type: 'switch', + id: 'task_notes', + title: 'Task Notes', + type: 'long-input', + placeholder: 'Notes for the task', + condition: { field: 'operation', value: 'task_create' }, + mode: 'advanced', + }, + + // Task Search Fields + { + id: 'open_factor_names', + title: 'Open Factor Names', + type: 'code', + placeholder: '["task_types"]', condition: { field: 'operation', value: 'task_search' }, mode: 'advanced', }, @@ -707,75 +999,120 @@ Return ONLY the timestamp string in ISO 8601 format - no explanations, no quotes }, params: (params) => { const { apiKey, ...rest } = params + const parsedParams: Record = { apiKey, ...rest } - // Parse JSON inputs safely - const parsedParams: any = { apiKey, ...rest } + const parseJsonField = (field: string) => { + const value = (rest as Record)[field] + if (typeof value === 'string' && value.trim() !== '') { + parsedParams[field] = JSON.parse(value) + } + } try { - if (rest.person_titles && typeof rest.person_titles === 'string') { - parsedParams.person_titles = JSON.parse(rest.person_titles) - } - if (rest.person_locations && typeof rest.person_locations === 'string') { - parsedParams.person_locations = JSON.parse(rest.person_locations) - } - if (rest.person_seniorities && typeof rest.person_seniorities === 'string') { - parsedParams.person_seniorities = JSON.parse(rest.person_seniorities) - } - if (rest.organization_names && typeof rest.organization_names === 'string') { - parsedParams.organization_names = JSON.parse(rest.organization_names) - } - if (rest.organization_locations && typeof rest.organization_locations === 'string') { - parsedParams.organization_locations = JSON.parse(rest.organization_locations) - } - if ( - rest.organization_num_employees_ranges && - typeof rest.organization_num_employees_ranges === 
'string' - ) { - parsedParams.organization_num_employees_ranges = JSON.parse( - rest.organization_num_employees_ranges - ) + for (const field of [ + 'person_titles', + 'person_locations', + 'person_seniorities', + 'organization_names', + 'organization_locations', + 'organization_not_locations', + 'organization_ids', + 'q_organization_domains_list', + 'contact_email_status', + 'organization_num_employees_ranges', + 'q_organization_keyword_tags', + 'contact_stage_ids', + 'contact_label_ids', + 'account_stage_ids', + 'account_label_ids', + 'people', + 'organizations', + 'contacts', + 'accounts', + 'contact_ids', + 'contact_attributes', + 'account_attributes', + 'label_names', + 'sequence_add_label_names', + 'append_label_names', + 'typed_custom_fields', + 'open_factor_names', + ]) { + parseJsonField(field) } - if ( - rest.q_organization_keyword_tags && - typeof rest.q_organization_keyword_tags === 'string' - ) { - parsedParams.q_organization_keyword_tags = JSON.parse(rest.q_organization_keyword_tags) - } - if (rest.contact_stage_ids && typeof rest.contact_stage_ids === 'string') { - parsedParams.contact_stage_ids = JSON.parse(rest.contact_stage_ids) - } - if (rest.account_stage_ids && typeof rest.account_stage_ids === 'string') { - parsedParams.account_stage_ids = JSON.parse(rest.account_stage_ids) - } - if (rest.people && typeof rest.people === 'string') { - parsedParams.people = JSON.parse(rest.people) - } - if (rest.organizations && typeof rest.organizations === 'string') { - parsedParams.organizations = JSON.parse(rest.organizations) + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error) + throw new Error(`Invalid JSON input: ${message}`) + } + + const splitBulkUpdateInput = ( + raw: unknown + ): { ids?: string[]; attributes?: Array> } => { + if (!Array.isArray(raw)) return {} + const ids: string[] = [] + const attributes: Array> = [] + for (const item of raw) { + if (typeof item === 'string') { + ids.push(item) + continue + } + if (item && typeof item === 'object' && 'id' in item) { + const obj = item as Record + const id = obj.id + if (typeof id !== 'string') continue + const otherKeys = Object.keys(obj).filter((k) => k !== 'id') + if (otherKeys.length === 0) { + ids.push(id) + } else { + attributes.push(obj) + } + } } - if (rest.contacts && typeof rest.contacts === 'string') { - parsedParams.contacts = JSON.parse(rest.contacts) + return { + ids: ids.length > 0 ? ids : undefined, + attributes: attributes.length > 0 ? attributes : undefined, } - if (rest.accounts && typeof rest.accounts === 'string') { - parsedParams.accounts = JSON.parse(rest.accounts) + } + + if (params.operation === 'contact_bulk_update') { + const { ids, attributes } = splitBulkUpdateInput(parsedParams.contacts) + if (attributes) { + if (parsedParams.contact_attributes === undefined) { + parsedParams.contact_attributes = attributes + } + } else if (ids && parsedParams.contact_ids === undefined) { + parsedParams.contact_ids = ids } - if (rest.contact_ids && typeof rest.contact_ids === 'string') { - parsedParams.contact_ids = JSON.parse(rest.contact_ids) + parsedParams.contacts = undefined + } + + if (params.operation === 'account_bulk_update') { + const { ids, attributes } = splitBulkUpdateInput(parsedParams.accounts) + if (attributes) { + if (parsedParams.account_attributes === undefined) { + parsedParams.account_attributes = attributes + } + } else if (ids && parsedParams.account_ids === undefined) { + parsedParams.account_ids = ids } - if (rest.account_ids && typeof rest.account_ids === 'string') { - parsedParams.account_ids = 
JSON.parse(rest.account_ids) + parsedParams.accounts = undefined + if (rest.account_bulk_update_name) { + parsedParams.name = rest.account_bulk_update_name } - if (rest.stage_ids && typeof rest.stage_ids === 'string') { - parsedParams.stage_ids = JSON.parse(rest.stage_ids) + if (rest.account_bulk_update_owner_id) { + parsedParams.owner_id = rest.account_bulk_update_owner_id } - if (rest.owner_ids && typeof rest.owner_ids === 'string') { - parsedParams.owner_ids = JSON.parse(rest.owner_ids) + parsedParams.account_bulk_update_name = undefined + parsedParams.account_bulk_update_owner_id = undefined + } + + if (params.operation === 'contact_create') { + if (rest.contact_run_dedupe !== undefined) { + parsedParams.run_dedupe = rest.contact_run_dedupe } - } catch (error: any) { - throw new Error(`Invalid JSON input: ${error.message}`) + parsedParams.contact_run_dedupe = undefined } - // Map UI field names to API parameter names if (params.operation === 'account_create' || params.operation === 'account_update') { if (rest.account_name) parsedParams.name = rest.account_name parsedParams.account_name = undefined @@ -785,6 +1122,28 @@ Return ONLY the timestamp string in ISO 8601 format - no explanations, no quotes parsedParams.account_id = rest.account_id } + if (params.operation === 'sequence_add') { + if (parsedParams.sequence_add_label_names !== undefined) { + parsedParams.label_names = parsedParams.sequence_add_label_names + } + parsedParams.sequence_add_label_names = undefined + if (rest.sequence_user_id !== undefined && rest.sequence_user_id !== '') { + parsedParams.user_id = rest.sequence_user_id + } + parsedParams.sequence_user_id = undefined + if (rest.sequence_status !== undefined && rest.sequence_status !== '') { + parsedParams.status = rest.sequence_status + } + parsedParams.sequence_status = undefined + } + + if (params.operation === 'task_create') { + if (rest.task_notes !== undefined) { + parsedParams.note = rest.task_notes + } + parsedParams.task_notes = 
undefined + } + if ( params.operation === 'opportunity_create' || params.operation === 'opportunity_update' @@ -793,12 +1152,12 @@ Return ONLY the timestamp string in ISO 8601 format - no explanations, no quotes parsedParams.opportunity_name = undefined } - // Convert page/per_page to numbers if provided if (parsedParams.page) parsedParams.page = Number(parsedParams.page) if (parsedParams.per_page) parsedParams.per_page = Number(parsedParams.per_page) - // Convert amount to number if provided - if (parsedParams.amount) parsedParams.amount = Number(parsedParams.amount) + if (parsedParams.amount !== undefined && parsedParams.amount !== '') { + parsedParams.amount = String(parsedParams.amount) + } return parsedParams }, diff --git a/apps/sim/lib/workflows/migrations/subblock-migrations.ts b/apps/sim/lib/workflows/migrations/subblock-migrations.ts index d796a017288..91e80f9562d 100644 --- a/apps/sim/lib/workflows/migrations/subblock-migrations.ts +++ b/apps/sim/lib/workflows/migrations/subblock-migrations.ts @@ -40,6 +40,16 @@ export const SUBBLOCK_ID_MIGRATIONS: Record> = { expandApplicationFormDefinition: '_removed_expandApplicationFormDefinition', expandSurveyFormDefinitions: '_removed_expandSurveyFormDefinitions', }, + apollo: { + contact_ids_bulk: 'contacts', + account_ids_bulk: 'accounts', + close_date: 'closed_date', + stage_id: 'opportunity_stage_id', + note: 'task_notes', + description: '_removed_description', + stage_ids: '_removed_stage_ids', + owner_ids: '_removed_owner_ids', + }, rippling: { action: '_removed_action', candidateDepartment: '_removed_candidateDepartment', diff --git a/apps/sim/tools/apollo/account_bulk_create.ts b/apps/sim/tools/apollo/account_bulk_create.ts index 6b8cc45f4d4..68a9b3617fa 100644 --- a/apps/sim/tools/apollo/account_bulk_create.ts +++ b/apps/sim/tools/apollo/account_bulk_create.ts @@ -11,7 +11,7 @@ export const apolloAccountBulkCreateTool: ToolConfig< id: 'apollo_account_bulk_create', name: 'Apollo Bulk Create Accounts', 
description: - 'Create up to 100 accounts at once in your Apollo database. Note: Apollo does not apply deduplication - duplicate accounts may be created if entries share similar names or domains. Master key required.', + 'Create up to 100 accounts at once in your Apollo database. Set run_dedupe=true to deduplicate by domain, organization_id, and name. Master key required.', version: '1.0.0', params: { @@ -26,7 +26,20 @@ export const apolloAccountBulkCreateTool: ToolConfig< required: true, visibility: 'user-or-llm', description: - 'Array of accounts to create (max 100). Each account should include name (required), and optionally website_url, phone, owner_id', + 'Array of accounts to create (max 100). Each account should include a name, and may optionally include domain, phone, phone_status_cd, raw_address, owner_id, linkedin_url, facebook_url, twitter_url, salesforce_id, and hubspot_id.', + }, + append_label_names: { + type: 'array', + required: false, + visibility: 'user-only', + description: 'Array of label names to add to ALL accounts in this request', + }, + run_dedupe: { + type: 'boolean', + required: false, + visibility: 'user-or-llm', + description: + 'When true, performs aggressive deduplication by domain, organization_id, and name (defaults to false)', }, }, @@ -38,9 +51,16 @@ export const apolloAccountBulkCreateTool: ToolConfig< 'Cache-Control': 'no-cache', 'X-Api-Key': params.apiKey, }), - body: (params: ApolloAccountBulkCreateParams) => ({ - accounts: params.accounts.slice(0, 100), - }), + body: (params: ApolloAccountBulkCreateParams) => { + const body: Record = { + accounts: params.accounts.slice(0, 100), + } + if (params.append_label_names?.length) { + body.append_label_names = params.append_label_names + } + if (params.run_dedupe !== undefined) body.run_dedupe = params.run_dedupe + return body + }, }, transformResponse: async (response: Response) => { @@ -50,15 +70,24 @@ export const apolloAccountBulkCreateTool: ToolConfig< } const data = await 
response.json() + const createdAccounts = Array.isArray(data.created_accounts) + ? data.created_accounts + : Array.isArray(data.accounts) + ? data.accounts + : [] + const existingAccounts = Array.isArray(data.existing_accounts) ? data.existing_accounts : [] + const failedAccounts = Array.isArray(data.failed_accounts) ? data.failed_accounts : [] return { success: true, output: { - created_accounts: data.accounts || data.created_accounts || [], - failed_accounts: data.failed_accounts || [], - total_submitted: data.accounts?.length || 0, - created: data.created_accounts?.length || data.accounts?.length || 0, - failed: data.failed_accounts?.length || 0, + created_accounts: createdAccounts, + existing_accounts: existingAccounts, + failed_accounts: failedAccounts, + total_submitted: createdAccounts.length + existingAccounts.length + failedAccounts.length, + created: createdAccounts.length, + existing: existingAccounts.length, + failed: failedAccounts.length, }, } }, @@ -68,21 +97,29 @@ export const apolloAccountBulkCreateTool: ToolConfig< type: 'json', description: 'Array of newly created accounts', }, + existing_accounts: { + type: 'json', + description: 'Array of existing accounts returned by Apollo (when duplicates are detected)', + }, failed_accounts: { type: 'json', - description: 'Array of accounts that failed to create', + description: 'Array of accounts that failed to be created, with reasons for failure', }, total_submitted: { type: 'number', - description: 'Total number of accounts submitted', + description: 'Total number of accounts in the response (created + existing + failed)', }, created: { type: 'number', description: 'Number of accounts successfully created', }, + existing: { + type: 'number', + description: 'Number of existing accounts found', + }, failed: { type: 'number', - description: 'Number of accounts that failed to create', + description: 'Number of accounts that failed to be created', }, }, } diff --git 
a/apps/sim/tools/apollo/account_bulk_update.ts b/apps/sim/tools/apollo/account_bulk_update.ts index a78d4f8c45c..8a2df0b8dcd 100644 --- a/apps/sim/tools/apollo/account_bulk_update.ts +++ b/apps/sim/tools/apollo/account_bulk_update.ts @@ -21,12 +21,38 @@ export const apolloAccountBulkUpdateTool: ToolConfig< visibility: 'hidden', description: 'Apollo API key (master key required)', }, - accounts: { + account_ids: { type: 'array', - required: true, + required: false, + visibility: 'user-or-llm', + description: + 'Array of account IDs to update with the same values (max 1000). Use with name/owner_id for uniform updates. Use either this OR account_attributes.', + }, + name: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'When using account_ids, apply this name to all accounts', + }, + owner_id: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'When using account_ids, apply this owner to all accounts', + }, + account_attributes: { + type: 'json', + required: false, visibility: 'user-or-llm', description: - 'Array of accounts to update (max 1000). Each account must include id field, and optionally name, website_url, phone, owner_id', + 'Array of account objects with individual updates (each must include id). Example: [{"id": "acc1", "name": "Acme", "owner_id": "u1", "account_stage_id": "s1", "typed_custom_fields": {"field_id": "value"}}]', + }, + async: { + type: 'boolean', + required: false, + visibility: 'user-only', + description: + 'When true, processes the update asynchronously. 
Only supported when using account_ids; returns 422 if used with account_attributes.', }, }, @@ -38,9 +64,49 @@ export const apolloAccountBulkUpdateTool: ToolConfig< 'Cache-Control': 'no-cache', 'X-Api-Key': params.apiKey, }), - body: (params: ApolloAccountBulkUpdateParams) => ({ - accounts: params.accounts.slice(0, 1000), - }), + body: (params: ApolloAccountBulkUpdateParams) => { + const body: Record = {} + if (params.account_ids && params.account_ids.length > 0) { + body.account_ids = params.account_ids.slice(0, 1000) + } + if (params.name) body.name = params.name + if (params.owner_id) body.owner_id = params.owner_id + if (params.account_attributes) { + if (Array.isArray(params.account_attributes)) { + if (params.account_attributes.length > 0) { + body.account_attributes = params.account_attributes.slice(0, 1000) + } + } else if ( + typeof params.account_attributes === 'object' && + Object.keys(params.account_attributes).length > 0 + ) { + body.account_attributes = params.account_attributes + } + } + const hasUpdateFields = body.account_attributes || body.name || body.owner_id + if (!hasUpdateFields) { + throw new Error( + 'Apollo account bulk update requires update fields. Provide account_attributes (array of per-account updates with id, or single object paired with account_ids), or pair account_ids with name/owner_id to apply uniformly.' + ) + } + if (!body.account_ids && !body.account_attributes) { + throw new Error( + 'Apollo account bulk update requires account_ids (with name/owner_id) or account_attributes (with embedded ids).' + ) + } + if (body.account_attributes && !Array.isArray(body.account_attributes) && !body.account_ids) { + throw new Error( + 'Apollo account bulk update with object-form account_attributes requires account_ids to identify which accounts to update.' + ) + } + if (body.account_ids && Array.isArray(body.account_attributes)) { + throw new Error( + 'Apollo account bulk update cannot combine account_ids with array-form account_attributes. 
Use account_ids with name/owner_id (or object-form account_attributes), or use array-form account_attributes alone (each entry carries its own id).' + ) + } + if (params.async !== undefined) body.async = params.async + return body + }, }, transformResponse: async (response: Response) => { @@ -54,35 +120,21 @@ export const apolloAccountBulkUpdateTool: ToolConfig< return { success: true, output: { - updated_accounts: data.accounts || data.updated_accounts || [], - failed_accounts: data.failed_accounts || [], - total_submitted: data.accounts?.length || 0, - updated: data.updated_accounts?.length || data.accounts?.length || 0, - failed: data.failed_accounts?.length || 0, + message: data.message ?? null, + account_ids: data.account_ids ?? [], }, } }, outputs: { - updated_accounts: { - type: 'json', - description: 'Array of successfully updated accounts', + message: { + type: 'string', + description: 'Confirmation message from Apollo', + optional: true, }, - failed_accounts: { + account_ids: { type: 'json', - description: 'Array of accounts that failed to update', - }, - total_submitted: { - type: 'number', - description: 'Total number of accounts submitted', - }, - updated: { - type: 'number', - description: 'Number of accounts successfully updated', - }, - failed: { - type: 'number', - description: 'Number of accounts that failed to update', + description: 'IDs of accounts that were updated', }, }, } diff --git a/apps/sim/tools/apollo/account_create.ts b/apps/sim/tools/apollo/account_create.ts index eb341844098..220dfccc606 100644 --- a/apps/sim/tools/apollo/account_create.ts +++ b/apps/sim/tools/apollo/account_create.ts @@ -15,7 +15,7 @@ export const apolloAccountCreateTool: ToolConfig< type: 'string', required: true, visibility: 'hidden', - description: 'Apollo API key', + description: 'Apollo API key (master key required)', }, name: { type: 'string', @@ -23,23 +23,41 @@ export const apolloAccountCreateTool: ToolConfig< visibility: 'user-or-llm', description: 
'Company name (e.g., "Acme Corporation")', }, - website_url: { + domain: { type: 'string', required: false, visibility: 'user-or-llm', - description: 'Company website URL', + description: 'Company domain without www. prefix (e.g., "acme.com")', }, phone: { type: 'string', required: false, visibility: 'user-or-llm', - description: 'Company phone number', + description: 'Primary phone number for the account', }, owner_id: { type: 'string', required: false, visibility: 'user-only', - description: 'User ID of the account owner', + description: 'Apollo user ID of the account owner', + }, + account_stage_id: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'Apollo ID for the account stage to assign this account to', + }, + raw_address: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Corporate location (e.g., "San Francisco, CA, USA")', + }, + typed_custom_fields: { + type: 'json', + required: false, + visibility: 'user-only', + description: 'Custom field values as { custom_field_id: value } map', }, }, @@ -52,10 +70,13 @@ export const apolloAccountCreateTool: ToolConfig< 'X-Api-Key': params.apiKey, }), body: (params: ApolloAccountCreateParams) => { - const body: any = { name: params.name } - if (params.website_url) body.website_url = params.website_url + const body: Record = { name: params.name } + if (params.domain) body.domain = params.domain if (params.phone) body.phone = params.phone if (params.owner_id) body.owner_id = params.owner_id + if (params.account_stage_id) body.account_stage_id = params.account_stage_id + if (params.raw_address) body.raw_address = params.raw_address + if (params.typed_custom_fields) body.typed_custom_fields = params.typed_custom_fields return body }, }, @@ -67,12 +88,13 @@ export const apolloAccountCreateTool: ToolConfig< } const data = await response.json() + const account = data.account ?? (data.id ? 
data : null) return { success: true, output: { - account: data.account ?? null, - created: !!data.account, + account, + created: !!account, }, } }, diff --git a/apps/sim/tools/apollo/account_search.ts b/apps/sim/tools/apollo/account_search.ts index 78bfa0ce447..e60686d9347 100644 --- a/apps/sim/tools/apollo/account_search.ts +++ b/apps/sim/tools/apollo/account_search.ts @@ -18,23 +18,36 @@ export const apolloAccountSearchTool: ToolConfig< visibility: 'hidden', description: 'Apollo API key (master key required)', }, - q_keywords: { + q_organization_name: { type: 'string', required: false, visibility: 'user-or-llm', - description: 'Keywords to search for in account data', + description: 'Filter accounts by organization name (partial-match search)', }, - owner_id: { - type: 'string', + account_stage_ids: { + type: 'array', required: false, visibility: 'user-only', - description: 'Filter by account owner user ID', + description: 'Filter by account stage IDs', }, - account_stage_ids: { + account_label_ids: { type: 'array', required: false, visibility: 'user-only', - description: 'Filter by account stage IDs', + description: 'Filter by account label IDs', + }, + sort_by_field: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: + 'Sort field: "account_last_activity_date", "account_created_at", or "account_updated_at"', + }, + sort_ascending: { + type: 'boolean', + required: false, + visibility: 'user-or-llm', + description: 'Sort ascending when true. 
Defaults to descending.', }, page: { type: 'number', @@ -59,15 +72,19 @@ export const apolloAccountSearchTool: ToolConfig< 'X-Api-Key': params.apiKey, }), body: (params: ApolloAccountSearchParams) => { - const body: any = { + const body: Record = { page: params.page || 1, per_page: Math.min(params.per_page || 25, 100), } - if (params.q_keywords) body.q_keywords = params.q_keywords - if (params.owner_id) body.owner_id = params.owner_id + if (params.q_organization_name) body.q_organization_name = params.q_organization_name if (params.account_stage_ids?.length) { body.account_stage_ids = params.account_stage_ids } + if (params.account_label_ids?.length) { + body.account_label_ids = params.account_label_ids + } + if (params.sort_by_field) body.sort_by_field = params.sort_by_field + if (params.sort_ascending !== undefined) body.sort_ascending = params.sort_ascending return body }, }, @@ -83,7 +100,7 @@ export const apolloAccountSearchTool: ToolConfig< return { success: true, output: { - accounts: data.accounts ?? null, + accounts: data.accounts ?? [], pagination: data.pagination ?? 
null, }, } @@ -93,7 +110,6 @@ export const apolloAccountSearchTool: ToolConfig< accounts: { type: 'json', description: 'Array of accounts matching the search criteria', - optional: true, }, pagination: { type: 'json', description: 'Pagination information', optional: true }, }, diff --git a/apps/sim/tools/apollo/account_update.ts b/apps/sim/tools/apollo/account_update.ts index 996e80cc646..acaef42cc60 100644 --- a/apps/sim/tools/apollo/account_update.ts +++ b/apps/sim/tools/apollo/account_update.ts @@ -29,11 +29,11 @@ export const apolloAccountUpdateTool: ToolConfig< visibility: 'user-or-llm', description: 'Company name (e.g., "Acme Corporation")', }, - website_url: { + domain: { type: 'string', required: false, visibility: 'user-or-llm', - description: 'Company website URL', + description: 'Company domain (e.g., "acme.com")', }, phone: { type: 'string', @@ -45,13 +45,31 @@ export const apolloAccountUpdateTool: ToolConfig< type: 'string', required: false, visibility: 'user-only', - description: 'User ID of the account owner', + description: 'Apollo user ID of the account owner', + }, + account_stage_id: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'Apollo ID for the account stage to assign this account to', + }, + raw_address: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Corporate location (e.g., "San Francisco, CA, USA")', + }, + typed_custom_fields: { + type: 'json', + required: false, + visibility: 'user-only', + description: 'Custom field values as { custom_field_id: value } map', }, }, request: { url: (params: ApolloAccountUpdateParams) => - `https://api.apollo.io/api/v1/accounts/${params.account_id}`, + `https://api.apollo.io/api/v1/accounts/${params.account_id.trim()}`, method: 'PATCH', headers: (params: ApolloAccountUpdateParams) => ({ 'Content-Type': 'application/json', @@ -59,11 +77,14 @@ export const apolloAccountUpdateTool: ToolConfig< 'X-Api-Key': params.apiKey, }), body: 
(params: ApolloAccountUpdateParams) => { - const body: any = {} + const body: Record = {} if (params.name) body.name = params.name - if (params.website_url) body.website_url = params.website_url + if (params.domain) body.domain = params.domain if (params.phone) body.phone = params.phone if (params.owner_id) body.owner_id = params.owner_id + if (params.account_stage_id) body.account_stage_id = params.account_stage_id + if (params.raw_address) body.raw_address = params.raw_address + if (params.typed_custom_fields) body.typed_custom_fields = params.typed_custom_fields return body }, }, @@ -75,12 +96,13 @@ export const apolloAccountUpdateTool: ToolConfig< } const data = await response.json() + const account = data.account ?? (data.id ? data : null) return { success: true, output: { - account: data.account ?? null, - updated: !!data.account, + account, + updated: !!account, }, } }, diff --git a/apps/sim/tools/apollo/contact_bulk_create.ts b/apps/sim/tools/apollo/contact_bulk_create.ts index b16a4e94c13..5a012485741 100644 --- a/apps/sim/tools/apollo/contact_bulk_create.ts +++ b/apps/sim/tools/apollo/contact_bulk_create.ts @@ -26,7 +26,13 @@ export const apolloContactBulkCreateTool: ToolConfig< required: true, visibility: 'user-or-llm', description: - 'Array of contacts to create (max 100). Each contact should include first_name, last_name, and optionally email, title, account_id, owner_id', + 'Array of contacts to create (max 100). 
Each contact may include first_name, last_name, email, title, organization_name, account_id, owner_id, contact_stage_id, linkedin_url, phone (single string) or phone_numbers (array of {raw_number, position}), contact_emails, typed_custom_fields, and CRM IDs (salesforce_contact_id, hubspot_id, team_id) for cross-system matching', + }, + append_label_names: { + type: 'array', + required: false, + visibility: 'user-or-llm', + description: 'Label names to add to all contacts in this request (e.g., ["Hot Lead"])', }, run_dedupe: { type: 'boolean', @@ -46,12 +52,15 @@ export const apolloContactBulkCreateTool: ToolConfig< 'X-Api-Key': params.apiKey, }), body: (params: ApolloContactBulkCreateParams) => { - const body: any = { + const body: Record = { contacts: params.contacts.slice(0, 100), } if (params.run_dedupe !== undefined) { body.run_dedupe = params.run_dedupe } + if (params.append_label_names && params.append_label_names.length > 0) { + body.append_label_names = params.append_label_names + } return body }, }, @@ -63,15 +72,17 @@ export const apolloContactBulkCreateTool: ToolConfig< } const data = await response.json() + const createdContacts = data.created_contacts || data.contacts || [] + const existingContacts = data.existing_contacts || [] return { success: true, output: { - created_contacts: data.contacts || data.created_contacts || [], - existing_contacts: data.existing_contacts || [], - total_submitted: data.contacts?.length || 0, - created: data.created_contacts?.length || data.contacts?.length || 0, - existing: data.existing_contacts?.length || 0, + created_contacts: createdContacts, + existing_contacts: existingContacts, + total_submitted: createdContacts.length + existingContacts.length, + created: createdContacts.length, + existing: existingContacts.length, }, } }, diff --git a/apps/sim/tools/apollo/contact_bulk_update.ts b/apps/sim/tools/apollo/contact_bulk_update.ts index 4f254cfc1f1..298873022b5 100644 --- a/apps/sim/tools/apollo/contact_bulk_update.ts 
+++ b/apps/sim/tools/apollo/contact_bulk_update.ts @@ -21,12 +21,25 @@ export const apolloContactBulkUpdateTool: ToolConfig< visibility: 'hidden', description: 'Apollo API key (master key required)', }, - contacts: { + contact_ids: { type: 'array', - required: true, + required: false, + visibility: 'user-or-llm', + description: + 'Array of contact IDs to update. Must be paired with an object-form contact_attributes specifying the fields to apply uniformly to all listed contacts.', + }, + contact_attributes: { + type: 'json', + required: false, visibility: 'user-or-llm', description: - 'Array of contacts to update (max 100). Each contact must include id field, and optionally first_name, last_name, email, title, account_id, owner_id', + 'Required. Either an array of per-contact updates (each with id) — used standalone — or a single object of attributes to apply to all contact_ids. Supported fields: owner_id, email, organization_name, title, first_name, last_name, account_id, present_raw_address, linkedin_url, typed_custom_fields', + }, + async: { + type: 'boolean', + required: false, + visibility: 'user-only', + description: 'Force asynchronous processing. 
Automatically enabled for >100 contacts', }, }, @@ -38,9 +51,41 @@ export const apolloContactBulkUpdateTool: ToolConfig< 'Cache-Control': 'no-cache', 'X-Api-Key': params.apiKey, }), - body: (params: ApolloContactBulkUpdateParams) => ({ - contacts: params.contacts.slice(0, 100), - }), + body: (params: ApolloContactBulkUpdateParams) => { + const body: Record = {} + if (params.contact_ids && params.contact_ids.length > 0) { + body.contact_ids = params.contact_ids.slice(0, 100) + } + if (params.contact_attributes) { + if (Array.isArray(params.contact_attributes)) { + if (params.contact_attributes.length > 0) { + body.contact_attributes = params.contact_attributes.slice(0, 100) + } + } else if ( + typeof params.contact_attributes === 'object' && + Object.keys(params.contact_attributes).length > 0 + ) { + body.contact_attributes = params.contact_attributes + } + } + if (!body.contact_attributes) { + throw new Error( + 'Apollo bulk update requires contact_attributes (the fields to update). Use contact_attributes alone (array of per-contact updates with id) or together with contact_ids (single object applied to all listed contacts).' + ) + } + if (!Array.isArray(body.contact_attributes) && !body.contact_ids) { + throw new Error( + 'Apollo bulk update with object-form contact_attributes requires contact_ids to identify which contacts to update.' + ) + } + if (body.contact_ids && Array.isArray(body.contact_attributes)) { + throw new Error( + 'Apollo contact bulk update cannot combine contact_ids with array-form contact_attributes. Use contact_ids with object-form contact_attributes for uniform updates, or use array-form contact_attributes alone (each entry carries its own id).' 
+ ) + } + if (params.async !== undefined) body.async = params.async + return body + }, }, transformResponse: async (response: Response) => { @@ -54,35 +99,22 @@ export const apolloContactBulkUpdateTool: ToolConfig< return { success: true, output: { - updated_contacts: data.contacts || data.updated_contacts || [], - failed_contacts: data.failed_contacts || [], - total_submitted: data.contacts?.length || 0, - updated: data.updated_contacts?.length || data.contacts?.length || 0, - failed: data.failed_contacts?.length || 0, + message: data.message ?? null, + job_id: data.job_id ?? null, }, } }, outputs: { - updated_contacts: { - type: 'json', - description: 'Array of successfully updated contacts', - }, - failed_contacts: { - type: 'json', - description: 'Array of contacts that failed to update', - }, - total_submitted: { - type: 'number', - description: 'Total number of contacts submitted', - }, - updated: { - type: 'number', - description: 'Number of contacts successfully updated', + message: { + type: 'string', + description: 'Confirmation message from Apollo', + optional: true, }, - failed: { - type: 'number', - description: 'Number of contacts that failed to update', + job_id: { + type: 'string', + description: 'Async job ID (returned for >100 contacts)', + optional: true, }, }, } diff --git a/apps/sim/tools/apollo/contact_create.ts b/apps/sim/tools/apollo/contact_create.ts index ecc61a0b219..c1ebb6e2b62 100644 --- a/apps/sim/tools/apollo/contact_create.ts +++ b/apps/sim/tools/apollo/contact_create.ts @@ -51,7 +51,80 @@ export const apolloContactCreateTool: ToolConfig< type: 'string', required: false, visibility: 'user-only', - description: 'User ID of the contact owner', + description: + 'User ID of the contact owner (accepted by Apollo but not officially documented for POST /contacts)', + }, + organization_name: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Name of the contact\'s employer (e.g., "Apollo")', + }, + 
website_url: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Corporate website URL (e.g., "https://www.apollo.io/")', + }, + label_names: { + type: 'array', + required: false, + visibility: 'user-or-llm', + description: 'Lists/labels to add the contact to (e.g., ["Prospects"])', + }, + contact_stage_id: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Apollo ID for the contact stage', + }, + present_raw_address: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Personal location for the contact (e.g., "Atlanta, United States")', + }, + direct_phone: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Primary phone number', + }, + corporate_phone: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Work/office phone number', + }, + mobile_phone: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Mobile phone number', + }, + home_phone: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Home phone number', + }, + other_phone: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Alternative phone number', + }, + typed_custom_fields: { + type: 'json', + required: false, + visibility: 'user-or-llm', + description: 'Custom field values keyed by custom field ID', + }, + run_dedupe: { + type: 'boolean', + required: false, + visibility: 'user-only', + description: 'When true, Apollo deduplicates against existing contacts', }, }, @@ -64,7 +137,7 @@ export const apolloContactCreateTool: ToolConfig< 'X-Api-Key': params.apiKey, }), body: (params: ApolloContactCreateParams) => { - const body: any = { + const body: Record = { first_name: params.first_name, last_name: params.last_name, } @@ -72,6 +145,20 @@ export const apolloContactCreateTool: ToolConfig< if (params.title) body.title = params.title if 
(params.account_id) body.account_id = params.account_id if (params.owner_id) body.owner_id = params.owner_id + if (params.organization_name) body.organization_name = params.organization_name + if (params.website_url) body.website_url = params.website_url + if (params.label_names && params.label_names.length > 0) { + body.label_names = params.label_names + } + if (params.contact_stage_id) body.contact_stage_id = params.contact_stage_id + if (params.present_raw_address) body.present_raw_address = params.present_raw_address + if (params.direct_phone) body.direct_phone = params.direct_phone + if (params.corporate_phone) body.corporate_phone = params.corporate_phone + if (params.mobile_phone) body.mobile_phone = params.mobile_phone + if (params.home_phone) body.home_phone = params.home_phone + if (params.other_phone) body.other_phone = params.other_phone + if (params.typed_custom_fields) body.typed_custom_fields = params.typed_custom_fields + if (params.run_dedupe !== undefined) body.run_dedupe = params.run_dedupe return body }, }, @@ -83,12 +170,13 @@ export const apolloContactCreateTool: ToolConfig< } const data = await response.json() + const contact = data?.contact ?? (data?.id ? data : null) return { success: true, output: { - contact: data.contact ?? 
null, - created: !!data.contact, + contact, + created: !!contact, }, } }, diff --git a/apps/sim/tools/apollo/contact_search.ts b/apps/sim/tools/apollo/contact_search.ts index e2c80604634..7295b745c50 100644 --- a/apps/sim/tools/apollo/contact_search.ts +++ b/apps/sim/tools/apollo/contact_search.ts @@ -29,6 +29,25 @@ export const apolloContactSearchTool: ToolConfig< visibility: 'user-only', description: 'Filter by contact stage IDs', }, + contact_label_ids: { + type: 'array', + required: false, + visibility: 'user-only', + description: 'Filter by Apollo label IDs (lists)', + }, + sort_by_field: { + type: 'string', + required: false, + visibility: 'user-only', + description: + 'Sort field: contact_last_activity_date, contact_email_last_opened_at, contact_email_last_clicked_at, contact_created_at, or contact_updated_at', + }, + sort_ascending: { + type: 'boolean', + required: false, + visibility: 'user-only', + description: 'When true, sort ascending. Must be used together with sort_by_field', + }, page: { type: 'number', required: false, @@ -52,7 +71,7 @@ export const apolloContactSearchTool: ToolConfig< 'X-Api-Key': params.apiKey, }), body: (params: ApolloContactSearchParams) => { - const body: any = { + const body: Record = { page: params.page || 1, per_page: Math.min(params.per_page || 25, 100), } @@ -60,6 +79,11 @@ export const apolloContactSearchTool: ToolConfig< if (params.contact_stage_ids?.length) { body.contact_stage_ids = params.contact_stage_ids } + if (params.contact_label_ids?.length) { + body.contact_label_ids = params.contact_label_ids + } + if (params.sort_by_field) body.sort_by_field = params.sort_by_field + if (params.sort_ascending !== undefined) body.sort_ascending = params.sort_ascending return body }, }, @@ -75,7 +99,7 @@ export const apolloContactSearchTool: ToolConfig< return { success: true, output: { - contacts: data.contacts ?? null, + contacts: data.contacts ?? [], pagination: data.pagination ?? 
null, }, } @@ -85,7 +109,6 @@ export const apolloContactSearchTool: ToolConfig< contacts: { type: 'json', description: 'Array of contacts matching the search criteria', - optional: true, }, pagination: { type: 'json', description: 'Pagination information', optional: true }, }, diff --git a/apps/sim/tools/apollo/contact_update.ts b/apps/sim/tools/apollo/contact_update.ts index 31ebd0c877f..3c99984ca6f 100644 --- a/apps/sim/tools/apollo/contact_update.ts +++ b/apps/sim/tools/apollo/contact_update.ts @@ -57,13 +57,81 @@ export const apolloContactUpdateTool: ToolConfig< type: 'string', required: false, visibility: 'user-only', - description: 'User ID of the contact owner', + description: + 'User ID of the contact owner (accepted by Apollo but not officially documented for PATCH /contacts/{id})', + }, + organization_name: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Name of the contact\'s employer (e.g., "Apollo")', + }, + website_url: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Corporate website URL (e.g., "https://www.apollo.io/")', + }, + label_names: { + type: 'array', + required: false, + visibility: 'user-or-llm', + description: 'Lists/labels to add the contact to (e.g., ["Prospects"])', + }, + contact_stage_id: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Apollo ID for the contact stage', + }, + present_raw_address: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Personal location for the contact (e.g., "Atlanta, United States")', + }, + direct_phone: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Primary phone number', + }, + corporate_phone: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Work/office phone number', + }, + mobile_phone: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Mobile phone 
number', + }, + home_phone: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Home phone number', + }, + other_phone: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Alternative phone number', + }, + typed_custom_fields: { + type: 'json', + required: false, + visibility: 'user-or-llm', + description: + 'Custom field values keyed by custom field ID (accepted by Apollo but not officially documented for PATCH /contacts/{id})', }, }, request: { url: (params: ApolloContactUpdateParams) => - `https://api.apollo.io/api/v1/contacts/${params.contact_id}`, + `https://api.apollo.io/api/v1/contacts/${params.contact_id.trim()}`, method: 'PATCH', headers: (params: ApolloContactUpdateParams) => ({ 'Content-Type': 'application/json', @@ -71,13 +139,26 @@ export const apolloContactUpdateTool: ToolConfig< 'X-Api-Key': params.apiKey, }), body: (params: ApolloContactUpdateParams) => { - const body: any = {} + const body: Record = {} if (params.first_name) body.first_name = params.first_name if (params.last_name) body.last_name = params.last_name if (params.email) body.email = params.email if (params.title) body.title = params.title if (params.account_id) body.account_id = params.account_id if (params.owner_id) body.owner_id = params.owner_id + if (params.organization_name) body.organization_name = params.organization_name + if (params.website_url) body.website_url = params.website_url + if (params.label_names && params.label_names.length > 0) { + body.label_names = params.label_names + } + if (params.contact_stage_id) body.contact_stage_id = params.contact_stage_id + if (params.present_raw_address) body.present_raw_address = params.present_raw_address + if (params.direct_phone) body.direct_phone = params.direct_phone + if (params.corporate_phone) body.corporate_phone = params.corporate_phone + if (params.mobile_phone) body.mobile_phone = params.mobile_phone + if (params.home_phone) body.home_phone = 
params.home_phone + if (params.other_phone) body.other_phone = params.other_phone + if (params.typed_custom_fields) body.typed_custom_fields = params.typed_custom_fields return body }, }, @@ -89,12 +170,13 @@ export const apolloContactUpdateTool: ToolConfig< } const data = await response.json() + const contact = data?.contact ?? (data?.id ? data : null) return { success: true, output: { - contact: data.contact ?? null, - updated: !!data.contact, + contact, + updated: !!contact, }, } }, diff --git a/apps/sim/tools/apollo/email_accounts.ts b/apps/sim/tools/apollo/email_accounts.ts index ceb854a602f..96b9b58ee94 100644 --- a/apps/sim/tools/apollo/email_accounts.ts +++ b/apps/sim/tools/apollo/email_accounts.ts @@ -36,12 +36,13 @@ export const apolloEmailAccountsTool: ToolConfig< } const data = await response.json() + const accounts = Array.isArray(data) ? data : data.email_accounts || data.data || [] return { success: true, output: { - email_accounts: data.email_accounts || [], - total: data.email_accounts?.length || 0, + email_accounts: accounts, + total: accounts.length, }, } }, diff --git a/apps/sim/tools/apollo/opportunity_create.ts b/apps/sim/tools/apollo/opportunity_create.ts index 4880609e88f..5c949fc692c 100644 --- a/apps/sim/tools/apollo/opportunity_create.ts +++ b/apps/sim/tools/apollo/opportunity_create.ts @@ -28,21 +28,21 @@ export const apolloOpportunityCreateTool: ToolConfig< }, account_id: { type: 'string', - required: true, + required: false, visibility: 'user-or-llm', description: 'ID of the account this opportunity belongs to (e.g., "acc_abc123")', }, amount: { - type: 'number', + type: 'string', required: false, visibility: 'user-or-llm', - description: 'Monetary value of the opportunity', + description: 'Monetary value as a plain number string with no commas or currency symbols', }, - stage_id: { + opportunity_stage_id: { type: 'string', required: false, visibility: 'user-only', - description: 'ID of the deal stage', + description: 'ID of the 
opportunity stage', }, owner_id: { type: 'string', @@ -50,17 +50,17 @@ export const apolloOpportunityCreateTool: ToolConfig< visibility: 'user-only', description: 'User ID of the opportunity owner', }, - close_date: { + closed_date: { type: 'string', required: false, visibility: 'user-or-llm', - description: 'Expected close date (ISO 8601 format)', + description: 'Expected close date in YYYY-MM-DD format', }, - description: { - type: 'string', + typed_custom_fields: { + type: 'json', required: false, - visibility: 'user-or-llm', - description: 'Description or notes about the opportunity', + visibility: 'user-only', + description: 'Custom field values as { custom_field_id: value } map', }, }, @@ -73,15 +73,15 @@ export const apolloOpportunityCreateTool: ToolConfig< 'X-Api-Key': params.apiKey, }), body: (params: ApolloOpportunityCreateParams) => { - const body: any = { - name: params.name, - account_id: params.account_id, + const body: Record = { name: params.name } + if (params.account_id) body.account_id = params.account_id + if (params.amount !== undefined && params.amount !== null && params.amount !== '') { + body.amount = String(params.amount) } - if (params.amount !== undefined) body.amount = params.amount - if (params.stage_id) body.stage_id = params.stage_id + if (params.opportunity_stage_id) body.opportunity_stage_id = params.opportunity_stage_id if (params.owner_id) body.owner_id = params.owner_id - if (params.close_date) body.close_date = params.close_date - if (params.description) body.description = params.description + if (params.closed_date) body.closed_date = params.closed_date + if (params.typed_custom_fields) body.typed_custom_fields = params.typed_custom_fields return body }, }, @@ -93,12 +93,13 @@ export const apolloOpportunityCreateTool: ToolConfig< } const data = await response.json() + const opportunity = data.opportunity ?? (data.id ? data : null) return { success: true, output: { - opportunity: data.opportunity ?? 
null, - created: !!data.opportunity, + opportunity, + created: !!opportunity, }, } }, diff --git a/apps/sim/tools/apollo/opportunity_get.ts b/apps/sim/tools/apollo/opportunity_get.ts index 4ef79fea54d..db8613b91eb 100644 --- a/apps/sim/tools/apollo/opportunity_get.ts +++ b/apps/sim/tools/apollo/opportunity_get.ts @@ -27,7 +27,7 @@ export const apolloOpportunityGetTool: ToolConfig< request: { url: (params: ApolloOpportunityGetParams) => - `https://api.apollo.io/api/v1/opportunities/${params.opportunity_id}`, + `https://api.apollo.io/api/v1/opportunities/${params.opportunity_id.trim()}`, method: 'GET', headers: (params: ApolloOpportunityGetParams) => ({ 'Content-Type': 'application/json', @@ -47,14 +47,18 @@ export const apolloOpportunityGetTool: ToolConfig< return { success: true, output: { - opportunity: data.opportunity || {}, + opportunity: data.opportunity ?? null, found: !!data.opportunity, }, } }, outputs: { - opportunity: { type: 'json', description: 'Complete opportunity data from Apollo' }, + opportunity: { + type: 'json', + description: 'Complete opportunity data from Apollo', + optional: true, + }, found: { type: 'boolean', description: 'Whether the opportunity was found' }, }, } diff --git a/apps/sim/tools/apollo/opportunity_search.ts b/apps/sim/tools/apollo/opportunity_search.ts index 8f1a6db88e9..c7fa7245292 100644 --- a/apps/sim/tools/apollo/opportunity_search.ts +++ b/apps/sim/tools/apollo/opportunity_search.ts @@ -20,29 +20,11 @@ export const apolloOpportunitySearchTool: ToolConfig< visibility: 'hidden', description: 'Apollo API key', }, - q_keywords: { + sort_by_field: { type: 'string', required: false, visibility: 'user-or-llm', - description: 'Keywords to search for in opportunity names', - }, - account_ids: { - type: 'array', - required: false, - visibility: 'user-or-llm', - description: 'Filter by specific account IDs (e.g., ["acc_123", "acc_456"])', - }, - stage_ids: { - type: 'array', - required: false, - visibility: 'user-only', - 
description: 'Filter by deal stage IDs', - }, - owner_ids: { - type: 'array', - required: false, - visibility: 'user-only', - description: 'Filter by opportunity owner IDs', + description: 'Sort field: "amount", "is_closed", or "is_won"', }, page: { type: 'number', @@ -59,24 +41,18 @@ export const apolloOpportunitySearchTool: ToolConfig< }, request: { - url: 'https://api.apollo.io/api/v1/opportunities/search', - method: 'POST', + url: (params: ApolloOpportunitySearchParams) => { + const query = new URLSearchParams() + query.set('page', String(params.page || 1)) + query.set('per_page', String(Math.min(params.per_page || 25, 100))) + if (params.sort_by_field) query.set('sort_by_field', params.sort_by_field) + return `https://api.apollo.io/api/v1/opportunities/search?${query.toString()}` + }, + method: 'GET', headers: (params: ApolloOpportunitySearchParams) => ({ - 'Content-Type': 'application/json', 'Cache-Control': 'no-cache', 'X-Api-Key': params.apiKey, }), - body: (params: ApolloOpportunitySearchParams) => { - const body: any = { - page: params.page || 1, - per_page: Math.min(params.per_page || 25, 100), - } - if (params.q_keywords) body.q_keywords = params.q_keywords - if (params.account_ids?.length) body.account_ids = params.account_ids - if (params.stage_ids?.length) body.stage_ids = params.stage_ids - if (params.owner_ids?.length) body.owner_ids = params.owner_ids - return body - }, }, transformResponse: async (response: Response) => { @@ -90,10 +66,10 @@ export const apolloOpportunitySearchTool: ToolConfig< return { success: true, output: { - opportunities: data.opportunities || [], - page: data.pagination?.page || 1, - per_page: data.pagination?.per_page || 25, - total_entries: data.pagination?.total_entries || 0, + opportunities: data.opportunities ?? [], + page: data.pagination?.page ?? 1, + per_page: data.pagination?.per_page ?? 25, + total_entries: data.pagination?.total_entries ?? 
0, }, } }, diff --git a/apps/sim/tools/apollo/opportunity_update.ts b/apps/sim/tools/apollo/opportunity_update.ts index 8bb31c901dd..aea000414f8 100644 --- a/apps/sim/tools/apollo/opportunity_update.ts +++ b/apps/sim/tools/apollo/opportunity_update.ts @@ -33,16 +33,16 @@ export const apolloOpportunityUpdateTool: ToolConfig< description: 'Name of the opportunity/deal (e.g., "Enterprise License - Q1")', }, amount: { - type: 'number', + type: 'string', required: false, visibility: 'user-or-llm', - description: 'Monetary value of the opportunity', + description: 'Monetary value as a plain number string with no commas or currency symbols', }, - stage_id: { + opportunity_stage_id: { type: 'string', required: false, visibility: 'user-only', - description: 'ID of the deal stage', + description: 'ID of the opportunity stage', }, owner_id: { type: 'string', @@ -50,23 +50,23 @@ export const apolloOpportunityUpdateTool: ToolConfig< visibility: 'user-only', description: 'User ID of the opportunity owner', }, - close_date: { + closed_date: { type: 'string', required: false, visibility: 'user-or-llm', - description: 'Expected close date (ISO 8601 format)', + description: 'Expected close date in YYYY-MM-DD format', }, - description: { - type: 'string', + typed_custom_fields: { + type: 'json', required: false, - visibility: 'user-or-llm', - description: 'Description or notes about the opportunity', + visibility: 'user-only', + description: 'Custom field values as { custom_field_id: value } map', }, }, request: { url: (params: ApolloOpportunityUpdateParams) => - `https://api.apollo.io/api/v1/opportunities/${params.opportunity_id}`, + `https://api.apollo.io/api/v1/opportunities/${params.opportunity_id.trim()}`, method: 'PATCH', headers: (params: ApolloOpportunityUpdateParams) => ({ 'Content-Type': 'application/json', @@ -74,13 +74,15 @@ export const apolloOpportunityUpdateTool: ToolConfig< 'X-Api-Key': params.apiKey, }), body: (params: ApolloOpportunityUpdateParams) => { - const 
body: any = {} + const body: Record = {} if (params.name) body.name = params.name - if (params.amount !== undefined) body.amount = params.amount - if (params.stage_id) body.stage_id = params.stage_id + if (params.amount !== undefined && params.amount !== null && params.amount !== '') { + body.amount = String(params.amount) + } + if (params.opportunity_stage_id) body.opportunity_stage_id = params.opportunity_stage_id if (params.owner_id) body.owner_id = params.owner_id - if (params.close_date) body.close_date = params.close_date - if (params.description) body.description = params.description + if (params.closed_date) body.closed_date = params.closed_date + if (params.typed_custom_fields) body.typed_custom_fields = params.typed_custom_fields return body }, }, @@ -92,12 +94,13 @@ export const apolloOpportunityUpdateTool: ToolConfig< } const data = await response.json() + const opportunity = data.opportunity ?? (data.id ? data : null) return { success: true, output: { - opportunity: data.opportunity ?? null, - updated: !!data.opportunity, + opportunity, + updated: !!opportunity, }, } }, diff --git a/apps/sim/tools/apollo/organization_bulk_enrich.ts b/apps/sim/tools/apollo/organization_bulk_enrich.ts index c72d31a0e89..d461318431c 100644 --- a/apps/sim/tools/apollo/organization_bulk_enrich.ts +++ b/apps/sim/tools/apollo/organization_bulk_enrich.ts @@ -24,7 +24,8 @@ export const apolloOrganizationBulkEnrichTool: ToolConfig< type: 'array', required: true, visibility: 'user-or-llm', - description: 'Array of organizations to enrich (max 10)', + description: + 'Array of organizations to enrich (max 10). 
Each item requires `name` and may include `domain` (e.g., [{"name": "Example Corp", "domain": "example.com"}])', }, }, @@ -37,7 +38,7 @@ export const apolloOrganizationBulkEnrichTool: ToolConfig< 'X-Api-Key': params.apiKey, }), body: (params: ApolloOrganizationBulkEnrichParams) => ({ - details: params.organizations.slice(0, 10), + organizations: params.organizations.slice(0, 10), }), }, @@ -48,20 +49,28 @@ export const apolloOrganizationBulkEnrichTool: ToolConfig< } const data = await response.json() + const organizations = data.organizations ?? [] return { success: true, output: { - organizations: data.matches || [], - total: data.matches?.length || 0, - enriched: data.matches?.filter((o: any) => o).length || 0, + organizations, + total: data.total_requested_domains ?? organizations.length, + enriched: data.unique_enriched_records ?? organizations.length, + missing_records: data.missing_records ?? 0, + unique_domains: data.unique_domains ?? organizations.length, }, } }, outputs: { organizations: { type: 'json', description: 'Array of enriched organization data' }, - total: { type: 'number', description: 'Total number of organizations processed' }, - enriched: { type: 'number', description: 'Number of organizations successfully enriched' }, + total: { type: 'number', description: 'Total number of domains requested' }, + enriched: { type: 'number', description: 'Number of unique enriched records' }, + missing_records: { + type: 'number', + description: 'Number of domains that could not be enriched', + }, + unique_domains: { type: 'number', description: 'Number of unique domains processed' }, }, } diff --git a/apps/sim/tools/apollo/organization_enrich.ts b/apps/sim/tools/apollo/organization_enrich.ts index d6c1de4f0d7..6c3702c56ab 100644 --- a/apps/sim/tools/apollo/organization_enrich.ts +++ b/apps/sim/tools/apollo/organization_enrich.ts @@ -20,19 +20,11 @@ export const apolloOrganizationEnrichTool: ToolConfig< visibility: 'hidden', description: 'Apollo API key', }, 
- organization_name: { - type: 'string', - required: false, - visibility: 'user-or-llm', - description: - 'Name of the organization (e.g., "Acme Corporation") - at least one of organization_name or domain is required', - }, domain: { type: 'string', - required: false, + required: true, visibility: 'user-or-llm', - description: - 'Company domain (e.g., "apollo.io", "acme.com") - at least one of domain or organization_name is required', + description: 'Company domain (e.g., "apollo.io", "acme.com")', }, }, @@ -45,17 +37,11 @@ export const apolloOrganizationEnrichTool: ToolConfig< 'X-Api-Key': params.apiKey, }), body: (params: ApolloOrganizationEnrichParams) => { - // At least one identifier is required - if (!params.organization_name && !params.domain) { - throw new Error( - 'At least one of organization_name or domain is required for organization enrichment' - ) + const domain = params.domain?.trim() + if (!domain) { + throw new Error('domain is required for organization enrichment') } - - const body: any = {} - if (params.organization_name) body.name = params.organization_name - if (params.domain) body.domain = params.domain - return body + return { domain } }, }, @@ -70,14 +56,18 @@ export const apolloOrganizationEnrichTool: ToolConfig< return { success: true, output: { - organization: data.organization || {}, + organization: data.organization ?? 
null, enriched: !!data.organization, }, } }, outputs: { - organization: { type: 'json', description: 'Enriched organization data from Apollo' }, + organization: { + type: 'json', + description: 'Enriched organization data from Apollo', + optional: true, + }, enriched: { type: 'boolean', description: 'Whether the organization was successfully enriched', diff --git a/apps/sim/tools/apollo/organization_search.ts b/apps/sim/tools/apollo/organization_search.ts index 18f31d376f0..1b6213039fb 100644 --- a/apps/sim/tools/apollo/organization_search.ts +++ b/apps/sim/tools/apollo/organization_search.ts @@ -24,13 +24,20 @@ export const apolloOrganizationSearchTool: ToolConfig< type: 'array', required: false, visibility: 'user-or-llm', - description: 'Company locations to search', + description: 'Company HQ locations (cities, US states, or countries)', + }, + organization_not_locations: { + type: 'array', + required: false, + visibility: 'user-or-llm', + description: 'Exclude companies whose HQ is in these locations', }, organization_num_employees_ranges: { type: 'array', required: false, visibility: 'user-or-llm', - description: 'Employee count ranges (e.g., ["1-10", "11-50"])', + description: + 'Employee count ranges as "min,max" strings (e.g., ["1,10", "250,500", "10000,20000"])', }, q_organization_keyword_tags: { type: 'array', @@ -44,6 +51,18 @@ export const apolloOrganizationSearchTool: ToolConfig< visibility: 'user-or-llm', description: 'Organization name to search for (e.g., "Acme", "TechCorp")', }, + organization_ids: { + type: 'array', + required: false, + visibility: 'user-or-llm', + description: 'Apollo organization IDs to include (e.g., ["5e66b6381e05b4008c8331b8"])', + }, + q_organization_domains_list: { + type: 'array', + required: false, + visibility: 'user-or-llm', + description: 'Domain names to filter by (no www. 
or @, up to 1,000)', + }, page: { type: 'number', required: false, @@ -67,7 +86,7 @@ export const apolloOrganizationSearchTool: ToolConfig< 'X-Api-Key': params.apiKey, }), body: (params: ApolloOrganizationSearchParams) => { - const body: any = { + const body: Record = { page: params.page || 1, per_page: Math.min(params.per_page || 25, 100), } @@ -75,6 +94,9 @@ export const apolloOrganizationSearchTool: ToolConfig< if (params.organization_locations?.length) { body.organization_locations = params.organization_locations } + if (params.organization_not_locations?.length) { + body.organization_not_locations = params.organization_not_locations + } if (params.organization_num_employees_ranges?.length) { body.organization_num_employees_ranges = params.organization_num_employees_ranges } @@ -84,6 +106,12 @@ export const apolloOrganizationSearchTool: ToolConfig< if (params.q_organization_name) { body.q_organization_name = params.q_organization_name } + if (params.organization_ids?.length) { + body.organization_ids = params.organization_ids + } + if (params.q_organization_domains_list?.length) { + body.q_organization_domains_list = params.q_organization_domains_list + } return body }, diff --git a/apps/sim/tools/apollo/people_bulk_enrich.ts b/apps/sim/tools/apollo/people_bulk_enrich.ts index cb6c35aa8ea..9c9721ff9a8 100644 --- a/apps/sim/tools/apollo/people_bulk_enrich.ts +++ b/apps/sim/tools/apollo/people_bulk_enrich.ts @@ -36,12 +36,32 @@ export const apolloPeopleBulkEnrichTool: ToolConfig< type: 'boolean', required: false, visibility: 'user-only', - description: 'Reveal phone numbers (uses credits)', + description: 'Reveal phone numbers (uses credits, requires webhook_url)', + }, + webhook_url: { + type: 'string', + required: false, + visibility: 'user-only', + description: + 'Webhook URL for async phone number delivery (required when reveal_phone_number is true)', }, }, request: { - url: 'https://api.apollo.io/api/v1/people/bulk_match', + url: (params: 
ApolloPeopleBulkEnrichParams) => { + const qs = new URLSearchParams() + if (params.reveal_personal_emails !== undefined) { + qs.set('reveal_personal_emails', String(params.reveal_personal_emails)) + } + if (params.reveal_phone_number !== undefined) { + qs.set('reveal_phone_number', String(params.reveal_phone_number)) + } + if (params.webhook_url) { + qs.set('webhook_url', params.webhook_url) + } + const query = qs.toString() + return `https://api.apollo.io/api/v1/people/bulk_match${query ? `?${query}` : ''}` + }, method: 'POST', headers: (params: ApolloPeopleBulkEnrichParams) => ({ 'Content-Type': 'application/json', @@ -50,8 +70,6 @@ export const apolloPeopleBulkEnrichTool: ToolConfig< }), body: (params: ApolloPeopleBulkEnrichParams) => ({ details: params.people.slice(0, 10), - reveal_personal_emails: params.reveal_personal_emails, - reveal_phone_number: params.reveal_phone_number, }), }, @@ -62,20 +80,46 @@ export const apolloPeopleBulkEnrichTool: ToolConfig< } const data = await response.json() + const matches = Array.isArray(data.matches) + ? data.matches + : Array.isArray(data.people) + ? data.people + : [] return { success: true, output: { - people: data.matches || [], - total: data.matches?.length || 0, - enriched: data.matches?.filter((p: any) => p).length || 0, + matches, + total_requested_enrichments: data.total_requested_enrichments ?? matches.length, + unique_enriched_records: data.unique_enriched_records ?? matches.filter(Boolean).length, + missing_records: data.missing_records ?? null, + credits_consumed: data.credits_consumed ?? 
null, }, } }, outputs: { - people: { type: 'json', description: 'Array of enriched people data' }, - total: { type: 'number', description: 'Total number of people processed' }, - enriched: { type: 'number', description: 'Number of people successfully enriched' }, + matches: { + type: 'json', + description: 'Array of enriched people (null entries indicate no match)', + }, + total_requested_enrichments: { + type: 'number', + description: 'Total number of records submitted for enrichment', + }, + unique_enriched_records: { + type: 'number', + description: 'Number of records successfully enriched', + }, + missing_records: { + type: 'number', + description: 'Number of records that could not be enriched', + optional: true, + }, + credits_consumed: { + type: 'number', + description: 'Number of Apollo credits consumed by this request', + optional: true, + }, }, } diff --git a/apps/sim/tools/apollo/people_enrich.ts b/apps/sim/tools/apollo/people_enrich.ts index 80e5f0322ee..c231bcf3c07 100644 --- a/apps/sim/tools/apollo/people_enrich.ts +++ b/apps/sim/tools/apollo/people_enrich.ts @@ -29,6 +29,24 @@ export const apolloPeopleEnrichTool: ToolConfig< visibility: 'user-or-llm', description: 'Last name of the person', }, + name: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Full name of the person (alternative to first_name/last_name)', + }, + id: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Apollo ID for the person', + }, + hashed_email: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'MD5 or SHA-256 hashed email', + }, email: { type: 'string', required: false, @@ -63,12 +81,32 @@ export const apolloPeopleEnrichTool: ToolConfig< type: 'boolean', required: false, visibility: 'user-only', - description: 'Reveal phone numbers (uses credits)', + description: 'Reveal phone numbers (uses credits, requires webhook_url)', + }, + webhook_url: { + type: 'string', + required: 
false, + visibility: 'user-only', + description: + 'Webhook URL for async phone number delivery (required when reveal_phone_number is true)', }, }, request: { - url: 'https://api.apollo.io/api/v1/people/match', + url: (params: ApolloPeopleEnrichParams) => { + const qs = new URLSearchParams() + if (params.reveal_personal_emails !== undefined) { + qs.set('reveal_personal_emails', String(params.reveal_personal_emails)) + } + if (params.reveal_phone_number !== undefined) { + qs.set('reveal_phone_number', String(params.reveal_phone_number)) + } + if (params.webhook_url) { + qs.set('webhook_url', params.webhook_url) + } + const query = qs.toString() + return `https://api.apollo.io/api/v1/people/match${query ? `?${query}` : ''}` + }, method: 'POST', headers: (params: ApolloPeopleEnrichParams) => ({ 'Content-Type': 'application/json', @@ -76,20 +114,17 @@ export const apolloPeopleEnrichTool: ToolConfig< 'X-Api-Key': params.apiKey, }), body: (params: ApolloPeopleEnrichParams) => { - const body: any = {} + const body: Record = {} if (params.first_name) body.first_name = params.first_name if (params.last_name) body.last_name = params.last_name + if (params.name) body.name = params.name if (params.email) body.email = params.email + if (params.hashed_email) body.hashed_email = params.hashed_email + if (params.id) body.id = params.id if (params.organization_name) body.organization_name = params.organization_name if (params.domain) body.domain = params.domain if (params.linkedin_url) body.linkedin_url = params.linkedin_url - if (params.reveal_personal_emails !== undefined) { - body.reveal_personal_emails = params.reveal_personal_emails - } - if (params.reveal_phone_number !== undefined) { - body.reveal_phone_number = params.reveal_phone_number - } return body }, @@ -106,14 +141,18 @@ export const apolloPeopleEnrichTool: ToolConfig< return { success: true, output: { - person: data.person || {}, + person: data.person ?? 
null, enriched: !!data.person, }, } }, outputs: { - person: { type: 'json', description: 'Enriched person data from Apollo' }, + person: { + type: 'json', + description: 'Enriched person data from Apollo', + optional: true, + }, enriched: { type: 'boolean', description: 'Whether the person was successfully enriched' }, }, } diff --git a/apps/sim/tools/apollo/people_search.ts b/apps/sim/tools/apollo/people_search.ts index c4841024e8e..7e4bf528a32 100644 --- a/apps/sim/tools/apollo/people_search.ts +++ b/apps/sim/tools/apollo/people_search.ts @@ -23,6 +23,12 @@ export const apolloPeopleSearchTool: ToolConfig< visibility: 'user-or-llm', description: 'Job titles to search for (e.g., ["CEO", "VP of Sales"])', }, + include_similar_titles: { + type: 'boolean', + required: false, + visibility: 'user-or-llm', + description: 'Whether to return people with job titles similar to person_titles', + }, person_locations: { type: 'array', required: false, @@ -33,13 +39,48 @@ export const apolloPeopleSearchTool: ToolConfig< type: 'array', required: false, visibility: 'user-or-llm', - description: 'Seniority levels (e.g., ["senior", "executive", "manager"])', + description: + 'Seniority levels (one of: owner, founder, c_suite, partner, vp, head, director, manager, senior, entry, intern)', + }, + organization_ids: { + type: 'array', + required: false, + visibility: 'user-or-llm', + description: 'Apollo organization IDs to filter by (e.g., ["5e66b6381e05b4008c8331b8"])', }, organization_names: { type: 'array', required: false, visibility: 'user-or-llm', - description: 'Company names to search within', + description: 'Company names to search within (legacy filter)', + }, + organization_locations: { + type: 'array', + required: false, + visibility: 'user-or-llm', + description: + "Headquarters locations of the people's current employer (e.g., ['texas', 'tokyo', 'spain'])", + }, + q_organization_domains_list: { + type: 'array', + required: false, + visibility: 'user-or-llm', + 
description: + 'Employer domain names (e.g., ["apollo.io", "microsoft.com"]) — up to 1,000, no www. or @', + }, + organization_num_employees_ranges: { + type: 'array', + required: false, + visibility: 'user-or-llm', + description: + 'Employee count ranges for the person\'s current employer. Each entry is "min,max" (e.g., ["1,10", "250,500", "10000,20000"])', + }, + contact_email_status: { + type: 'array', + required: false, + visibility: 'user-or-llm', + description: + 'Email statuses to filter by: "verified", "unverified", "likely to engage", "unavailable"', }, q_keywords: { type: 'string', @@ -70,7 +111,7 @@ export const apolloPeopleSearchTool: ToolConfig< 'X-Api-Key': params.apiKey, }), body: (params: ApolloPeopleSearchParams) => { - const body: any = { + const body: Record = { page: params.page || 1, per_page: Math.min(params.per_page || 25, 100), } @@ -78,15 +119,36 @@ export const apolloPeopleSearchTool: ToolConfig< if (params.person_titles && params.person_titles.length > 0) { body.person_titles = params.person_titles } + if (params.include_similar_titles !== undefined) { + body.include_similar_titles = params.include_similar_titles + } if (params.person_locations && params.person_locations.length > 0) { body.person_locations = params.person_locations } if (params.person_seniorities && params.person_seniorities.length > 0) { body.person_seniorities = params.person_seniorities } + if (params.organization_ids && params.organization_ids.length > 0) { + body.organization_ids = params.organization_ids + } if (params.organization_names && params.organization_names.length > 0) { body.organization_names = params.organization_names } + if (params.organization_locations && params.organization_locations.length > 0) { + body.organization_locations = params.organization_locations + } + if (params.q_organization_domains_list && params.q_organization_domains_list.length > 0) { + body.q_organization_domains_list = params.q_organization_domains_list + } + if ( + 
params.organization_num_employees_ranges && + params.organization_num_employees_ranges.length > 0 + ) { + body.organization_num_employees_ranges = params.organization_num_employees_ranges + } + if (params.contact_email_status && params.contact_email_status.length > 0) { + body.contact_email_status = params.contact_email_status + } if (params.q_keywords) { body.q_keywords = params.q_keywords } diff --git a/apps/sim/tools/apollo/sequence_add_contacts.ts b/apps/sim/tools/apollo/sequence_add_contacts.ts index 652ddca2dc1..7008a5d894f 100644 --- a/apps/sim/tools/apollo/sequence_add_contacts.ts +++ b/apps/sim/tools/apollo/sequence_add_contacts.ts @@ -28,28 +28,108 @@ export const apolloSequenceAddContactsTool: ToolConfig< }, contact_ids: { type: 'array', + required: false, + visibility: 'user-or-llm', + description: + 'Array of contact IDs to add to the sequence (e.g., ["con_abc123", "con_def456"]). Either contact_ids or label_names must be provided.', + }, + label_names: { + type: 'array', + required: false, + visibility: 'user-or-llm', + description: + 'Array of label names to identify contacts to add to the sequence. Either contact_ids or label_names must be provided.', + }, + send_email_from_email_account_id: { + type: 'string', required: true, visibility: 'user-or-llm', description: - 'Array of contact IDs to add to the sequence (e.g., ["con_abc123", "con_def456"])', + 'ID of the email account to send from. 
Use the Get Email Accounts operation to look this up.', + }, + send_email_from_email_address: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'Specific email address to send from within the email account.', + }, + sequence_no_email: { + type: 'boolean', + required: false, + visibility: 'user-only', + description: 'Add contacts even if they have no email address', + }, + sequence_unverified_email: { + type: 'boolean', + required: false, + visibility: 'user-only', + description: 'Add contacts with unverified email addresses', + }, + sequence_job_change: { + type: 'boolean', + required: false, + visibility: 'user-only', + description: 'Add contacts who recently changed jobs', + }, + sequence_active_in_other_campaigns: { + type: 'boolean', + required: false, + visibility: 'user-only', + description: 'Add contacts active in other campaigns', + }, + sequence_finished_in_other_campaigns: { + type: 'boolean', + required: false, + visibility: 'user-only', + description: 'Add contacts who finished other campaigns', }, - emailer_campaign_id: { + sequence_same_company_in_same_campaign: { + type: 'boolean', + required: false, + visibility: 'user-only', + description: 'Add contacts even if others from the same company are in the sequence', + }, + contacts_without_ownership_permission: { + type: 'boolean', + required: false, + visibility: 'user-only', + description: 'Add contacts without ownership permission', + }, + add_if_in_queue: { + type: 'boolean', + required: false, + visibility: 'user-only', + description: 'Add contacts even if they are in the queue', + }, + contact_verification_skipped: { + type: 'boolean', + required: false, + visibility: 'user-only', + description: 'Skip contact verification when adding', + }, + user_id: { type: 'string', required: false, visibility: 'user-only', - description: 'Optional emailer campaign ID', + description: 'ID of the user performing the action', }, - send_email_from_user_id: { + status: { type: 
'string', required: false, visibility: 'user-only', - description: 'User ID to send emails from', + description: 'Initial status for added contacts: "active" or "paused"', + }, + auto_unpause_at: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'ISO 8601 datetime to automatically unpause contacts', }, }, request: { url: (params: ApolloSequenceAddContactsParams) => - `https://api.apollo.io/api/v1/emailer_campaigns/${params.sequence_id}/add_contact_ids`, + `https://api.apollo.io/api/v1/emailer_campaigns/${params.sequence_id.trim()}/add_contact_ids`, method: 'POST', headers: (params: ApolloSequenceAddContactsParams) => ({ 'Content-Type': 'application/json', @@ -57,15 +137,48 @@ export const apolloSequenceAddContactsTool: ToolConfig< 'X-Api-Key': params.apiKey, }), body: (params: ApolloSequenceAddContactsParams) => { - const body: any = { - contact_ids: params.contact_ids, + const hasContactIds = !!params.contact_ids?.length + const hasLabelNames = !!params.label_names?.length + if (!hasContactIds && !hasLabelNames) { + throw new Error( + 'Apollo sequence add requires either contact_ids or label_names to be provided' + ) + } + const body: Record = { + emailer_campaign_id: params.sequence_id, + send_email_from_email_account_id: params.send_email_from_email_account_id, + } + if (hasContactIds) body.contact_ids = params.contact_ids + if (hasLabelNames) body.label_names = params.label_names + if (params.send_email_from_email_address) { + body.send_email_from_email_address = params.send_email_from_email_address + } + if (params.sequence_no_email !== undefined) body.sequence_no_email = params.sequence_no_email + if (params.sequence_unverified_email !== undefined) { + body.sequence_unverified_email = params.sequence_unverified_email + } + if (params.sequence_job_change !== undefined) { + body.sequence_job_change = params.sequence_job_change + } + if (params.sequence_active_in_other_campaigns !== undefined) { + 
body.sequence_active_in_other_campaigns = params.sequence_active_in_other_campaigns + } + if (params.sequence_finished_in_other_campaigns !== undefined) { + body.sequence_finished_in_other_campaigns = params.sequence_finished_in_other_campaigns } - if (params.emailer_campaign_id) { - body.emailer_campaign_id = params.emailer_campaign_id + if (params.sequence_same_company_in_same_campaign !== undefined) { + body.sequence_same_company_in_same_campaign = params.sequence_same_company_in_same_campaign } - if (params.send_email_from_user_id) { - body.send_email_from_user_id = params.send_email_from_user_id + if (params.contacts_without_ownership_permission !== undefined) { + body.contacts_without_ownership_permission = params.contacts_without_ownership_permission } + if (params.add_if_in_queue !== undefined) body.add_if_in_queue = params.add_if_in_queue + if (params.contact_verification_skipped !== undefined) { + body.contact_verification_skipped = params.contact_verification_skipped + } + if (params.user_id) body.user_id = params.user_id + if (params.status) body.status = params.status + if (params.auto_unpause_at) body.auto_unpause_at = params.auto_unpause_at return body }, }, @@ -78,19 +191,58 @@ export const apolloSequenceAddContactsTool: ToolConfig< const data = await response.json() + // Apollo's response shape for this endpoint varies: some payloads return a flat + // `contacts: [...]` array of successfully added contacts, others wrap under + // `contacts: { added, skipped }`. Handle both defensively. + const contactsField = data?.contacts + const added = Array.isArray(contactsField) + ? contactsField + : Array.isArray(contactsField?.added) + ? contactsField.added + : [] + const skipped = Array.isArray(contactsField?.skipped) ? contactsField.skipped : [] + const rawSkippedIds = data?.skipped_contact_ids + const skippedIds = + Array.isArray(rawSkippedIds) || (rawSkippedIds && typeof rawSkippedIds === 'object') + ? 
rawSkippedIds + : null + return { success: true, output: { - contacts_added: data.contacts || params?.contact_ids || [], - sequence_id: params?.sequence_id || '', - total_added: data.contacts?.length || params?.contact_ids?.length || 0, + added, + skipped, + skipped_contact_ids: skippedIds, + emailer_campaign: data?.emailer_campaign ?? null, + sequence_id: params?.sequence_id || data?.emailer_campaign?.id || '', + total_added: added.length, + total_skipped: skipped.length, }, } }, outputs: { - contacts_added: { type: 'json', description: 'Array of contact IDs added to the sequence' }, + added: { + type: 'json', + description: 'Array of contact objects successfully added to the sequence', + }, + skipped: { + type: 'json', + description: 'Array of contact objects that were skipped, with reasons', + }, + skipped_contact_ids: { + type: 'json', + description: + 'Skipped contact IDs — either an array of IDs or a hash mapping ID → reason code', + optional: true, + }, + emailer_campaign: { + type: 'json', + description: 'Details of the emailer campaign (id, name)', + optional: true, + }, sequence_id: { type: 'string', description: 'ID of the sequence contacts were added to' }, total_added: { type: 'number', description: 'Total number of contacts added' }, + total_skipped: { type: 'number', description: 'Total number of contacts skipped' }, }, } diff --git a/apps/sim/tools/apollo/sequence_search.ts b/apps/sim/tools/apollo/sequence_search.ts index 70c5a474d98..c7ed1a8b8ad 100644 --- a/apps/sim/tools/apollo/sequence_search.ts +++ b/apps/sim/tools/apollo/sequence_search.ts @@ -23,12 +23,6 @@ export const apolloSequenceSearchTool: ToolConfig< visibility: 'user-or-llm', description: 'Search sequences by name (e.g., "Outbound Q1", "Follow-up")', }, - active: { - type: 'boolean', - required: false, - visibility: 'user-or-llm', - description: 'Filter by active status (true for active sequences, false for inactive)', - }, page: { type: 'number', required: false, @@ -52,12 +46,11 @@ 
export const apolloSequenceSearchTool: ToolConfig< 'X-Api-Key': params.apiKey, }), body: (params: ApolloSequenceSearchParams) => { - const body: any = { + const body: Record = { page: params.page || 1, per_page: Math.min(params.per_page || 25, 100), } if (params.q_name) body.q_name = params.q_name - if (params.active !== undefined) body.active = params.active return body }, }, diff --git a/apps/sim/tools/apollo/task_create.ts b/apps/sim/tools/apollo/task_create.ts index d835fe67574..f53b4599e76 100644 --- a/apps/sim/tools/apollo/task_create.ts +++ b/apps/sim/tools/apollo/task_create.ts @@ -4,7 +4,7 @@ import type { ToolConfig } from '@/tools/types' export const apolloTaskCreateTool: ToolConfig = { id: 'apollo_task_create', name: 'Apollo Create Task', - description: 'Create a new task in Apollo', + description: 'Create one or more tasks in Apollo (one task per contact_id, master key required)', version: '1.0.0', params: { @@ -14,41 +14,48 @@ export const apolloTaskCreateTool: ToolConfig { - const body: any = { note: params.note } - if (params.contact_id) body.contact_id = params.contact_id - if (params.account_id) body.account_id = params.account_id - if (params.due_at) body.due_at = params.due_at - if (params.priority) body.priority = params.priority - if (params.type) body.type = params.type + const body: Record = { + user_id: params.user_id, + contact_ids: params.contact_ids, + priority: params.priority || 'medium', + due_at: params.due_at, + type: params.type, + status: params.status, + } + if (params.note) body.note = params.note return body }, }, @@ -77,21 +87,20 @@ export const apolloTaskCreateTool: ToolConfig null) + const tasks = Array.isArray(data?.tasks) ? data.tasks : [] - // Apollo's task creation endpoint currently only returns true, not the task object - // Return the request params as the task data since the API doesn't return it return { success: true, output: { - task: data.task ?? 
null, - created: data === true || !!data.task, + tasks, + created: true, }, } }, outputs: { - task: { type: 'json', description: 'Created task data from Apollo', optional: true }, - created: { type: 'boolean', description: 'Whether the task was successfully created' }, + tasks: { type: 'json', description: 'Array of created tasks (when returned by Apollo)' }, + created: { type: 'boolean', description: 'Whether the request succeeded' }, }, } diff --git a/apps/sim/tools/apollo/task_search.ts b/apps/sim/tools/apollo/task_search.ts index f85a16851de..6c05ad60db8 100644 --- a/apps/sim/tools/apollo/task_search.ts +++ b/apps/sim/tools/apollo/task_search.ts @@ -14,23 +14,18 @@ export const apolloTaskSearchTool: ToolConfig { - const body: any = { + const body: Record = { page: params.page || 1, per_page: Math.min(params.per_page || 25, 100), } - if (params.contact_id) body.contact_id = params.contact_id - if (params.account_id) body.account_id = params.account_id - if (params.completed !== undefined) body.completed = params.completed + if (params.sort_by_field) body.sort_by_field = params.sort_by_field + if (params.open_factor_names?.length) body.open_factor_names = params.open_factor_names return body }, }, @@ -77,7 +71,7 @@ export const apolloTaskSearchTool: ToolConfig created_at: string } export interface ApolloTask { id: string - note: string + user_id?: string contact_id?: string account_id?: string + type?: string + priority?: string + status?: string due_at?: string - completed: boolean - created_at: string + note?: string + created_at?: string + updated_at?: string } export interface ApolloOpportunity { id: string + team_id?: string name: string - account_id: string - amount?: number - stage_id?: string - owner_id?: string - close_date?: string - description?: string + account_id?: string | null + owner_id?: string | null + salesforce_owner_id?: string | null + amount?: number | string | null + amount_in_team_currency?: number | null + forecasted_revenue?: number | 
null + exchange_rate_code?: string + exchange_rate_value?: number + closed_date?: string | null + actual_close_date?: string | null + description?: string | null + is_closed?: boolean + is_won?: boolean + stage_name?: string | null + opportunity_stage_id?: string | null + opportunity_pipeline_id?: string | null + source?: string + salesforce_id?: string | null + forecast_category?: string + deal_probability?: number + probability?: number | null + created_by_id?: string + stage_updated_at?: string + next_step?: string | null + next_step_date?: string | null + closed_lost_reason?: string | null + closed_won_reason?: string | null + last_activity_date?: string + existence_level?: string + typed_custom_fields?: Record + opportunity_rule_config_statuses?: unknown[] + opportunity_contact_roles?: unknown[] + currency?: { name?: string; iso_code?: string; symbol?: string } + account?: { id?: string; name?: string; website_url?: string | null } created_at: string + updated_at?: string } interface ApolloBaseParams { @@ -77,10 +119,15 @@ interface ApolloBaseParams { // People Search Types export interface ApolloPeopleSearchParams extends ApolloBaseParams { person_titles?: string[] + include_similar_titles?: boolean person_locations?: string[] person_seniorities?: string[] organization_ids?: string[] organization_names?: string[] + organization_locations?: string[] + q_organization_domains_list?: string[] + organization_num_employees_ranges?: string[] + contact_email_status?: string[] q_keywords?: string page?: number per_page?: number @@ -99,17 +146,21 @@ export interface ApolloPeopleSearchResponse extends ToolResponse { export interface ApolloPeopleEnrichParams extends ApolloBaseParams { first_name?: string last_name?: string + name?: string + id?: string + hashed_email?: string organization_name?: string email?: string domain?: string linkedin_url?: string reveal_personal_emails?: boolean reveal_phone_number?: boolean + webhook_url?: string } export interface 
ApolloPeopleEnrichResponse extends ToolResponse { output: { - person: ApolloPerson + person: ApolloPerson | null enriched: boolean } } @@ -119,28 +170,38 @@ export interface ApolloPeopleBulkEnrichParams extends ApolloBaseParams { people: Array<{ first_name?: string last_name?: string - organization_name?: string + name?: string email?: string + hashed_email?: string + organization_name?: string domain?: string + id?: string + linkedin_url?: string }> reveal_personal_emails?: boolean reveal_phone_number?: boolean + webhook_url?: string } export interface ApolloPeopleBulkEnrichResponse extends ToolResponse { output: { - people: ApolloPerson[] - total: number - enriched: number + matches: Array + total_requested_enrichments: number + unique_enriched_records: number + missing_records: number | null + credits_consumed: number | null } } // Organization Search Types export interface ApolloOrganizationSearchParams extends ApolloBaseParams { organization_locations?: string[] + organization_not_locations?: string[] organization_num_employees_ranges?: string[] q_organization_keyword_tags?: string[] q_organization_name?: string + organization_ids?: string[] + q_organization_domains_list?: string[] page?: number per_page?: number } @@ -156,23 +217,19 @@ export interface ApolloOrganizationSearchResponse extends ToolResponse { // Organization Enrichment Types export interface ApolloOrganizationEnrichParams extends ApolloBaseParams { - organization_name?: string - domain?: string + domain: string } export interface ApolloOrganizationEnrichResponse extends ToolResponse { output: { - organization: ApolloOrganization + organization: ApolloOrganization | null enriched: boolean } } // Bulk Organization Enrichment Types export interface ApolloOrganizationBulkEnrichParams extends ApolloBaseParams { - organizations: Array<{ - organization_name?: string - domain?: string - }> + organizations: Array<{ name: string; domain?: string }> } export interface ApolloOrganizationBulkEnrichResponse 
extends ToolResponse { @@ -180,6 +237,8 @@ export interface ApolloOrganizationBulkEnrichResponse extends ToolResponse { organizations: ApolloOrganization[] total: number enriched: number + missing_records: number + unique_domains: number } } @@ -191,6 +250,18 @@ export interface ApolloContactCreateParams extends ApolloBaseParams { title?: string account_id?: string owner_id?: string + organization_name?: string + website_url?: string + label_names?: string[] + contact_stage_id?: string + present_raw_address?: string + direct_phone?: string + corporate_phone?: string + mobile_phone?: string + home_phone?: string + other_phone?: string + typed_custom_fields?: Record + run_dedupe?: boolean } export interface ApolloContactCreateResponse extends ToolResponse { @@ -209,6 +280,17 @@ export interface ApolloContactUpdateParams extends ApolloBaseParams { title?: string account_id?: string owner_id?: string + organization_name?: string + website_url?: string + label_names?: string[] + contact_stage_id?: string + present_raw_address?: string + direct_phone?: string + corporate_phone?: string + mobile_phone?: string + home_phone?: string + other_phone?: string + typed_custom_fields?: Record } export interface ApolloContactUpdateResponse extends ToolResponse { @@ -221,14 +303,26 @@ export interface ApolloContactUpdateResponse extends ToolResponse { // Contact Bulk Create Types export interface ApolloContactBulkCreateParams extends ApolloBaseParams { contacts: Array<{ - first_name: string - last_name: string + first_name?: string + last_name?: string email?: string title?: string + organization_name?: string account_id?: string owner_id?: string + contact_stage_id?: string + linkedin_url?: string + phone?: string + phone_numbers?: Array<{ raw_number: string; position?: number }> + contact_emails?: Array<{ email: string; position?: number }> + salesforce_contact_id?: string + hubspot_id?: string + team_id?: string + typed_custom_fields?: Record + [key: string]: unknown }> 
run_dedupe?: boolean + append_label_names?: string[] } export interface ApolloContactBulkCreateResponse extends ToolResponse { @@ -243,24 +337,15 @@ export interface ApolloContactBulkCreateResponse extends ToolResponse { // Contact Bulk Update Types export interface ApolloContactBulkUpdateParams extends ApolloBaseParams { - contacts: Array<{ - id: string - first_name?: string - last_name?: string - email?: string - title?: string - account_id?: string - owner_id?: string - }> + contact_ids?: string[] + contact_attributes?: Array<{ id: string; [key: string]: unknown }> | Record + async?: boolean } export interface ApolloContactBulkUpdateResponse extends ToolResponse { output: { - updated_contacts: ApolloContact[] - failed_contacts: Array<{ id: string; error: string }> - total_submitted: number - updated: number - failed: number + message: string | null + job_id: string | null } } @@ -268,6 +353,9 @@ export interface ApolloContactBulkUpdateResponse extends ToolResponse { export interface ApolloContactSearchParams extends ApolloBaseParams { q_keywords?: string contact_stage_ids?: string[] + contact_label_ids?: string[] + sort_by_field?: string + sort_ascending?: boolean page?: number per_page?: number } @@ -281,7 +369,7 @@ export interface ApolloPagination { export interface ApolloContactSearchResponse extends ToolResponse { output: { - contacts: ApolloContact[] | null + contacts: ApolloContact[] pagination: ApolloPagination | null } } @@ -289,9 +377,12 @@ export interface ApolloContactSearchResponse extends ToolResponse { // Account Create Types export interface ApolloAccountCreateParams extends ApolloBaseParams { name: string - website_url?: string + domain?: string phone?: string owner_id?: string + account_stage_id?: string + raw_address?: string + typed_custom_fields?: Record } export interface ApolloAccountCreateResponse extends ToolResponse { @@ -305,9 +396,12 @@ export interface ApolloAccountCreateResponse extends ToolResponse { export interface 
ApolloAccountUpdateParams extends ApolloBaseParams { account_id: string name?: string - website_url?: string + domain?: string phone?: string owner_id?: string + account_stage_id?: string + raw_address?: string + typed_custom_fields?: Record } export interface ApolloAccountUpdateResponse extends ToolResponse { @@ -319,16 +413,18 @@ export interface ApolloAccountUpdateResponse extends ToolResponse { // Account Search Types export interface ApolloAccountSearchParams extends ApolloBaseParams { - q_keywords?: string - owner_id?: string + q_organization_name?: string account_stage_ids?: string[] + account_label_ids?: string[] + sort_by_field?: string + sort_ascending?: boolean page?: number per_page?: number } export interface ApolloAccountSearchResponse extends ToolResponse { output: { - accounts: ApolloAccount[] | null + accounts: ApolloAccount[] pagination: ApolloPagination | null } } @@ -336,89 +432,128 @@ export interface ApolloAccountSearchResponse extends ToolResponse { // Account Bulk Create Types export interface ApolloAccountBulkCreateParams extends ApolloBaseParams { accounts: Array<{ - name: string - website_url?: string + name?: string + domain?: string phone?: string + phone_status_cd?: string + raw_address?: string owner_id?: string + linkedin_url?: string + facebook_url?: string + twitter_url?: string + salesforce_id?: string + hubspot_id?: string + [key: string]: unknown }> + append_label_names?: string[] + run_dedupe?: boolean } export interface ApolloAccountBulkCreateResponse extends ToolResponse { output: { created_accounts: ApolloAccount[] - failed_accounts: Array<{ name: string; error: string }> + existing_accounts: ApolloAccount[] + failed_accounts: Array> total_submitted: number created: number + existing: number failed: number } } // Account Bulk Update Types export interface ApolloAccountBulkUpdateParams extends ApolloBaseParams { - accounts: Array<{ - id: string - name?: string - website_url?: string - phone?: string - owner_id?: string - }> + 
account_ids?: string[] + name?: string + owner_id?: string + account_attributes?: Array<{ id: string; [key: string]: unknown }> | Record + async?: boolean } export interface ApolloAccountBulkUpdateResponse extends ToolResponse { output: { - updated_accounts: ApolloAccount[] - failed_accounts: Array<{ id: string; error: string }> - total_submitted: number - updated: number - failed: number + message: string | null + account_ids: string[] } } // Sequence Add Contacts Types export interface ApolloSequenceAddContactsParams extends ApolloBaseParams { sequence_id: string - contact_ids: string[] - emailer_campaign_id?: string - send_email_from_user_id?: string + contact_ids?: string[] + label_names?: string[] + send_email_from_email_account_id: string + send_email_from_email_address?: string + sequence_no_email?: boolean + sequence_unverified_email?: boolean + sequence_job_change?: boolean + sequence_active_in_other_campaigns?: boolean + sequence_finished_in_other_campaigns?: boolean + sequence_same_company_in_same_campaign?: boolean + contacts_without_ownership_permission?: boolean + add_if_in_queue?: boolean + contact_verification_skipped?: boolean + user_id?: string + status?: string + auto_unpause_at?: string +} + +export interface ApolloSequenceAddedContact { + id: string + first_name?: string + last_name?: string + email?: string + status?: string + opened_rate?: number | null + replied_rate?: number | null +} + +export interface ApolloSequenceSkippedContact { + id: string + reason: string } export interface ApolloSequenceAddContactsResponse extends ToolResponse { output: { - contacts_added: string[] + added: ApolloSequenceAddedContact[] + skipped: ApolloSequenceSkippedContact[] + skipped_contact_ids: string[] | Record | null + emailer_campaign: { id: string; name: string } | null sequence_id: string total_added: number + total_skipped: number } } // Task Create Types export interface ApolloTaskCreateParams extends ApolloBaseParams { - note: string - contact_id?: 
string - account_id?: string - due_at?: string + user_id: string + contact_ids: string[] priority?: string - type?: string + due_at: string + type: string + status: string + note?: string } export interface ApolloTaskCreateResponse extends ToolResponse { output: { - task: ApolloTask | null + tasks: ApolloTask[] created: boolean } } // Task Search Types export interface ApolloTaskSearchParams extends ApolloBaseParams { - contact_id?: string - account_id?: string - completed?: boolean + sort_by_field?: string + open_factor_names?: string[] page?: number per_page?: number } export interface ApolloTaskSearchResponse extends ToolResponse { output: { - tasks: ApolloTask[] | null + tasks: ApolloTask[] pagination: ApolloPagination | null } } @@ -426,13 +561,18 @@ export interface ApolloTaskSearchResponse extends ToolResponse { // Email Accounts List Types export interface ApolloEmailAccountsParams extends ApolloBaseParams {} +export interface ApolloEmailAccount { + id: string | number + email: string + type?: string + active?: boolean + default?: boolean + linked_at?: string | null +} + export interface ApolloEmailAccountsResponse extends ToolResponse { output: { - email_accounts: Array<{ - id: string - email: string - active: boolean - }> + email_accounts: ApolloEmailAccount[] total: number } } @@ -440,12 +580,12 @@ export interface ApolloEmailAccountsResponse extends ToolResponse { // Opportunity Create Types export interface ApolloOpportunityCreateParams extends ApolloBaseParams { name: string - account_id: string - amount?: number - stage_id?: string + account_id?: string + amount?: string + opportunity_stage_id?: string owner_id?: string - close_date?: string - description?: string + closed_date?: string + typed_custom_fields?: Record } export interface ApolloOpportunityCreateResponse extends ToolResponse { @@ -457,10 +597,7 @@ export interface ApolloOpportunityCreateResponse extends ToolResponse { // Opportunity Search Types export interface 
ApolloOpportunitySearchParams extends ApolloBaseParams { - q_keywords?: string - account_ids?: string[] - stage_ids?: string[] - owner_ids?: string[] + sort_by_field?: string page?: number per_page?: number } @@ -481,7 +618,7 @@ export interface ApolloOpportunityGetParams extends ApolloBaseParams { export interface ApolloOpportunityGetResponse extends ToolResponse { output: { - opportunity: ApolloOpportunity + opportunity: ApolloOpportunity | null found: boolean } } @@ -490,11 +627,11 @@ export interface ApolloOpportunityGetResponse extends ToolResponse { export interface ApolloOpportunityUpdateParams extends ApolloBaseParams { opportunity_id: string name?: string - amount?: number - stage_id?: string + amount?: string + opportunity_stage_id?: string owner_id?: string - close_date?: string - description?: string + closed_date?: string + typed_custom_fields?: Record } export interface ApolloOpportunityUpdateResponse extends ToolResponse { @@ -520,7 +657,6 @@ export interface ApolloSequence { // Sequence Search Types export interface ApolloSequenceSearchParams extends ApolloBaseParams { q_name?: string - active?: boolean page?: number per_page?: number } From a2aa648f86a5afb955dd37e1a3c11f6fca58ae53 Mon Sep 17 00:00:00 2001 From: Waleed Date: Thu, 7 May 2026 20:01:09 -0700 Subject: [PATCH 12/33] improvement(sandbox): upgrade pptx/docx/pdf bootstrap with image helpers, MIME guards, and 256 MB isolate limit (#4505) * improvement(sandbox): upgrade pptx/docx/pdf bootstrap with image helpers, MIME guards, and 256 MB isolate limit * fix(sandbox): strict MIME allowlist and nullish coalescing in docx addImage * fix(sandbox): validate required opts in pdf drawImage to prevent silent origin placement * fix(sandbox): throw on malformed data URI in docx addImage * fix(sandbox): prevent opts from clobbering computed ImageRun data/type/transformation * fix(sandbox): prevent opts from clobbering fetched data in pptx addImage * fix(sandbox): validate required opts in pptx addImage * 
fix(sandbox): remove silent image/png fallback in docx addImage MIME parsing * fix(sandbox): consistency and cleanup pass on doc-gen tasks and worker - DOCX addImage: upfront width/height validation (matches PDF/PPTX pattern) - PDF embedImage: remove dead Buffer ternary; drop redundant size guard already enforced in getFileBase64 - isolated-vm-worker: add friendly MemoryLimitError branch in both execute paths so OOM produces a clear message instead of a raw V8 error --- apps/sim/lib/execution/isolated-vm-worker.cjs | 36 ++++++++++- apps/sim/sandbox-tasks/docx-generate.ts | 56 ++++++++++++++++- apps/sim/sandbox-tasks/pdf-generate.ts | 63 +++++++++++++++++-- apps/sim/sandbox-tasks/pptx-generate.ts | 47 +++++++++++++- 4 files changed, 191 insertions(+), 11 deletions(-) diff --git a/apps/sim/lib/execution/isolated-vm-worker.cjs b/apps/sim/lib/execution/isolated-vm-worker.cjs index 0bca38e4c2b..aa23858e151 100644 --- a/apps/sim/lib/execution/isolated-vm-worker.cjs +++ b/apps/sim/lib/execution/isolated-vm-worker.cjs @@ -183,7 +183,7 @@ async function executeCode(request, executionId) { const externalCopies = [] try { - isolate = new ivm.Isolate({ memoryLimit: 128 }) + isolate = new ivm.Isolate({ memoryLimit: 256 }) if (executionId !== undefined) activeIsolates.set(executionId, isolate) context = await isolate.createContext() const jail = context.global @@ -398,6 +398,21 @@ async function executeCode(request, executionId) { } } + if ( + err.message.includes('Array buffer allocation failed') || + err.message.includes('memory limit') + ) { + return { + result: null, + stdout, + error: { + message: + 'Execution exceeded memory limit (256 MB). 
Reduce image sizes or split the work into smaller batches.', + name: 'MemoryLimitError', + }, + } + } + return { result: null, stdout, @@ -511,7 +526,7 @@ async function executeTask(request, executionId) { let tPhase = tStart try { - isolate = new ivm.Isolate({ memoryLimit: 128 }) + isolate = new ivm.Isolate({ memoryLimit: 256 }) if (executionId !== undefined) activeIsolates.set(executionId, isolate) context = await isolate.createContext() const jail = context.global @@ -937,6 +952,23 @@ async function executeTask(request, executionId) { timings, } } + + if ( + err.message?.includes('Array buffer allocation failed') || + err.message?.includes('memory limit') + ) { + return { + result: null, + stdout, + error: { + message: + 'Execution exceeded memory limit (256 MB). Reduce image sizes or split the work into smaller batches.', + name: 'MemoryLimitError', + }, + timings, + } + } + return { result: null, stdout, diff --git a/apps/sim/sandbox-tasks/docx-generate.ts b/apps/sim/sandbox-tasks/docx-generate.ts index 04efa68abeb..214b9f8f41f 100644 --- a/apps/sim/sandbox-tasks/docx-generate.ts +++ b/apps/sim/sandbox-tasks/docx-generate.ts @@ -15,10 +15,64 @@ export const docxGenerateTask = defineSandboxTask({ globalThis.addSection = (section) => { globalThis.__docxSections.push(section); }; - globalThis.getFileBase64 = async (fileId) => { + + // Page geometry constants (twips, 1 twip = 1/1440 inch) for US Letter + globalThis.PAGE_W = 12240; // 8.5" + globalThis.PAGE_H = 15840; // 11" + globalThis.MARGIN = 1440; // 1" margins + globalThis.CONTENT_W = 9360; // PAGE_W - 2 * MARGIN + + // 6 MB raw ≈ 8 MB base64; reject above this to avoid sandbox OOM. + const _MAX_IMG_B64 = 8 * 1024 * 1024; + + /** + * getFileBase64(fileId) — load a workspace file as a full data URI string. + * Returns the complete "data:image/png;base64,..." string. + * Use addImage() rather than passing this directly to ImageRun. 
+ */ + globalThis.getFileBase64 = async function getFileBase64(fileId) { + if (!fileId || typeof fileId !== 'string') { + throw new Error('getFileBase64: fileId must be a non-empty string'); + } const res = await globalThis.__brokers.workspaceFile({ fileId }); + if (!res || !res.dataUri) { + throw new Error('getFileBase64: broker returned no data for file ' + fileId); + } + if (res.dataUri.length > _MAX_IMG_B64) { + throw new Error( + 'getFileBase64: image exceeds the 6 MB embed limit (~8 MB base64). Use a smaller/compressed image.' + ); + } return res.dataUri; }; + + /** + * addImage(fileId, opts) — fetch a workspace file and return a docx.ImageRun. + * Required opts: width, height (pixels or EMUs via transformation option). + * Example: + * new docx.Paragraph({ children: [await addImage('abc123', { width: 200, height: 100 })] }) + */ + globalThis.addImage = async function addImage(fileId, opts) { + if (!opts || opts.width == null || opts.height == null) { + throw new Error('addImage: opts must include width and height (in pixels)'); + } + const dataUri = await globalThis.getFileBase64(fileId); + const comma = dataUri.indexOf(','); + if (comma === -1) throw new Error('addImage: invalid data URI (no comma separator)'); + const header = dataUri.slice(0, comma); + const base64 = dataUri.slice(comma + 1); + const mime = header.split(';')[0].replace('data:', ''); + const extMap = { 'image/png': 'png', 'image/jpeg': 'jpg', 'image/jpg': 'jpg', 'image/gif': 'gif', 'image/bmp': 'bmp', 'image/svg+xml': 'svg' }; + const ext = extMap[mime]; + if (!ext) throw new Error('addImage: unsupported image type "' + mime + '". 
Use PNG, JPEG, GIF, BMP, or SVG.'); + if (!globalThis.Buffer) throw new Error('addImage: Buffer polyfill missing — ensure docx bundle is loaded'); + const { width, height, type: _t, data: _d, transformation: userTransform, ...passThrough } = opts; + return new globalThis.docx.ImageRun(Object.assign(passThrough, { + data: globalThis.Buffer.from(base64, 'base64'), + type: ext, + transformation: Object.assign({ width, height }, userTransform || {}), + })); + }; `, // JSZip's browser build doesn't support nodebuffer output, so we go through // base64 and decode back to bytes inside the isolate (avoids DataURL / Blob). diff --git a/apps/sim/sandbox-tasks/pdf-generate.ts b/apps/sim/sandbox-tasks/pdf-generate.ts index 4a6cbc26f25..a7f23e710f1 100644 --- a/apps/sim/sandbox-tasks/pdf-generate.ts +++ b/apps/sim/sandbox-tasks/pdf-generate.ts @@ -12,20 +12,71 @@ export const pdfGenerateTask = defineSandboxTask({ if (!PDFLib) throw new Error('pdf-lib bundle not loaded'); globalThis.PDFLib = PDFLib; globalThis.pdf = await PDFLib.PDFDocument.create(); - globalThis.embedImage = async (dataUri) => { + + // Convenience shortcuts — avoids verbose PDFLib.rgb() / PDFLib.StandardFonts.Helvetica + globalThis.rgb = PDFLib.rgb; + globalThis.StandardFonts = PDFLib.StandardFonts; + + // Page-size constants in points (1pt = 1/72 inch) + globalThis.LETTER = [612, 792]; // 8.5" × 11" + globalThis.A4 = [595.28, 841.89]; // 210mm × 297mm + + // 6 MB raw ≈ 8 MB base64; reject above this to avoid sandbox OOM. + const _MAX_IMG_B64 = 8 * 1024 * 1024; + + /** + * embedImage(dataUri) — embed a data-URI image into the active PDF document. + * Dispatches to embedPng or embedJpg based on MIME type. 
+ */ + globalThis.embedImage = async function embedImage(dataUri) { + if (!dataUri || typeof dataUri !== 'string') { + throw new Error('embedImage: dataUri must be a non-empty string'); + } const comma = dataUri.indexOf(','); + if (comma === -1) throw new Error('embedImage: invalid data URI (no comma separator)'); const header = dataUri.slice(0, comma); const base64 = dataUri.slice(comma + 1); - const binary = globalThis.Buffer ? globalThis.Buffer.from(base64, 'base64') : null; - if (!binary) throw new Error('Buffer polyfill missing'); + if (!globalThis.Buffer) throw new Error('embedImage: Buffer polyfill missing'); + const binary = globalThis.Buffer.from(base64, 'base64'); const mime = header.split(';')[0].split(':')[1] || ''; - if (mime.includes('png')) return globalThis.pdf.embedPng(binary); - return globalThis.pdf.embedJpg(binary); + // image/jpg is non-standard but tolerated; the canonical MIME is image/jpeg + if (mime === 'image/png') return globalThis.pdf.embedPng(binary); + if (mime === 'image/jpeg' || mime === 'image/jpg') return globalThis.pdf.embedJpg(binary); + throw new Error('embedImage: only PNG and JPEG are supported (got ' + (mime || 'unknown — check data URI header') + ')'); }; - globalThis.getFileBase64 = async (fileId) => { + + /** + * getFileBase64(fileId) — load a workspace file as a data URI string. + */ + globalThis.getFileBase64 = async function getFileBase64(fileId) { + if (!fileId || typeof fileId !== 'string') { + throw new Error('getFileBase64: fileId must be a non-empty string'); + } const res = await globalThis.__brokers.workspaceFile({ fileId }); + if (!res || !res.dataUri) { + throw new Error('getFileBase64: broker returned no data for file ' + fileId); + } + if (res.dataUri.length > _MAX_IMG_B64) { + throw new Error( + 'getFileBase64: image exceeds the 6 MB embed limit (~8 MB base64). Use a smaller/compressed image.' 
+ ); + } return res.dataUri; }; + + /** + * drawImage(page, fileId, opts) — fetch a workspace file and draw it on the given page. + * Required opts: x, y, width, height (points). + * Example: await drawImage(page, 'abc123', { x: 50, y: 700, width: 200, height: 100 }); + */ + globalThis.drawImage = async function drawImage(page, fileId, opts) { + if (!opts || opts.x == null || opts.y == null || opts.width == null || opts.height == null) { + throw new Error('drawImage: opts must include x, y, width, and height (in points)'); + } + const dataUri = await globalThis.getFileBase64(fileId); + const img = await globalThis.embedImage(dataUri); + page.drawImage(img, opts); + }; `, finalize: ` const pdf = globalThis.pdf; diff --git a/apps/sim/sandbox-tasks/pptx-generate.ts b/apps/sim/sandbox-tasks/pptx-generate.ts index bca608791dd..986954da8d6 100644 --- a/apps/sim/sandbox-tasks/pptx-generate.ts +++ b/apps/sim/sandbox-tasks/pptx-generate.ts @@ -11,9 +11,52 @@ export const pptxGenerateTask = defineSandboxTask({ const PptxGenJS = globalThis.__bundles['pptxgenjs']; if (!PptxGenJS) throw new Error('pptxgenjs bundle not loaded'); globalThis.pptx = new PptxGenJS(); - globalThis.getFileBase64 = async (fileId) => { + globalThis.pptx.layout = 'LAYOUT_16x9'; + + // Slide geometry for LAYOUT_16x9 (inches) + globalThis.SLIDE_W = 10; + globalThis.SLIDE_H = 5.625; + globalThis.MARGIN = 0.5; + globalThis.CONTENT_W = 9; // SLIDE_W - 2 * MARGIN + globalThis.CONTENT_H = 3.8; // usable body height below a standard title row + + // ── Image helpers ────────────────────────────────────────────────────────── + // 6 MB raw ≈ 8 MB base64; reject above this to avoid sandbox OOM. + const _MAX_IMG_B64 = 8 * 1024 * 1024; + + /** + * getFileBase64(fileId) — load a workspace file as a data URI string. + * PptxGenJS data format: "image/png;base64," (no "data:" prefix). 
+ * Use as: slide.addImage({ data: await getFileBase64(fileId), x, y, w, h }) + */ + globalThis.getFileBase64 = async function getFileBase64(fileId) { + if (!fileId || typeof fileId !== 'string') { + throw new Error('getFileBase64: fileId must be a non-empty string'); + } const res = await globalThis.__brokers.workspaceFile({ fileId }); - return res.dataUri; + if (!res || !res.dataUri) { + throw new Error('getFileBase64: broker returned no data for file ' + fileId); + } + if (res.dataUri.length > _MAX_IMG_B64) { + throw new Error( + 'getFileBase64: image exceeds the 6 MB embed limit (~8 MB base64). Use a smaller/compressed image.' + ); + } + // PptxGenJS expects "image/png;base64,..." — strip the leading "data:" if present + return res.dataUri.replace(/^data:/, ''); + }; + + /** + * addImage(slide, fileId, opts) — fetch a workspace file and embed it. + * Required opts: x, y, w, h (inches). + * Example: await addImage(slide, 'abc123', { x: 0.5, y: 1, w: 2, h: 1 }); + */ + globalThis.addImage = async function addImage(slide, fileId, opts) { + if (!opts || opts.x == null || opts.y == null || opts.w == null || opts.h == null) { + throw new Error('addImage: opts must include x, y, w, and h (in inches)'); + } + const data = await globalThis.getFileBase64(fileId); + slide.addImage(Object.assign({}, opts, { data })); }; `, finalize: ` From 235a62fd957d5347c964f9006487bed6f8146892 Mon Sep 17 00:00:00 2001 From: Waleed Date: Thu, 7 May 2026 20:01:35 -0700 Subject: [PATCH 13/33] fix(hunter): align tools, block, and outputs with Hunter.io v2 API spec (#4511) * fix(hunter): align tools, block, and outputs with Hunter.io v2 API spec * fix(hunter): match documented Discover response and coerce numeric employees --- apps/docs/content/docs/en/tools/hunter.mdx | 68 +++-- .../integrations/data/integrations.json | 2 +- apps/sim/blocks/blocks/hunter.ts | 160 ++++++++++- apps/sim/tools/hunter/companies_find.ts | 60 ++-- apps/sim/tools/hunter/discover.ts | 27 +- 
apps/sim/tools/hunter/domain_search.ts | 121 +++----- apps/sim/tools/hunter/email_finder.ts | 35 ++- apps/sim/tools/hunter/hunter.test.ts | 269 ++++++++++++++++++ apps/sim/tools/hunter/types.ts | 162 ++++------- 9 files changed, 639 insertions(+), 265 deletions(-) create mode 100644 apps/sim/tools/hunter/hunter.test.ts diff --git a/apps/docs/content/docs/en/tools/hunter.mdx b/apps/docs/content/docs/en/tools/hunter.mdx index d1ea0fff4aa..a87b1f9e151 100644 --- a/apps/docs/content/docs/en/tools/hunter.mdx +++ b/apps/docs/content/docs/en/tools/hunter.mdx @@ -1,5 +1,5 @@ --- -title: Hunter io +title: Hunter.io description: Find and verify professional email addresses --- @@ -53,11 +53,16 @@ Returns companies matching a set of criteria using Hunter.io AI-powered search. | Parameter | Type | Description | | --------- | ---- | ----------- | | `results` | array | List of companies matching the search criteria | +| ↳ `name` | string | Company name | | ↳ `domain` | string | Company domain | -| ↳ `name` | string | Company/organization name | -| ↳ `headcount` | number | Company size/headcount | -| ↳ `technologies` | array | Technologies used by the company | -| ↳ `email_count` | number | Total number of email addresses found | +| ↳ `logo` | string | URL of the company logo | +| ↳ `linkedin_url` | string | LinkedIn profile URL of the company | +| ↳ `company_type` | string | Company type \(e.g., privately held, public company\) | +| ↳ `industry` | string | Industry of the company | +| ↳ `size` | string | Headcount range of the company | +| ↳ `location` | string | Headquarters location | +| ↳ `founded_year` | number | Year the company was founded | +| ↳ `crunchbase_url` | string | Crunchbase URL of the company | ### `hunter_domain_search` @@ -86,8 +91,9 @@ Returns all the email addresses found using one given domain name, with sources. 
| ↳ `first_name` | string | Person's first name | | ↳ `last_name` | string | Person's last name | | ↳ `position` | string | Job title/position | +| ↳ `position_raw` | string | Raw job title as found | | ↳ `seniority` | string | Seniority level \(junior, senior, executive\) | -| ↳ `department` | string | Department \(executive, it, finance, management, sales, legal, support, hr, marketing, communication\) | +| ↳ `department` | string | Department \(executive, it, finance, management, sales, legal, support, hr, marketing, communication, education, design, health, operations\) | | ↳ `linkedin` | string | LinkedIn profile URL | | ↳ `twitter` | string | Twitter handle | | ↳ `phone_number` | string | Phone number | @@ -106,19 +112,7 @@ Returns all the email addresses found using one given domain name, with sources. | `accept_all` | boolean | Whether the server accepts all email addresses \(may cause false positives\) | | `pattern` | string | The email pattern used by the organization \(e.g., \{first\}, \{first\}.\{last\}\) | | `organization` | string | The organization/company name | -| `description` | string | Description of the organization | -| `industry` | string | Industry classification of the organization | -| `twitter` | string | Twitter handle of the organization | -| `facebook` | string | Facebook page URL of the organization | -| `linkedin` | string | LinkedIn company page URL | -| `instagram` | string | Instagram profile of the organization | -| `youtube` | string | YouTube channel of the organization | -| `technologies` | array | Technologies used by the organization | -| `country` | string | Country where the organization is headquartered | -| `state` | string | State/province where the organization is located | -| `city` | string | City where the organization is located | -| `postal_code` | string | Postal code of the organization | -| `street` | string | Street address of the organization | +| `linked_domains` | array | Other domains linked to the 
organization | ### `hunter_email_finder` @@ -147,8 +141,17 @@ Finds the most likely email address for a person given their name and company do | `verification` | object | Email verification information | | ↳ `date` | string | Date when the email was verified \(YYYY-MM-DD\) | | ↳ `status` | string | Verification status \(valid, invalid, accept_all, webmail, disposable, unknown\) | +| `first_name` | string | Person's first name | +| `last_name` | string | Person's last name | | `email` | string | The found email address | | `score` | number | Confidence score \(0-100\) for the found email address | +| `domain` | string | Domain that was searched | +| `accept_all` | boolean | Whether the server accepts all email addresses \(may cause false positives\) | +| `position` | string | Job title/position | +| `twitter` | string | Twitter handle | +| `linkedin_url` | string | LinkedIn profile URL | +| `phone_number` | string | Phone number | +| `company` | string | Company name | ### `hunter_email_verifier` @@ -200,15 +203,24 @@ Enriches company data using domain name. 
| Parameter | Type | Description | | --------- | ---- | ----------- | -| `company` | object | Company information | -| ↳ `name` | string | Company name | -| ↳ `domain` | string | Company domain | -| ↳ `industry` | string | Industry classification | -| ↳ `size` | string | Company size/headcount range | -| ↳ `country` | string | Country where the company is located | -| ↳ `linkedin` | string | LinkedIn company page URL | -| ↳ `twitter` | string | Twitter handle | -| `person` | object | Person information \(undefined for companies_find tool\) | +| `name` | string | Company name | +| `domain` | string | Company domain | +| `description` | string | Company description | +| `industry` | string | Industry classification | +| `sector` | string | Business sector | +| `size` | string | Employee headcount range \(e.g., "11-50"\) | +| `founded_year` | number | Year founded | +| `location` | string | Headquarters location \(formatted\) | +| `country` | string | Country \(full name\) | +| `country_code` | string | ISO 3166-1 alpha-2 country code | +| `state` | string | State/province | +| `city` | string | City | +| `linkedin` | string | LinkedIn handle \(e.g., company/hunterio\) | +| `twitter` | string | Twitter handle | +| `facebook` | string | Facebook handle | +| `logo` | string | Company logo URL | +| `phone` | string | Company phone number | +| `tech` | array | Technologies used by the company | ### `hunter_email_count` diff --git a/apps/sim/app/(landing)/integrations/data/integrations.json b/apps/sim/app/(landing)/integrations/data/integrations.json index 9de36c28909..91d27d0e157 100644 --- a/apps/sim/app/(landing)/integrations/data/integrations.json +++ b/apps/sim/app/(landing)/integrations/data/integrations.json @@ -6526,7 +6526,7 @@ { "type": "hunter", "slug": "hunter-io", - "name": "Hunter io", + "name": "Hunter.io", "description": "Find and verify professional email addresses", "longDescription": "Integrate Hunter into the workflow. 
Can search domains, find email addresses, verify email addresses, discover companies, find companies, and count email addresses.", "bgColor": "#E0E0E0", diff --git a/apps/sim/blocks/blocks/hunter.ts b/apps/sim/blocks/blocks/hunter.ts index 01aac501357..4c26bb85d43 100644 --- a/apps/sim/blocks/blocks/hunter.ts +++ b/apps/sim/blocks/blocks/hunter.ts @@ -4,7 +4,7 @@ import type { HunterResponse } from '@/tools/hunter/types' export const HunterBlock: BlockConfig = { type: 'hunter', - name: 'Hunter io', + name: 'Hunter.io', description: 'Find and verify professional email addresses', authMode: AuthMode.ApiKey, longDescription: @@ -45,6 +45,15 @@ export const HunterBlock: BlockConfig = { type: 'short-input', placeholder: '10', condition: { field: 'operation', value: 'hunter_domain_search' }, + mode: 'advanced', + }, + { + id: 'offset', + title: 'Offset', + type: 'short-input', + placeholder: '0', + condition: { field: 'operation', value: 'hunter_domain_search' }, + mode: 'advanced', }, { id: 'type', @@ -57,6 +66,7 @@ export const HunterBlock: BlockConfig = { ], value: () => 'all', condition: { field: 'operation', value: 'hunter_domain_search' }, + mode: 'advanced', }, { id: 'seniority', @@ -70,6 +80,7 @@ export const HunterBlock: BlockConfig = { ], value: () => 'all', condition: { field: 'operation', value: 'hunter_domain_search' }, + mode: 'advanced', }, { id: 'department', @@ -77,6 +88,7 @@ export const HunterBlock: BlockConfig = { type: 'short-input', placeholder: 'e.g., sales, marketing, engineering', condition: { field: 'operation', value: 'hunter_domain_search' }, + mode: 'advanced', }, // Email Finder operation inputs { @@ -109,6 +121,7 @@ export const HunterBlock: BlockConfig = { type: 'short-input', placeholder: 'Enter company name', condition: { field: 'operation', value: 'hunter_email_finder' }, + mode: 'advanced', }, // Email Verifier operation inputs { @@ -146,6 +159,54 @@ Return ONLY the search query text - no explanations.`, type: 'short-input', 
placeholder: 'Filter by domain', condition: { field: 'operation', value: 'hunter_discover' }, + mode: 'advanced', + }, + { + id: 'headcount', + title: 'Headcount', + type: 'dropdown', + options: [ + { label: 'Any', id: '' }, + { label: '1-10', id: '1-10' }, + { label: '11-50', id: '11-50' }, + { label: '51-200', id: '51-200' }, + { label: '201-500', id: '201-500' }, + { label: '501-1000', id: '501-1000' }, + { label: '1001-5000', id: '1001-5000' }, + { label: '5001-10000', id: '5001-10000' }, + { label: '10001+', id: '10001+' }, + ], + value: () => '', + condition: { field: 'operation', value: 'hunter_discover' }, + mode: 'advanced', + }, + { + id: 'company_type', + title: 'Company Type', + type: 'dropdown', + options: [ + { label: 'Any', id: '' }, + { label: 'Educational', id: 'educational' }, + { label: 'Government Agency', id: 'government agency' }, + { label: 'Non Profit', id: 'non profit' }, + { label: 'Partnership', id: 'partnership' }, + { label: 'Privately Held', id: 'privately held' }, + { label: 'Public Company', id: 'public company' }, + { label: 'Self Employed', id: 'self employed' }, + { label: 'Self Owned', id: 'self owned' }, + { label: 'Sole Proprietorship', id: 'sole proprietorship' }, + ], + value: () => '', + condition: { field: 'operation', value: 'hunter_discover' }, + mode: 'advanced', + }, + { + id: 'technology', + title: 'Technology', + type: 'short-input', + placeholder: 'e.g., react, salesforce', + condition: { field: 'operation', value: 'hunter_discover' }, + mode: 'advanced', }, // Find Company operation inputs @@ -172,6 +233,7 @@ Return ONLY the search query text - no explanations.`, type: 'short-input', placeholder: 'Enter company name', condition: { field: 'operation', value: 'hunter_email_count' }, + mode: 'advanced', }, { id: 'type', @@ -184,6 +246,7 @@ Return ONLY the search query text - no explanations.`, ], value: () => 'all', condition: { field: 'operation', value: 'hunter_email_count' }, + mode: 'advanced', }, // API Key 
(common) { @@ -225,7 +288,14 @@ Return ONLY the search query text - no explanations.`, }, params: (params) => { const result: Record = {} - if (params.limit) result.limit = Number(params.limit) + for (const [key, value] of Object.entries(params)) { + if (value === undefined || value === null || value === '') continue + if (key === 'limit' || key === 'offset') { + result[key] = Number(value) + } else { + result[key] = value + } + } return result }, }, @@ -253,14 +323,88 @@ Return ONLY the search query text - no explanations.`, technology: { type: 'string', description: 'Technology filter' }, }, outputs: { - results: { type: 'json', description: 'Search results' }, - emails: { type: 'json', description: 'Email addresses found' }, + // Domain Search + domain: { type: 'string', description: 'Domain name' }, + organization: { type: 'string', description: 'Organization name (domain search)' }, + pattern: { type: 'string', description: 'Email pattern (e.g., {first}.{last})' }, + disposable: { type: 'boolean', description: 'Whether the domain is disposable' }, + webmail: { type: 'boolean', description: 'Whether the domain is a webmail provider' }, + accept_all: { type: 'boolean', description: 'Whether the server accepts all emails' }, + linked_domains: { type: 'array', description: 'Linked domains' }, + emails: { + type: 'array', + description: + 'List of emails found for the domain (value, type, confidence, first_name, last_name, position, seniority, department, linkedin, twitter, phone_number, sources, verification)', + }, + // Email Finder email: { type: 'string', description: 'Found email address' }, - score: { type: 'number', description: 'Confidence score' }, - result: { type: 'string', description: 'Verification result' }, - status: { type: 'string', description: 'Status message' }, - total: { type: 'number', description: 'Total results count' }, + score: { type: 'number', description: 'Confidence score (0-100)' }, + first_name: { type: 'string', description: 
'Person first name' }, + last_name: { type: 'string', description: 'Person last name' }, + position: { type: 'string', description: 'Job position' }, + linkedin_url: { type: 'string', description: 'LinkedIn profile URL (email-finder, discover)' }, + phone_number: { type: 'string', description: 'Phone number' }, + company: { type: 'string', description: 'Company name (email-finder)' }, + sources: { + type: 'array', + description: + 'Source pages where the email was found (domain, uri, extracted_on, last_seen_on, still_on_page)', + }, + verification: { + type: 'json', + description: 'Email verification information (date, status)', + }, + // Email Verifier + result: { + type: 'string', + description: 'Deliverability result (deliverable, undeliverable, risky)', + }, + status: { + type: 'string', + description: 'Verification status (valid, invalid, accept_all, webmail, disposable, unknown)', + }, + regexp: { type: 'boolean', description: 'Email passes regex validation' }, + gibberish: { type: 'boolean', description: 'Whether email looks auto-generated' }, + mx_records: { type: 'boolean', description: 'MX records exist for the domain' }, + smtp_server: { type: 'boolean', description: 'SMTP server reachable' }, + smtp_check: { type: 'boolean', description: 'Email does not bounce' }, + block: { type: 'boolean', description: 'Whether the domain blocks verification' }, + // Discover + results: { + type: 'array', + description: + 'Companies matching the search (domain, organization, personal_emails, generic_emails, total_emails)', + }, + // Companies Find (flattened) + name: { type: 'string', description: 'Company name (companies-find, discover)' }, + description: { type: 'string', description: 'Company description' }, + industry: { type: 'string', description: 'Industry classification' }, + sector: { type: 'string', description: 'Business sector' }, + size: { type: 'string', description: 'Employee headcount range (e.g., "11-50")' }, + founded_year: { type: 'number', 
description: 'Year founded' }, + location: { type: 'string', description: 'Headquarters location (formatted)' }, + country: { type: 'string', description: 'Country (full name)' }, + country_code: { type: 'string', description: 'ISO 3166-1 alpha-2 country code' }, + state: { type: 'string', description: 'State/province' }, + city: { type: 'string', description: 'City' }, + linkedin: { type: 'string', description: 'LinkedIn handle (companies-find)' }, + twitter: { type: 'string', description: 'Twitter handle' }, + facebook: { type: 'string', description: 'Facebook handle' }, + logo: { type: 'string', description: 'Company logo URL' }, + phone: { type: 'string', description: 'Company phone number' }, + tech: { type: 'array', description: 'Technologies used by the company' }, + // Email Count + total: { type: 'number', description: 'Total email count' }, personal_emails: { type: 'number', description: 'Personal emails count' }, generic_emails: { type: 'number', description: 'Generic emails count' }, + department: { + type: 'json', + description: + 'Email count by department (executive, it, finance, management, sales, legal, support, hr, marketing, communication, education, design, health, operations)', + }, + seniority: { + type: 'json', + description: 'Email count by seniority level (junior, senior, executive)', + }, }, } diff --git a/apps/sim/tools/hunter/companies_find.ts b/apps/sim/tools/hunter/companies_find.ts index 1ba15585c8a..4b158a91f3a 100644 --- a/apps/sim/tools/hunter/companies_find.ts +++ b/apps/sim/tools/hunter/companies_find.ts @@ -1,5 +1,4 @@ import type { HunterEnrichmentParams, HunterEnrichmentResponse } from '@/tools/hunter/types' -import { COMPANY_OUTPUT } from '@/tools/hunter/types' import type { ToolConfig } from '@/tools/types' export const companiesFindTool: ToolConfig = { @@ -39,32 +38,57 @@ export const companiesFindTool: ToolConfig { const data = await response.json() + const c = data.data ?? 
{} return { success: true, output: { - person: undefined, - company: data.data - ? { - name: data.data.name || '', - domain: data.data.domain || '', - industry: data.data.industry || '', - size: data.data.size || '', - country: data.data.country || '', - linkedin: data.data.linkedin || '', - twitter: data.data.twitter || '', - } - : undefined, + name: c.name ?? '', + domain: c.domain ?? '', + description: c.description ?? '', + industry: c.category?.industry ?? '', + sector: c.category?.sector ?? '', + size: + c.metrics?.employeesRange ?? + (c.metrics?.employees != null ? String(c.metrics.employees) : ''), + founded_year: c.foundedYear ?? null, + location: c.location ?? '', + country: c.geo?.country ?? '', + country_code: c.geo?.countryCode ?? '', + state: c.geo?.state ?? '', + city: c.geo?.city ?? '', + linkedin: c.linkedin?.handle ?? '', + twitter: c.twitter?.handle ?? '', + facebook: c.facebook?.handle ?? '', + logo: c.logo ?? '', + phone: c.phone ?? '', + tech: c.tech ?? [], }, } }, outputs: { - person: { - type: 'object', - description: 'Person information (undefined for companies_find tool)', - optional: true, + name: { type: 'string', description: 'Company name' }, + domain: { type: 'string', description: 'Company domain' }, + description: { type: 'string', description: 'Company description' }, + industry: { type: 'string', description: 'Industry classification' }, + sector: { type: 'string', description: 'Business sector' }, + size: { type: 'string', description: 'Employee headcount range (e.g., "11-50")' }, + founded_year: { type: 'number', description: 'Year founded', optional: true }, + location: { type: 'string', description: 'Headquarters location (formatted)' }, + country: { type: 'string', description: 'Country (full name)' }, + country_code: { type: 'string', description: 'ISO 3166-1 alpha-2 country code' }, + state: { type: 'string', description: 'State/province' }, + city: { type: 'string', description: 'City' }, + linkedin: { type: 'string', 
description: 'LinkedIn handle (e.g., company/hunterio)' }, + twitter: { type: 'string', description: 'Twitter handle' }, + facebook: { type: 'string', description: 'Facebook handle' }, + logo: { type: 'string', description: 'Company logo URL' }, + phone: { type: 'string', description: 'Company phone number' }, + tech: { + type: 'array', + description: 'Technologies used by the company', + items: { type: 'string', description: 'Technology name' }, }, - company: COMPANY_OUTPUT, }, } diff --git a/apps/sim/tools/hunter/discover.ts b/apps/sim/tools/hunter/discover.ts index 1be006fc2e5..bdcd26c97da 100644 --- a/apps/sim/tools/hunter/discover.ts +++ b/apps/sim/tools/hunter/discover.ts @@ -71,13 +71,12 @@ export const discoverTool: ToolConfig { - const body: Record = {} + const body: Record = {} - // Add optional parameters if provided if (params.query) body.query = params.query if (params.domain) body.organization = { domain: [params.domain] } - if (params.headcount) body.headcount = params.headcount - if (params.company_type) body.company_type = params.company_type + if (params.headcount) body.headcount = [params.headcount] + if (params.company_type) body.company_type = { include: [params.company_type] } if (params.technology) { body.technology = { include: [params.technology], @@ -90,18 +89,22 @@ export const discoverTool: ToolConfig { const data = await response.json() + const companies: Array<{ + domain?: string + organization?: string + emails_count?: { personal?: number; generic?: number; total?: number } + }> = Array.isArray(data?.data) ? data.data : [] return { success: true, output: { - results: - data.data?.map((company: any) => ({ - domain: company.domain || '', - name: company.organization || '', - headcount: company.headcount, - technologies: company.technologies || [], - email_count: company.emails_count?.total || 0, - })) || [], + results: companies.map((c) => ({ + domain: c.domain ?? '', + organization: c.organization ?? 
'', + personal_emails: c.emails_count?.personal ?? 0, + generic_emails: c.emails_count?.generic ?? 0, + total_emails: c.emails_count?.total ?? 0, + })), }, } }, diff --git a/apps/sim/tools/hunter/domain_search.ts b/apps/sim/tools/hunter/domain_search.ts index 5f031e49271..33d99cbd508 100644 --- a/apps/sim/tools/hunter/domain_search.ts +++ b/apps/sim/tools/hunter/domain_search.ts @@ -1,4 +1,8 @@ -import type { HunterDomainSearchParams, HunterDomainSearchResponse } from '@/tools/hunter/types' +import type { + HunterDomainSearchParams, + HunterDomainSearchResponse, + HunterEmail, +} from '@/tools/hunter/types' import { EMAILS_OUTPUT } from '@/tools/hunter/types' import type { ToolConfig } from '@/tools/types' @@ -77,45 +81,35 @@ export const domainSearchTool: ToolConfig { const data = await response.json() + const d = data.data ?? {} return { success: true, output: { - domain: data.data?.domain || '', - disposable: data.data?.disposable || false, - webmail: data.data?.webmail || false, - accept_all: data.data?.accept_all || false, - pattern: data.data?.pattern || '', - organization: data.data?.organization || '', - description: data.data?.description || '', - industry: data.data?.industry || '', - twitter: data.data?.twitter || '', - facebook: data.data?.facebook || '', - linkedin: data.data?.linkedin || '', - instagram: data.data?.instagram || '', - youtube: data.data?.youtube || '', - technologies: data.data?.technologies || [], - country: data.data?.country || '', - state: data.data?.state || '', - city: data.data?.city || '', - postal_code: data.data?.postal_code || '', - street: data.data?.street || '', + domain: d.domain ?? '', + disposable: d.disposable ?? false, + webmail: d.webmail ?? false, + accept_all: d.accept_all ?? false, + pattern: d.pattern ?? '', + organization: d.organization ?? '', + linked_domains: d.linked_domains ?? 
[], emails: - data.data?.emails?.map((email: any) => ({ - value: email.value || '', - type: email.type || '', - confidence: email.confidence || 0, - sources: email.sources || [], - first_name: email.first_name || '', - last_name: email.last_name || '', - position: email.position || '', - seniority: email.seniority || '', - department: email.department || '', - linkedin: email.linkedin || '', - twitter: email.twitter || '', - phone_number: email.phone_number || '', - verification: email.verification || {}, - })) || [], + d.emails?.map((email: Partial) => ({ + value: email.value ?? '', + type: email.type ?? '', + confidence: email.confidence ?? 0, + sources: email.sources ?? [], + first_name: email.first_name ?? null, + last_name: email.last_name ?? null, + position: email.position ?? null, + position_raw: email.position_raw ?? null, + seniority: email.seniority ?? null, + department: email.department ?? null, + linkedin: email.linkedin ?? null, + twitter: email.twitter ?? null, + phone_number: email.phone_number ?? null, + verification: email.verification ?? { date: null, status: 'unknown' }, + })) ?? [], }, } }, @@ -145,61 +139,10 @@ export const domainSearchTool: ToolConfig { const data = await response.json() + const d = data.data ?? {} return { success: true, output: { - email: data.data?.email || '', - score: data.data?.score || 0, - sources: data.data?.sources || [], - verification: data.data?.verification || {}, + first_name: d.first_name ?? '', + last_name: d.last_name ?? '', + email: d.email ?? '', + score: d.score ?? 0, + domain: d.domain ?? '', + accept_all: d.accept_all ?? false, + position: d.position ?? null, + twitter: d.twitter ?? null, + linkedin_url: d.linkedin_url ?? null, + phone_number: d.phone_number ?? null, + company: d.company ?? null, + sources: d.sources ?? [], + verification: d.verification ?? 
{ date: null, status: 'unknown' }, }, } }, outputs: { - email: { - type: 'string', - description: 'The found email address', - }, + first_name: { type: 'string', description: "Person's first name" }, + last_name: { type: 'string', description: "Person's last name" }, + email: { type: 'string', description: 'The found email address' }, score: { type: 'number', description: 'Confidence score (0-100) for the found email address', }, + domain: { type: 'string', description: 'Domain that was searched' }, + accept_all: { + type: 'boolean', + description: 'Whether the server accepts all email addresses (may cause false positives)', + }, + position: { type: 'string', description: 'Job title/position', optional: true }, + twitter: { type: 'string', description: 'Twitter handle', optional: true }, + linkedin_url: { type: 'string', description: 'LinkedIn profile URL', optional: true }, + phone_number: { type: 'string', description: 'Phone number', optional: true }, + company: { type: 'string', description: 'Company name', optional: true }, sources: SOURCES_OUTPUT, verification: VERIFICATION_OUTPUT, }, diff --git a/apps/sim/tools/hunter/hunter.test.ts b/apps/sim/tools/hunter/hunter.test.ts new file mode 100644 index 00000000000..f3b12d23a71 --- /dev/null +++ b/apps/sim/tools/hunter/hunter.test.ts @@ -0,0 +1,269 @@ +/** + * @vitest-environment node + */ +import { describe, expect, it } from 'vitest' +import { companiesFindTool } from '@/tools/hunter/companies_find' +import { discoverTool } from '@/tools/hunter/discover' +import { domainSearchTool } from '@/tools/hunter/domain_search' +import { emailFinderTool } from '@/tools/hunter/email_finder' + +const respond = (body: unknown) => new Response(JSON.stringify(body)) + +describe('hunter domain_search', () => { + const transform = domainSearchTool.transformResponse! 
+ + it('maps the documented response shape', async () => { + const result = await transform( + respond({ + data: { + domain: 'stripe.com', + disposable: false, + webmail: false, + accept_all: true, + pattern: '{first}', + organization: 'Stripe', + linked_domains: ['stripe.io'], + emails: [ + { + value: 'patrick@stripe.com', + type: 'personal', + confidence: 92, + first_name: 'Patrick', + last_name: 'Collison', + position: 'CEO', + seniority: 'executive', + department: 'executive', + linkedin: null, + twitter: 'patrickc', + phone_number: null, + sources: [], + verification: { date: '2024-01-01', status: 'valid' }, + }, + ], + }, + }) + ) + + expect(result.success).toBe(true) + expect(result.output.domain).toBe('stripe.com') + expect(result.output.linked_domains).toEqual(['stripe.io']) + expect(result.output.emails).toHaveLength(1) + expect(result.output.emails[0]).toMatchObject({ + value: 'patrick@stripe.com', + first_name: 'Patrick', + twitter: 'patrickc', + verification: { status: 'valid' }, + }) + }) + + it('returns safe defaults when fields are missing', async () => { + const result = await transform(respond({ data: null })) + expect(result.output).toMatchObject({ + domain: '', + disposable: false, + webmail: false, + accept_all: false, + pattern: '', + organization: '', + linked_domains: [], + emails: [], + }) + }) + + it('nullifies missing optional email fields', async () => { + const result = await transform( + respond({ + data: { + emails: [{ value: 'a@b.com', type: 'generic', confidence: 50 }], + }, + }) + ) + expect(result.output.emails[0]).toMatchObject({ + first_name: null, + last_name: null, + position: null, + linkedin: null, + verification: { status: 'unknown' }, + }) + }) +}) + +describe('hunter email_finder', () => { + const transform = emailFinderTool.transformResponse! 
+ + it('extracts the documented finder fields', async () => { + const result = await transform( + respond({ + data: { + first_name: 'Alex', + last_name: 'Smith', + email: 'alex@acme.com', + score: 85, + domain: 'acme.com', + accept_all: false, + position: 'Engineer', + twitter: null, + linkedin_url: 'https://linkedin.com/in/alex', + phone_number: null, + company: 'Acme', + sources: [], + verification: { date: null, status: 'valid' }, + }, + }) + ) + + expect(result.output).toMatchObject({ + first_name: 'Alex', + email: 'alex@acme.com', + score: 85, + linkedin_url: 'https://linkedin.com/in/alex', + company: 'Acme', + verification: { status: 'valid' }, + }) + }) + + it('falls back to safe defaults', async () => { + const result = await transform(respond({ data: {} })) + expect(result.output).toMatchObject({ + email: '', + score: 0, + accept_all: false, + sources: [], + verification: { date: null, status: 'unknown' }, + }) + }) +}) + +describe('hunter discover', () => { + const transform = discoverTool.transformResponse! 
+ + it('maps documented data array shape', async () => { + const result = await transform( + respond({ + data: [ + { + domain: 'hunter.io', + organization: 'Hunter', + emails_count: { personal: 23, generic: 5, total: 28 }, + }, + ], + }) + ) + + expect(result.output.results).toEqual([ + { + domain: 'hunter.io', + organization: 'Hunter', + personal_emails: 23, + generic_emails: 5, + total_emails: 28, + }, + ]) + }) + + it('returns empty array when data is missing', async () => { + const result = await transform(respond({})) + expect(result.output.results).toEqual([]) + }) + + it('falls back to zero counts when emails_count is missing', async () => { + const result = await transform( + respond({ data: [{ domain: 'acme.com', organization: 'Acme' }] }) + ) + expect(result.output.results[0]).toEqual({ + domain: 'acme.com', + organization: 'Acme', + personal_emails: 0, + generic_emails: 0, + total_emails: 0, + }) + }) + + it('throws when no search params provided', () => { + const buildUrl = discoverTool.request.url as (p: Record) => string + expect(() => buildUrl({ apiKey: 'k' })).toThrow(/At least one search parameter/) + }) + + it('builds body per docs (headcount as plain array, technology wrapped)', () => { + const buildBody = discoverTool.request.body as ( + p: Record + ) => Record + const body = buildBody({ apiKey: 'k', headcount: '11-50', technology: 'react' }) + expect(body).toEqual({ + headcount: ['11-50'], + technology: { include: ['react'] }, + }) + }) +}) + +describe('hunter companies_find', () => { + const transform = companiesFindTool.transformResponse! 
+ + it('flattens nested company fields', async () => { + const result = await transform( + respond({ + data: { + name: 'Stripe', + domain: 'stripe.com', + description: 'Payments', + category: { industry: 'Fintech', sector: 'Software' }, + metrics: { employees: '1000+' }, + foundedYear: 2010, + location: 'San Francisco, CA', + geo: { country: 'United States', countryCode: 'US', state: 'CA', city: 'SF' }, + linkedin: { handle: 'company/stripe' }, + twitter: { handle: 'stripe' }, + facebook: { handle: 'stripe' }, + logo: 'https://logo.png', + phone: '+1-555', + tech: ['react', 'node'], + }, + }) + ) + + expect(result.output).toEqual({ + name: 'Stripe', + domain: 'stripe.com', + description: 'Payments', + industry: 'Fintech', + sector: 'Software', + size: '1000+', + founded_year: 2010, + location: 'San Francisco, CA', + country: 'United States', + country_code: 'US', + state: 'CA', + city: 'SF', + linkedin: 'company/stripe', + twitter: 'stripe', + facebook: 'stripe', + logo: 'https://logo.png', + phone: '+1-555', + tech: ['react', 'node'], + }) + }) + + it('prefers employeesRange and coerces numeric employees', async () => { + const rangeResult = await transform( + respond({ data: { metrics: { employees: 5432, employeesRange: '1001-5000' } } }) + ) + expect(rangeResult.output.size).toBe('1001-5000') + + const numericResult = await transform(respond({ data: { metrics: { employees: 5432 } } })) + expect(numericResult.output.size).toBe('5432') + }) + + it('survives missing nested objects', async () => { + const result = await transform(respond({ data: {} })) + expect(result.output).toMatchObject({ + name: '', + industry: '', + sector: '', + size: '', + country: '', + linkedin: '', + tech: [], + founded_year: null, + }) + }) +}) diff --git a/apps/sim/tools/hunter/types.ts b/apps/sim/tools/hunter/types.ts index 4751925a3d7..a55cb6a2795 100644 --- a/apps/sim/tools/hunter/types.ts +++ b/apps/sim/tools/hunter/types.ts @@ -71,14 +71,20 @@ export const EMAIL_OUTPUT_PROPERTIES = 
{ type: 'number', description: 'Probability score (0-100) that the email is correct', }, - first_name: { type: 'string', description: "Person's first name" }, - last_name: { type: 'string', description: "Person's last name" }, - position: { type: 'string', description: 'Job title/position' }, - seniority: { type: 'string', description: 'Seniority level (junior, senior, executive)' }, + first_name: { type: 'string', description: "Person's first name", optional: true }, + last_name: { type: 'string', description: "Person's last name", optional: true }, + position: { type: 'string', description: 'Job title/position', optional: true }, + position_raw: { type: 'string', description: 'Raw job title as found', optional: true }, + seniority: { + type: 'string', + description: 'Seniority level (junior, senior, executive)', + optional: true, + }, department: { type: 'string', description: - 'Department (executive, it, finance, management, sales, legal, support, hr, marketing, communication)', + 'Department (executive, it, finance, management, sales, legal, support, hr, marketing, communication, education, design, health, operations)', + optional: true, }, linkedin: { type: 'string', description: 'LinkedIn profile URL', optional: true }, twitter: { type: 'string', description: 'Twitter handle', optional: true }, @@ -147,36 +153,16 @@ export const SENIORITY_OUTPUT: OutputProperty = { } /** - * Output definition for emails_count object in discover results - */ -export const EMAILS_COUNT_OUTPUT_PROPERTIES = { - personal: { type: 'number', description: 'Number of personal email addresses' }, - generic: { type: 'number', description: 'Number of generic/role-based email addresses' }, - total: { type: 'number', description: 'Total number of email addresses' }, -} as const satisfies Record - -/** - * Complete emails_count object output definition - */ -export const EMAILS_COUNT_OUTPUT: OutputProperty = { - type: 'object', - description: 'Email count breakdown', - properties: 
EMAILS_COUNT_OUTPUT_PROPERTIES, -} - -/** - * Output definition for discover result company objects + * Output definition for discover result company objects. + * Hunter Discover returns minimal info per company — use Domain Search or + * Company Enrichment for richer data on a specific result. */ export const DISCOVER_RESULT_OUTPUT_PROPERTIES = { domain: { type: 'string', description: 'Company domain' }, - name: { type: 'string', description: 'Company/organization name' }, - headcount: { type: 'number', description: 'Company size/headcount', optional: true }, - technologies: { - type: 'array', - description: 'Technologies used by the company', - items: { type: 'string', description: 'Technology name' }, - }, - email_count: { type: 'number', description: 'Total number of email addresses found' }, + organization: { type: 'string', description: 'Organization name' }, + personal_emails: { type: 'number', description: 'Count of personal emails' }, + generic_emails: { type: 'number', description: 'Count of generic (role-based) emails' }, + total_emails: { type: 'number', description: 'Total emails found for the company' }, } as const satisfies Record /** @@ -191,28 +177,6 @@ export const DISCOVER_RESULTS_OUTPUT: OutputProperty = { }, } -/** - * Output definition for company enrichment objects - */ -export const COMPANY_OUTPUT_PROPERTIES = { - name: { type: 'string', description: 'Company name' }, - domain: { type: 'string', description: 'Company domain' }, - industry: { type: 'string', description: 'Industry classification' }, - size: { type: 'string', description: 'Company size/headcount range' }, - country: { type: 'string', description: 'Country where the company is located' }, - linkedin: { type: 'string', description: 'LinkedIn company page URL', optional: true }, - twitter: { type: 'string', description: 'Twitter handle', optional: true }, -} as const satisfies Record - -/** - * Complete company object output definition - */ -export const COMPANY_OUTPUT: 
OutputProperty = { - type: 'object', - description: 'Company information', - properties: COMPANY_OUTPUT_PROPERTIES, -} - // Common parameters for all Hunter.io tools export interface HunterBaseParams { apiKey: string @@ -229,10 +193,10 @@ export interface HunterDiscoverParams extends HunterBaseParams { export interface HunterDiscoverResult { domain: string - name: string - headcount?: number - technologies?: string[] - email_count?: number + organization: string + personal_emails: number + generic_emails: number + total_emails: number } export interface HunterDiscoverResponse extends ToolResponse { @@ -262,16 +226,17 @@ export interface HunterEmail { last_seen_on: string still_on_page: boolean }> - first_name: string - last_name: string - position: string - seniority: string - department: string - linkedin: string - twitter: string - phone_number: string + first_name: string | null + last_name: string | null + position: string | null + position_raw: string | null + seniority: string | null + department: string | null + linkedin: string | null + twitter: string | null + phone_number: string | null verification: { - date: string + date: string | null status: string } } @@ -284,19 +249,7 @@ export interface HunterDomainSearchResponse extends ToolResponse { accept_all: boolean pattern: string organization: string - description: string - industry: string - twitter: string - facebook: string - linkedin: string - instagram: string - youtube: string - technologies: string[] - country: string - state: string - city: string - postal_code: string - street: string + linked_domains: string[] emails: HunterEmail[] } } @@ -311,8 +264,17 @@ export interface HunterEmailFinderParams extends HunterBaseParams { export interface HunterEmailFinderResponse extends ToolResponse { output: { + first_name: string + last_name: string email: string score: number + domain: string + accept_all: boolean + position: string | null + twitter: string | null + linkedin_url: string | null + 
phone_number: string | null + company: string | null sources: Array<{ domain: string uri: string @@ -321,7 +283,7 @@ export interface HunterEmailFinderResponse extends ToolResponse { still_on_page: boolean }> verification: { - date: string + date: string | null status: string } } @@ -366,26 +328,24 @@ export interface HunterEnrichmentParams extends HunterBaseParams { export interface HunterEnrichmentResponse extends ToolResponse { output: { - person?: { - first_name: string - last_name: string - email: string - position: string - seniority: string - department: string - linkedin: string - twitter: string - phone_number: string - } - company?: { - name: string - domain: string - industry: string - size: string - country: string - linkedin: string - twitter: string - } + name: string + domain: string + description: string + industry: string + sector: string + size: string + founded_year: number | null + location: string + country: string + country_code: string + state: string + city: string + linkedin: string + twitter: string + facebook: string + logo: string + phone: string + tech: string[] } } From 6a927c9cd39550545db5da7660503fa9d0fa5cb7 Mon Sep 17 00:00:00 2001 From: Waleed Date: Thu, 7 May 2026 20:01:51 -0700 Subject: [PATCH 14/33] fix(tables): optimistic updates for column delete/update (#4512) * fix(tables): optimistic updates for column delete/update Add onMutate/onError to useDeleteColumn and useUpdateColumn so column deletes feel instant on large tables (no flash-back during the JSONB rewrite) and concurrent type changes don't race the in-flight delete's invalidation. 
Co-Authored-By: Claude Opus 4.7 * fix(tables): toast on delete-column failure, case-insensitive row cleanup, rename row-data keys Address greptile review: - useDeleteColumn now toasts on non-validation errors so users see when the column "snaps back" after a server/network failure - Row data cleanup matches keys case-insensitively in both useDeleteColumn and useUpdateColumn so a column stored as "Age" is cleaned even when the request uses "age" - useUpdateColumn now migrates row-data keys when updates.name is set, preventing blank cells during the server round-trip on a rename Co-Authored-By: Claude Opus 4.7 --------- Co-authored-by: Claude Opus 4.7 --- apps/sim/hooks/queries/tables.test.ts | 251 ++++++++++++++++++++++++++ apps/sim/hooks/queries/tables.ts | 84 ++++++++- 2 files changed, 334 insertions(+), 1 deletion(-) create mode 100644 apps/sim/hooks/queries/tables.test.ts diff --git a/apps/sim/hooks/queries/tables.test.ts b/apps/sim/hooks/queries/tables.test.ts new file mode 100644 index 00000000000..c2c8a12724b --- /dev/null +++ b/apps/sim/hooks/queries/tables.test.ts @@ -0,0 +1,251 @@ +/** + * @vitest-environment node + */ +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { queryClient, cacheStore } = vi.hoisted(() => { + const cache = new Map() + return { + cacheStore: cache, + queryClient: { + cancelQueries: vi.fn().mockResolvedValue(undefined), + invalidateQueries: vi.fn().mockResolvedValue(undefined), + getQueryData: vi.fn((key: readonly unknown[]) => cache.get(JSON.stringify(key))), + setQueryData: vi.fn((key: readonly unknown[], updater: unknown) => { + const k = JSON.stringify(key) + const prev = cache.get(k) + const next = + typeof updater === 'function' ? 
(updater as (p: unknown) => unknown)(prev) : updater + cache.set(k, next) + return next + }), + getQueriesData: vi.fn((opts: { queryKey: readonly unknown[] }) => { + const prefix = JSON.stringify(opts.queryKey).slice(0, -1) + return [...cache.entries()] + .filter(([k]) => k.startsWith(prefix)) + .map(([k, v]) => [JSON.parse(k), v]) + }), + }, + } +}) + +vi.mock('@tanstack/react-query', () => ({ + keepPreviousData: {}, + useQuery: vi.fn(), + useInfiniteQuery: vi.fn(), + useQueryClient: vi.fn(() => queryClient), + useMutation: vi.fn((options) => options), +})) + +vi.mock('@/lib/api/client/request', () => ({ + requestJson: vi.fn(), +})) + +vi.mock('@/lib/api/client/errors', () => ({ + isValidationError: vi.fn(() => false), +})) + +vi.mock('@/lib/api/contracts/tables', () => ({ + addTableColumnContract: {}, + addWorkflowGroupContract: {}, + batchCreateTableRowsContract: {}, + batchUpdateTableRowsContract: {}, + cancelTableRunsContract: {}, + createTableContract: {}, + createTableRowContract: {}, + deleteTableColumnContract: {}, + deleteTableContract: {}, + deleteTableRowContract: {}, + deleteTableRowsContract: {}, + deleteWorkflowGroupContract: {}, + getTableContract: {}, + importCsvContract: {}, + listTableRowsContract: {}, + listTablesContract: {}, + renameTableContract: {}, + restoreTableContract: {}, + runWorkflowGroupContract: {}, + updateTableColumnContract: {}, + updateTableMetadataContract: {}, + updateTableRowContract: {}, + updateWorkflowGroupContract: {}, + uploadCsvContract: {}, +})) + +vi.mock('@/app/workspace/providers/socket-provider', () => ({ + useSocket: vi.fn(() => ({ socket: null })), +})) + +vi.mock('@/components/emcn', () => ({ + toast: { error: vi.fn(), success: vi.fn() }, +})) + +import { tableKeys, useDeleteColumn, useUpdateColumn } from '@/hooks/queries/tables' + +const TABLE_ID = 'tbl-1' +const WORKSPACE_ID = 'ws-1' + +function setCache(key: readonly unknown[], value: unknown) { + cacheStore.set(JSON.stringify(key), value) +} + +function 
getCache(key: readonly unknown[]): T | undefined { + return cacheStore.get(JSON.stringify(key)) as T | undefined +} + +beforeEach(() => { + cacheStore.clear() + vi.clearAllMocks() +}) + +describe('useDeleteColumn optimistic update', () => { + it('removes column from schema cache, strips its width, and clears it from row data', async () => { + setCache(tableKeys.detail(TABLE_ID), { + id: TABLE_ID, + schema: { + columns: [ + { name: 'name', type: 'string' }, + { name: 'age', type: 'number' }, + ], + }, + metadata: { + columnWidths: { name: 200, age: 100 }, + }, + }) + setCache(tableKeys.rowsRoot(TABLE_ID), { + rows: [ + { id: 'r1', data: { name: 'a', age: 1 } }, + { id: 'r2', data: { name: 'b', age: 2 } }, + ], + totalCount: 2, + }) + + const hook = useDeleteColumn({ workspaceId: WORKSPACE_ID, tableId: TABLE_ID }) + const ctx = await hook.onMutate?.('age') + + const detail = getCache<{ + schema: { columns: Array<{ name: string }> } + metadata: { columnWidths: Record } + }>(tableKeys.detail(TABLE_ID)) + expect(detail?.schema.columns.map((c) => c.name)).toEqual(['name']) + expect(detail?.metadata.columnWidths).toEqual({ name: 200 }) + + const rows = getCache<{ rows: Array<{ data: Record }> }>( + tableKeys.rowsRoot(TABLE_ID) + ) + expect(rows?.rows.every((r) => !('age' in r.data))).toBe(true) + expect(rows?.rows[0]?.data).toEqual({ name: 'a' }) + + expect(ctx?.previousDetail).toBeDefined() + expect(ctx?.rowSnapshots?.length).toBeGreaterThan(0) + }) + + it('rolls back schema and rows on error using snapshots', async () => { + const originalDetail = { + id: TABLE_ID, + schema: { columns: [{ name: 'name' }, { name: 'age' }] }, + metadata: { columnWidths: { name: 200, age: 100 } }, + } + const originalRows = { + rows: [{ id: 'r1', data: { name: 'a', age: 1 } }], + totalCount: 1, + } + setCache(tableKeys.detail(TABLE_ID), originalDetail) + setCache(tableKeys.rowsRoot(TABLE_ID), originalRows) + + const hook = useDeleteColumn({ workspaceId: WORKSPACE_ID, tableId: TABLE_ID }) + 
const ctx = await hook.onMutate?.('age') + + expect(getCache(tableKeys.detail(TABLE_ID))).not.toEqual(originalDetail) + + hook.onError?.(new Error('boom'), 'age', ctx) + + expect(getCache(tableKeys.detail(TABLE_ID))).toEqual(originalDetail) + expect(getCache(tableKeys.rowsRoot(TABLE_ID))).toEqual(originalRows) + }) + + it('invalidates schema, rows, and lists in onSettled', () => { + const hook = useDeleteColumn({ workspaceId: WORKSPACE_ID, tableId: TABLE_ID }) + hook.onSettled?.(undefined, null, 'age', undefined) + + const calls = queryClient.invalidateQueries.mock.calls.map((c) => c[0]?.queryKey) + expect(calls).toEqual( + expect.arrayContaining([ + tableKeys.detail(TABLE_ID), + tableKeys.rowsRoot(TABLE_ID), + tableKeys.lists(), + ]) + ) + }) +}) + +describe('useUpdateColumn optimistic update', () => { + it('writes the column update to the schema cache and rolls back on error', async () => { + const original = { + id: TABLE_ID, + schema: { + columns: [ + { name: 'name', type: 'string' }, + { name: 'age', type: 'string' }, + ], + }, + } + setCache(tableKeys.detail(TABLE_ID), original) + + const hook = useUpdateColumn({ workspaceId: WORKSPACE_ID, tableId: TABLE_ID }) + const ctx = await hook.onMutate?.({ columnName: 'age', updates: { type: 'number' } }) + + const after = getCache<{ schema: { columns: Array<{ name: string; type: string }> } }>( + tableKeys.detail(TABLE_ID) + ) + expect(after?.schema.columns.find((c) => c.name === 'age')?.type).toBe('number') + + hook.onError?.(new Error('boom'), { columnName: 'age', updates: { type: 'number' } }, ctx) + + expect(getCache(tableKeys.detail(TABLE_ID))).toEqual(original) + }) + + it('renames the corresponding row-data key when updates.name is set', async () => { + setCache(tableKeys.detail(TABLE_ID), { + id: TABLE_ID, + schema: { columns: [{ name: 'age', type: 'number' }] }, + }) + setCache(tableKeys.rowsRoot(TABLE_ID), { + rows: [ + { id: 'r1', data: { age: 30 } }, + { id: 'r2', data: { age: 40 } }, + ], + totalCount: 
2, + }) + + const hook = useUpdateColumn({ workspaceId: WORKSPACE_ID, tableId: TABLE_ID }) + await hook.onMutate?.({ columnName: 'age', updates: { name: 'years' } }) + + const rows = getCache<{ rows: Array<{ data: Record }> }>( + tableKeys.rowsRoot(TABLE_ID) + ) + expect(rows?.rows[0]?.data).toEqual({ years: 30 }) + expect(rows?.rows[1]?.data).toEqual({ years: 40 }) + }) +}) + +describe('useDeleteColumn case-insensitive row cleanup', () => { + it('strips the row data key even when stored casing differs from the requested name', async () => { + setCache(tableKeys.detail(TABLE_ID), { + id: TABLE_ID, + schema: { columns: [{ name: 'Age', type: 'number' }] }, + }) + setCache(tableKeys.rowsRoot(TABLE_ID), { + rows: [{ id: 'r1', data: { Age: 30, name: 'a' } }], + totalCount: 1, + }) + + const hook = useDeleteColumn({ workspaceId: WORKSPACE_ID, tableId: TABLE_ID }) + await hook.onMutate?.('age') + + const rows = getCache<{ rows: Array<{ data: Record }> }>( + tableKeys.rowsRoot(TABLE_ID) + ) + expect(rows?.rows[0]?.data).toEqual({ name: 'a' }) + }) +}) diff --git a/apps/sim/hooks/queries/tables.ts b/apps/sim/hooks/queries/tables.ts index e2cf11ac08c..770e0f82ee9 100644 --- a/apps/sim/hooks/queries/tables.ts +++ b/apps/sim/hooks/queries/tables.ts @@ -872,7 +872,43 @@ export function useUpdateColumn({ workspaceId, tableId }: RowMutationContext) { body: { workspaceId, columnName, updates }, }) }, - onError: (error) => { + onMutate: async ({ columnName, updates }) => { + await queryClient.cancelQueries({ queryKey: tableKeys.detail(tableId) }) + const previousDetail = queryClient.getQueryData(tableKeys.detail(tableId)) + if (previousDetail) { + const lower = columnName.toLowerCase() + const nextColumns = previousDetail.schema.columns.map((c) => + c.name.toLowerCase() === lower ? 
{ ...c, ...updates } : c + ) + queryClient.setQueryData(tableKeys.detail(tableId), { + ...previousDetail, + schema: { ...previousDetail.schema, columns: nextColumns }, + }) + } + + const newName = (updates as { name?: string }).name + const rowSnapshots = + typeof newName === 'string' && newName.length > 0 && newName !== columnName + ? await snapshotAndMutateRows(queryClient, tableId, (row) => { + const lower = columnName.toLowerCase() + const matchKey = Object.keys(row.data).find((k) => k.toLowerCase() === lower) + if (!matchKey) return null + const { [matchKey]: value, ...rest } = row.data + return { ...row, data: { ...rest, [newName]: value } } + }) + : [] + + return { previousDetail, rowSnapshots } + }, + onError: (error, _vars, context) => { + if (context?.previousDetail) { + queryClient.setQueryData(tableKeys.detail(tableId), context.previousDetail) + } + if (context?.rowSnapshots) { + for (const [key, data] of context.rowSnapshots) { + queryClient.setQueryData(key, data) + } + } // Validation errors are surfaced as inline FieldErrors by the caller. if (isValidationError(error)) return toast.error(error.message, { duration: 5000 }) @@ -1152,6 +1188,52 @@ export function useDeleteColumn({ workspaceId, tableId }: RowMutationContext) { body: { workspaceId, columnName }, }) }, + onMutate: async (columnName) => { + await queryClient.cancelQueries({ queryKey: tableKeys.detail(tableId) }) + + const lower = columnName.toLowerCase() + const previousDetail = queryClient.getQueryData(tableKeys.detail(tableId)) + if (previousDetail) { + const nextColumns = previousDetail.schema.columns.filter( + (c) => c.name.toLowerCase() !== lower + ) + const prevWidths = previousDetail.metadata?.columnWidths + const nextMetadata = prevWidths + ? 
{ + ...previousDetail.metadata, + columnWidths: Object.fromEntries( + Object.entries(prevWidths).filter(([k]) => k.toLowerCase() !== lower) + ), + } + : previousDetail.metadata + queryClient.setQueryData(tableKeys.detail(tableId), { + ...previousDetail, + schema: { ...previousDetail.schema, columns: nextColumns }, + metadata: nextMetadata, + }) + } + + const rowSnapshots = await snapshotAndMutateRows(queryClient, tableId, (row) => { + const matchKey = Object.keys(row.data).find((k) => k.toLowerCase() === lower) + if (!matchKey) return null + const { [matchKey]: _removed, ...rest } = row.data + return { ...row, data: rest } + }) + + return { previousDetail, rowSnapshots } + }, + onError: (error, _columnName, context) => { + if (context?.previousDetail) { + queryClient.setQueryData(tableKeys.detail(tableId), context.previousDetail) + } + if (context?.rowSnapshots) { + for (const [key, data] of context.rowSnapshots) { + queryClient.setQueryData(key, data) + } + } + if (isValidationError(error)) return + toast.error(error.message, { duration: 5000 }) + }, onSettled: () => { invalidateTableSchema(queryClient, tableId) }, From 11563cfbc269e077d82b24e57433846a568cd87a Mon Sep 17 00:00:00 2001 From: Waleed Date: Thu, 7 May 2026 20:18:00 -0700 Subject: [PATCH 15/33] improvement(uploads): migrate remaining FormData uploads to presigned PUT (#4509) * fix(uploads): switch mothership uploads to presigned PUT pattern * fix(uploads): drop unreachable size guard in mothership presigned branch * improvement(uploads): migrate profile-picture and workspace-logo uploads to presigned PUT * improvement(uploads): migrate workflow file-upload sub-block to presigned PUT * improvement(uploads): migrate execution-trigger file uploads to presigned PUT * fix(uploads): tighten permission checks and fix multipart customKey for mothership/execution * fix(uploads): require workspace write+ for execution presigned, admin-only for workspace-logos, suppress doubled error toast * fix(uploads): skip 
per-file invalidation in batch + extract shared API fallback - Add skipInvalidation flag to useUploadWorkspaceFile; file-upload sub-block now invalidates once after the batch instead of per file - Extract uploadViaApiFallback to lib/uploads/client/api-fallback.ts (DRY across 3 hooks) --- apps/sim/app/api/files/multipart/route.ts | 24 ++++ apps/sim/app/api/files/presigned/route.ts | 108 ++++++++++++++- .../hooks/use-profile-picture-upload.ts | 47 +++---- .../user-input/hooks/use-file-attachments.ts | 61 ++++----- .../components/file-upload/file-upload.tsx | 66 +++------- .../hooks/use-workflow-execution.ts | 124 ++++++++++-------- .../hooks/use-workspace-logo-upload.ts | 44 ++++--- apps/sim/hooks/queries/workspace-files.ts | 12 +- .../sim/lib/api/contracts/storage-transfer.ts | 10 +- apps/sim/lib/uploads/client/api-fallback.ts | 42 ++++++ apps/sim/lib/uploads/client/direct-upload.ts | 41 +++++- bun.lock | 1 + 12 files changed, 392 insertions(+), 188 deletions(-) create mode 100644 apps/sim/lib/uploads/client/api-fallback.ts diff --git a/apps/sim/app/api/files/multipart/route.ts b/apps/sim/app/api/files/multipart/route.ts index 80213d54cbb..e61cbd543a8 100644 --- a/apps/sim/app/api/files/multipart/route.ts +++ b/apps/sim/app/api/files/multipart/route.ts @@ -158,6 +158,30 @@ export const POST = withRouteHandler(async (request: NextRequest) => { { status: 413 } ) } + } else if (context === 'mothership') { + const { generateWorkspaceFileKey } = await import( + '@/lib/uploads/contexts/workspace/workspace-file-manager' + ) + customKey = generateWorkspaceFileKey(workspaceId, fileName) + } else if (context === 'execution') { + const workflowId = (data as { workflowId?: unknown }).workflowId + const executionId = (data as { executionId?: unknown }).executionId + if (typeof workflowId !== 'string' || !workflowId.trim()) { + return NextResponse.json( + { error: 'workflowId is required for execution uploads' }, + { status: 400 } + ) + } + if (typeof executionId !== 'string' || 
!executionId.trim()) { + return NextResponse.json( + { error: 'executionId is required for execution uploads' }, + { status: 400 } + ) + } + const { generateExecutionFileKey } = await import( + '@/lib/uploads/contexts/execution/utils' + ) + customKey = generateExecutionFileKey({ workspaceId, workflowId, executionId }, fileName) } let uploadId: string diff --git a/apps/sim/app/api/files/presigned/route.ts b/apps/sim/app/api/files/presigned/route.ts index f6c22bc4a5d..c8fb824b3c9 100644 --- a/apps/sim/app/api/files/presigned/route.ts +++ b/apps/sim/app/api/files/presigned/route.ts @@ -7,14 +7,25 @@ import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { CopilotFiles } from '@/lib/uploads' import type { StorageContext } from '@/lib/uploads/config' import { USE_BLOB_STORAGE } from '@/lib/uploads/config' +import { generateExecutionFileKey } from '@/lib/uploads/contexts/execution/utils' +import { generateWorkspaceFileKey } from '@/lib/uploads/contexts/workspace/workspace-file-manager' import { generatePresignedUploadUrl, hasCloudStorage } from '@/lib/uploads/core/storage-service' import { isImageFileType } from '@/lib/uploads/utils/file-utils' import { validateFileType } from '@/lib/uploads/utils/validation' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' import { createErrorResponse } from '@/app/api/files/utils' const logger = createLogger('PresignedUploadAPI') -const VALID_UPLOAD_TYPES = ['knowledge-base', 'chat', 'copilot', 'profile-pictures'] as const +const VALID_UPLOAD_TYPES = [ + 'knowledge-base', + 'chat', + 'copilot', + 'profile-pictures', + 'mothership', + 'workspace-logos', + 'execution', +] as const class PresignedUrlError extends Error { constructor( @@ -116,6 +127,101 @@ export const POST = withRouteHandler(async (request: NextRequest) => { error instanceof Error ? 
error.message : 'Copilot validation failed' ) } + } else if (uploadType === 'mothership') { + const workspaceId = request.nextUrl.searchParams.get('workspaceId') + if (!workspaceId?.trim()) { + throw new ValidationError('workspaceId query parameter is required for mothership uploads') + } + + const permission = await getUserEntityPermissions(sessionUserId, 'workspace', workspaceId) + if (permission !== 'write' && permission !== 'admin') { + return NextResponse.json( + { error: 'Write or Admin access required for mothership uploads' }, + { status: 403 } + ) + } + + const fileValidationError = validateFileType(fileName, contentType) + if (fileValidationError) { + throw new ValidationError(fileValidationError.message) + } + + const customKey = generateWorkspaceFileKey(workspaceId, fileName) + presignedUrlResponse = await generatePresignedUploadUrl({ + fileName, + contentType, + fileSize, + context: 'mothership', + userId: sessionUserId, + customKey, + expirationSeconds: 3600, + metadata: { workspaceId }, + }) + } else if (uploadType === 'execution') { + const workflowId = request.nextUrl.searchParams.get('workflowId') + const executionId = request.nextUrl.searchParams.get('executionId') + const workspaceId = request.nextUrl.searchParams.get('workspaceId') + if (!workflowId?.trim() || !executionId?.trim() || !workspaceId?.trim()) { + throw new ValidationError( + 'workflowId, executionId, and workspaceId query parameters are required for execution uploads' + ) + } + + const permission = await getUserEntityPermissions(sessionUserId, 'workspace', workspaceId) + if (permission !== 'write' && permission !== 'admin') { + return NextResponse.json( + { error: 'Write or Admin access required for execution uploads' }, + { status: 403 } + ) + } + + const fileValidationError = validateFileType(fileName, contentType) + if (fileValidationError) { + throw new ValidationError(fileValidationError.message) + } + + const customKey = generateExecutionFileKey({ workspaceId, workflowId, 
executionId }, fileName) + presignedUrlResponse = await generatePresignedUploadUrl({ + fileName, + contentType, + fileSize, + context: 'execution', + userId: sessionUserId, + customKey, + expirationSeconds: 3600, + metadata: { workspaceId, workflowId, executionId }, + }) + } else if (uploadType === 'workspace-logos') { + const workspaceId = request.nextUrl.searchParams.get('workspaceId') + if (!workspaceId?.trim()) { + throw new ValidationError( + 'workspaceId query parameter is required for workspace-logos uploads' + ) + } + + const permission = await getUserEntityPermissions(sessionUserId, 'workspace', workspaceId) + if (permission !== 'admin') { + return NextResponse.json( + { error: 'Admin access required for workspace logo uploads' }, + { status: 403 } + ) + } + + if (!isImageFileType(contentType)) { + throw new ValidationError( + 'Only image files (JPEG, PNG, GIF, WebP, SVG) are allowed for workspace logo uploads' + ) + } + + presignedUrlResponse = await generatePresignedUploadUrl({ + fileName, + contentType, + fileSize, + context: 'workspace-logos', + userId: sessionUserId, + expirationSeconds: 3600, + metadata: { workspaceId }, + }) } else { if (uploadType === 'profile-pictures') { if (!sessionUserId?.trim()) { diff --git a/apps/sim/app/workspace/[workspaceId]/settings/hooks/use-profile-picture-upload.ts b/apps/sim/app/workspace/[workspaceId]/settings/hooks/use-profile-picture-upload.ts index 3c910a128bb..e0143fffab7 100644 --- a/apps/sim/app/workspace/[workspaceId]/settings/hooks/use-profile-picture-upload.ts +++ b/apps/sim/app/workspace/[workspaceId]/settings/hooks/use-profile-picture-upload.ts @@ -1,6 +1,7 @@ import { useCallback, useEffect, useRef, useState } from 'react' import { createLogger } from '@sim/logger' -import type { StorageContext } from '@/lib/uploads/shared/types' +import { uploadViaApiFallback } from '@/lib/uploads/client/api-fallback' +import { DirectUploadError, runUploadStrategy } from '@/lib/uploads/client/direct-upload' const logger 
= createLogger('ProfilePictureUpload') const MAX_FILE_SIZE = 5 * 1024 * 1024 // 5MB @@ -10,7 +11,7 @@ interface UseProfilePictureUploadProps { onUpload?: (url: string | null) => void onError?: (error: string) => void currentImage?: string | null - context?: StorageContext + context?: 'profile-pictures' | 'workspace-logos' workspaceId?: string } @@ -64,33 +65,27 @@ export function useProfilePictureUpload({ const uploadFileToServer = useCallback( async (file: File): Promise => { - try { - const formData = new FormData() - formData.append('file', file) - formData.append('context', context) - if (workspaceId) { - formData.append('workspaceId', workspaceId) - } + const presignedEndpoint = + context === 'workspace-logos' && workspaceId + ? `/api/files/presigned?type=workspace-logos&workspaceId=${encodeURIComponent(workspaceId)}` + : `/api/files/presigned?type=${context}` - // boundary-raw-fetch: multipart/form-data upload (FileUpload boundary), incompatible with requestJson which JSON-stringifies bodies - const response = await fetch('/api/files/upload', { - method: 'POST', - body: formData, + try { + const result = await runUploadStrategy({ + file, + workspaceId: workspaceId ?? '', + context, + presignedEndpoint, }) - - if (!response.ok) { - const errorData = await response.json().catch(() => ({ message: response.statusText })) - throw new Error( - errorData.message || errorData.error || `Failed to upload file: ${response.status}` - ) - } - - const data = await response.json() - const publicUrl = data.fileInfo?.path || data.path || data.url - logger.info(`Profile picture uploaded successfully via server upload: ${publicUrl}`) - return publicUrl + logger.info(`${context} uploaded successfully: ${result.path}`) + return result.path } catch (error) { - throw new Error(error instanceof Error ? 
error.message : 'Failed to upload profile picture') + if (error instanceof DirectUploadError && error.code === 'FALLBACK_REQUIRED') { + const { path } = await uploadViaApiFallback(file, context, workspaceId) + logger.info(`${context} uploaded successfully via API fallback: ${path}`) + return path + } + throw error } }, [context, workspaceId] diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/hooks/use-file-attachments.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/hooks/use-file-attachments.ts index 9c09054d5ba..57d808c1f03 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/hooks/use-file-attachments.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/hooks/use-file-attachments.ts @@ -5,6 +5,8 @@ import { createLogger } from '@sim/logger' import { toError } from '@sim/utils/errors' import { generateId } from '@sim/utils/id' import { toast } from '@/components/emcn' +import { uploadViaApiFallback } from '@/lib/uploads/client/api-fallback' +import { DirectUploadError, runUploadStrategy } from '@/lib/uploads/client/direct-upload' import { resolveFileType } from '@/lib/uploads/utils/file-utils' const logger = createLogger('useFileAttachments') @@ -115,6 +117,10 @@ export function useFileAttachments(props: UseFileAttachmentsProps) { logger.error('User ID not available for file upload') return } + if (!workspaceId) { + logger.error('workspaceId required for mothership uploads') + return + } const files = Array.from(fileList) if (files.length === 0) return @@ -134,49 +140,38 @@ export function useFileAttachments(props: UseFileAttachmentsProps) { setAttachedFiles((prev) => [...prev, ...placeholders]) + const presignedEndpoint = 
`/api/files/presigned?type=mothership&workspaceId=${encodeURIComponent(workspaceId)}` + await Promise.all( files.map(async (file, i) => { const placeholder = placeholders[i] try { - const formData = new FormData() - formData.append('file', file) - formData.append('context', 'mothership') - if (workspaceId) { - formData.append('workspaceId', workspaceId) - } - - // boundary-raw-fetch: multipart/form-data upload (FileUpload boundary), incompatible with requestJson which JSON-stringifies bodies - const uploadResponse = await fetch('/api/files/upload', { - method: 'POST', - body: formData, - }) - - if (!uploadResponse.ok) { - const errorData = await uploadResponse.json().catch(() => ({ - message: `Upload failed: ${uploadResponse.status}`, - })) - throw new Error( - errorData.message || - errorData.error || - `Failed to upload file: ${uploadResponse.status}` - ) + let result: { path: string; key: string } + try { + result = await runUploadStrategy({ + file, + workspaceId, + context: 'mothership', + presignedEndpoint, + }) + } catch (error) { + if (error instanceof DirectUploadError && error.code === 'FALLBACK_REQUIRED') { + const fallback = await uploadViaApiFallback(file, 'mothership', workspaceId) + if (!fallback.key) { + throw new Error('Invalid upload response: missing key') + } + result = { path: fallback.path, key: fallback.key } + } else { + throw error + } } - const uploadData = await uploadResponse.json() - - logger.info( - `File uploaded successfully: ${uploadData.fileInfo?.path || uploadData.path}` - ) + logger.info(`File uploaded successfully: ${result.path}`) setAttachedFiles((prev) => prev.map((f) => f.id === placeholder.id - ? { - ...f, - path: uploadData.fileInfo?.path || uploadData.path || uploadData.url, - key: uploadData.fileInfo?.key || uploadData.key, - uploading: false, - } + ? 
{ ...f, path: result.path, key: result.key, uploading: false } : f ) ) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/file-upload/file-upload.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/file-upload/file-upload.tsx index 4faee74069e..e8dc74c8b19 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/file-upload/file-upload.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/file-upload/file-upload.tsx @@ -2,6 +2,7 @@ import { useMemo, useRef, useState } from 'react' import { createLogger } from '@sim/logger' +import { useQueryClient } from '@tanstack/react-query' import { X } from 'lucide-react' import { useParams } from 'next/navigation' import { Button, Combobox } from '@/components/emcn/components' @@ -12,7 +13,11 @@ import { fileDeleteContract } from '@/lib/api/contracts/storage-transfer' import { cn } from '@/lib/core/utils/cn' import { getExtensionFromMimeType } from '@/lib/uploads/utils/file-utils' import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value' -import { useWorkspaceFiles } from '@/hooks/queries/workspace-files' +import { + useUploadWorkspaceFile, + useWorkspaceFiles, + workspaceFilesKeys, +} from '@/hooks/queries/workspace-files' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useWorkflowStore } from '@/stores/workflows/workflow/store' @@ -165,6 +170,9 @@ export function FileUpload({ refetch: refetchWorkspaceFiles, } = useWorkspaceFiles(isPreview ? '' : workspaceId) + const uploadFileMutation = useUploadWorkspaceFile() + const queryClient = useQueryClient() + const value = isPreview ? 
previewValue : storeValue /** @@ -310,58 +318,25 @@ export function FileUpload({ for (const file of validFiles) { try { - const formData = new FormData() - formData.append('file', file) - formData.append('context', 'workspace') - - if (workspaceId) { - formData.append('workspaceId', workspaceId) - } - - // boundary-raw-fetch: multipart/form-data upload (FileUpload boundary), incompatible with requestJson which JSON-stringifies bodies - const response = await fetch('/api/files/upload', { - method: 'POST', - body: formData, + const data = await uploadFileMutation.mutateAsync({ + workspaceId, + file, + skipToast: true, + skipInvalidation: true, }) - const data = await response.json() - - if (!response.ok) { - const errorMessage = - data.message || data.error || `Failed to upload file: ${response.status}` - uploadErrors.push(`${file.name}: ${errorMessage}`) - - setUploadError(errorMessage) - - if (data.isDuplicate || response.status === 409) { - setTimeout(() => setUploadError(null), 5000) - } - continue - } - - if (data.success === false) { - const errorMessage = data.error || 'Upload failed' - uploadErrors.push(`${file.name}: ${errorMessage}`) - - setUploadError(errorMessage) - - if (data.isDuplicate) { - setTimeout(() => setUploadError(null), 5000) - } - continue - } - uploadedFiles.push({ - name: file.name, - path: data.file?.url || data.url, // Workspace: data.file.url, Non-workspace: data.url - key: data.file?.key || data.key, // Storage key for proper file access - size: file.size, - type: file.type, + name: data.file.name, + path: data.file.url, + key: data.file.key, + size: data.file.size, + type: data.file.type, }) } catch (error) { logger.error(`Error uploading ${file.name}:`, error) const errorMessage = error instanceof Error ? 
error.message : 'Unknown error' uploadErrors.push(`${file.name}: ${errorMessage}`) + setUploadError(errorMessage) } } @@ -377,6 +352,7 @@ export function FileUpload({ if (workspaceId) { void refetchWorkspaceFiles() + void queryClient.invalidateQueries({ queryKey: workspaceFilesKeys.storageInfo() }) } if (uploadedFiles.length === 1) { diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts index e71041afd05..5f5ae679b8f 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution.ts @@ -10,6 +10,7 @@ import { requestJson } from '@/lib/api/client/request' import { cancelWorkflowExecutionContract, workflowLogContract } from '@/lib/api/contracts/workflows' import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans' import { processStreamingBlockLogs } from '@/lib/tokenization' +import { DirectUploadError, runUploadStrategy } from '@/lib/uploads/client/direct-upload' import type { ExecutionPausedData } from '@/lib/workflows/executor/execution-events' import { extractTriggerMockPayload, @@ -505,65 +506,84 @@ export function useWorkflowExecution() { typeof (value as any).onUploadError === 'function' if (workflowInput.files && Array.isArray(workflowInput.files)) { try { + const presignedEndpoint = `/api/files/presigned?type=execution&workflowId=${encodeURIComponent(activeWorkflowId)}&executionId=${encodeURIComponent(executionId)}&workspaceId=${encodeURIComponent(workspaceId)}` for (const fileData of workflowInput.files) { - // Create FormData for upload - const formData = new FormData() - formData.append('file', fileData.file) - formData.append('context', 'execution') - formData.append('workflowId', activeWorkflowId) - formData.append('executionId', executionId) - formData.append('workspaceId', workspaceId) - 
- // boundary-raw-fetch: multipart/form-data file upload, requestJson only supports JSON bodies - const response = await fetch('/api/files/upload', { - method: 'POST', - body: formData, - }) - - if (response.ok) { - const uploadResult = await response.json() - // Convert upload result to clean UserFile format - const processUploadResult = (result: any) => ({ - id: - result.id || - `file_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`, - name: result.name, - url: result.url, - size: result.size, - type: result.type, + try { + const result = await runUploadStrategy({ + file: fileData.file, + workspaceId, + context: 'execution', + workflowId: activeWorkflowId, + executionId, + presignedEndpoint, + }) + uploadedFiles.push({ + id: `file_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`, + name: fileData.file.name, + url: result.path, + size: fileData.file.size, + type: fileData.file.type, key: result.key, - uploadedAt: result.uploadedAt, - expiresAt: result.expiresAt, }) - - // The API returns the file directly for single uploads - // or { files: [...] 
} for multiple uploads - if (uploadResult.files && Array.isArray(uploadResult.files)) { - uploadedFiles.push(...uploadResult.files.map(processUploadResult)) - } else if (uploadResult.path || uploadResult.url) { - // Single file upload - the result IS the file object - uploadedFiles.push(processUploadResult(uploadResult)) + } catch (uploadError) { + if ( + uploadError instanceof DirectUploadError && + uploadError.code === 'FALLBACK_REQUIRED' + ) { + const formData = new FormData() + formData.append('file', fileData.file) + formData.append('context', 'execution') + formData.append('workflowId', activeWorkflowId) + formData.append('executionId', executionId) + formData.append('workspaceId', workspaceId) + + // boundary-raw-fetch: local-dev fallback when cloud storage is not configured; multipart upload incompatible with requestJson + const response = await fetch('/api/files/upload', { + method: 'POST', + body: formData, + }) + if (!response.ok) { + const errorData = await response.json().catch(() => null) + const reason = + errorData?.message || errorData?.error || `${response.status}` + const message = `Failed to upload ${fileData.name}: ${reason}` + logger.error(message) + if (isUploadErrorCapable(workflowInput)) { + try { + workflowInput.onUploadError(message) + } catch {} + } + continue + } + const uploadResult = await response.json() + const processUploadResult = (r: any) => ({ + id: + r.id || + `file_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`, + name: r.name, + url: r.url, + size: r.size, + type: r.type, + key: r.key, + uploadedAt: r.uploadedAt, + expiresAt: r.expiresAt, + }) + if (uploadResult.files && Array.isArray(uploadResult.files)) { + uploadedFiles.push(...uploadResult.files.map(processUploadResult)) + } else if (uploadResult.path || uploadResult.url) { + uploadedFiles.push(processUploadResult(uploadResult)) + } } else { - logger.error('Unexpected upload response format:', uploadResult) - } - } else { - const cloned = response.clone() - 
const errorData = await response.json().catch(() => null) - const reason = - errorData?.message || - errorData?.error || - (await cloned.text().catch(() => '')) || - `${response.status}` - const message = `Failed to upload ${fileData.name}: ${reason}` - logger.error(message) - if (isUploadErrorCapable(workflowInput)) { - try { - workflowInput.onUploadError(message) - } catch {} + const message = `Failed to upload ${fileData.name}: ${toError(uploadError).message}` + logger.error(message) + if (isUploadErrorCapable(workflowInput)) { + try { + workflowInput.onUploadError(message) + } catch {} + } } } } - // Update workflow input with uploaded files workflowInput.files = uploadedFiles } catch (error) { logger.error('Error uploading files:', error) diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-workspace-logo-upload.ts b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-workspace-logo-upload.ts index 1fe74412935..23d41f3ac27 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-workspace-logo-upload.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-workspace-logo-upload.ts @@ -1,5 +1,7 @@ import { useCallback, useEffect, useRef, useState } from 'react' import { createLogger } from '@sim/logger' +import { uploadViaApiFallback } from '@/lib/uploads/client/api-fallback' +import { DirectUploadError, runUploadStrategy } from '@/lib/uploads/client/direct-upload' const logger = createLogger('WorkspaceLogoUpload') const MAX_FILE_SIZE = 5 * 1024 * 1024 // 5MB @@ -60,30 +62,30 @@ export function useWorkspaceLogoUpload({ }, []) const uploadFileToServer = useCallback(async (file: File): Promise => { - const formData = new FormData() - formData.append('file', file) - formData.append('context', 'workspace-logos') - if (workspaceIdRef.current) { - formData.append('workspaceId', workspaceIdRef.current) + const targetWorkspaceId = workspaceIdRef.current + if (!targetWorkspaceId) { + 
throw new Error('workspaceId is required for workspace logo upload') } - // boundary-raw-fetch: multipart/form-data upload (FileUpload boundary), incompatible with requestJson which JSON-stringifies bodies - const response = await fetch('/api/files/upload', { - method: 'POST', - body: formData, - }) - - if (!response.ok) { - const errorData = await response.json().catch(() => ({ message: response.statusText })) - throw new Error( - errorData.message || errorData.error || `Failed to upload file: ${response.status}` - ) + const presignedEndpoint = `/api/files/presigned?type=workspace-logos&workspaceId=${encodeURIComponent(targetWorkspaceId)}` + + try { + const result = await runUploadStrategy({ + file, + workspaceId: targetWorkspaceId, + context: 'workspace-logos', + presignedEndpoint, + }) + logger.info(`Workspace logo uploaded successfully: ${result.path}`) + return result.path + } catch (error) { + if (error instanceof DirectUploadError && error.code === 'FALLBACK_REQUIRED') { + const { path } = await uploadViaApiFallback(file, 'workspace-logos', targetWorkspaceId) + logger.info(`Workspace logo uploaded via API fallback: ${path}`) + return path + } + throw error } - - const data = await response.json() - const publicUrl = data.fileInfo?.path || data.path || data.url - logger.info(`Workspace logo uploaded successfully: ${publicUrl}`) - return publicUrl }, []) const processFile = useCallback( diff --git a/apps/sim/hooks/queries/workspace-files.ts b/apps/sim/hooks/queries/workspace-files.ts index 5e4364c5af6..8321996b219 100644 --- a/apps/sim/hooks/queries/workspace-files.ts +++ b/apps/sim/hooks/queries/workspace-files.ts @@ -206,6 +206,7 @@ interface UploadFileParams { onProgress?: (event: UploadProgressEvent) => void signal?: AbortSignal skipToast?: boolean + skipInvalidation?: boolean } interface UploadFileResponse { @@ -320,7 +321,8 @@ export function useUploadWorkspaceFile() { return useMutation({ mutationFn: ({ workspaceId, file, onProgress, signal }: 
UploadFileParams) => uploadWorkspaceFile(workspaceId, file, onProgress, signal), - onSettled: () => { + onSettled: (_data, _error, variables) => { + if (variables.skipInvalidation) return queryClient.invalidateQueries({ queryKey: workspaceFilesKeys.lists() }) queryClient.invalidateQueries({ queryKey: workspaceFilesKeys.storageInfo() }) }, @@ -331,9 +333,11 @@ export function useUploadWorkspaceFile() { }, onError: (error, variables) => { logger.error('Failed to upload file:', error) - toast.error(`Failed to upload "${variables.file.name}": ${error.message}`, { - duration: 5000, - }) + if (!variables.skipToast) { + toast.error(`Failed to upload "${variables.file.name}": ${error.message}`, { + duration: 5000, + }) + } }, }) } diff --git a/apps/sim/lib/api/contracts/storage-transfer.ts b/apps/sim/lib/api/contracts/storage-transfer.ts index 0dd14fcc536..ff2e9bcde5f 100644 --- a/apps/sim/lib/api/contracts/storage-transfer.ts +++ b/apps/sim/lib/api/contracts/storage-transfer.ts @@ -316,7 +316,15 @@ export const fileDeleteBodySchema = z .passthrough() const MAX_FILE_SIZE = 100 * 1024 * 1024 -export const validUploadTypes = ['knowledge-base', 'chat', 'copilot', 'profile-pictures'] as const +export const validUploadTypes = [ + 'knowledge-base', + 'chat', + 'copilot', + 'profile-pictures', + 'mothership', + 'workspace-logos', + 'execution', +] as const export const uploadTypeSchema = z.enum(validUploadTypes) diff --git a/apps/sim/lib/uploads/client/api-fallback.ts b/apps/sim/lib/uploads/client/api-fallback.ts new file mode 100644 index 00000000000..67b90fbc20c --- /dev/null +++ b/apps/sim/lib/uploads/client/api-fallback.ts @@ -0,0 +1,42 @@ +import type { StorageContext } from '@/lib/uploads/shared/types' + +/** + * Server-proxied fallback used only when cloud storage isn't configured (local dev). + * Production always takes the presigned PUT path. 
+ */ +export async function uploadViaApiFallback( + file: File, + context: StorageContext, + workspaceId?: string +): Promise<{ path: string; key?: string }> { + const formData = new FormData() + formData.append('file', file) + formData.append('context', context) + if (workspaceId) { + formData.append('workspaceId', workspaceId) + } + + // boundary-raw-fetch: local-dev fallback when cloud storage is not configured; multipart upload incompatible with requestJson + const response = await fetch('/api/files/upload', { method: 'POST', body: formData }) + if (!response.ok) { + const errorData = (await response.json().catch(() => ({}))) as { + message?: string + error?: string + } + throw new Error( + errorData.message || errorData.error || `Failed to upload file: ${response.status}` + ) + } + const data = (await response.json()) as { + fileInfo?: { path?: string; key?: string } + path?: string + key?: string + url?: string + } + const path = data.fileInfo?.path ?? data.path ?? data.url + const key = data.fileInfo?.key ?? 
data.key + if (!path) { + throw new Error('Invalid upload response: missing path') + } + return { path, key } +} diff --git a/apps/sim/lib/uploads/client/direct-upload.ts b/apps/sim/lib/uploads/client/direct-upload.ts index 07322b071f4..7afaf1ae34d 100644 --- a/apps/sim/lib/uploads/client/direct-upload.ts +++ b/apps/sim/lib/uploads/client/direct-upload.ts @@ -303,7 +303,15 @@ const uploadViaPresignedPut = (opts: UploadViaPutOptions): Promise => { interface MultipartUploadOptions { file: File workspaceId: string - context: 'workspace' | 'knowledge-base' + context: + | 'workspace' + | 'knowledge-base' + | 'mothership' + | 'profile-pictures' + | 'workspace-logos' + | 'execution' + workflowId?: string + executionId?: string signal?: AbortSignal onProgress?: (event: UploadProgressEvent) => void } @@ -321,7 +329,7 @@ interface PartUrl { const uploadViaMultipart = async ( opts: MultipartUploadOptions ): Promise<{ key: string; path: string }> => { - const { file, workspaceId, context, signal, onProgress } = opts + const { file, workspaceId, context, workflowId, executionId, signal, onProgress } = opts // boundary-raw-fetch: multipart upload control plane uses action query strings; client lifecycle (initiate/get-part-urls/complete/abort) is sequenced manually and not modeled by a single contract const initiateResponse = await fetch('/api/files/multipart?action=initiate', { @@ -333,6 +341,8 @@ const uploadViaMultipart = async ( fileSize: file.size, workspaceId, context, + ...(workflowId ? { workflowId } : {}), + ...(executionId ? { executionId } : {}), }), signal, }) @@ -517,11 +527,21 @@ const uploadViaMultipart = async ( export interface RunUploadStrategyOptions { file: File workspaceId: string - context: 'workspace' | 'knowledge-base' + context: + | 'workspace' + | 'knowledge-base' + | 'mothership' + | 'profile-pictures' + | 'workspace-logos' + | 'execution' /** Endpoint to mint a presigned PUT URL. Required unless `presignedOverride` is provided. 
*/ presignedEndpoint?: string /** Pre-fetched presigned data (e.g. from a batch endpoint). Skips per-file fetch. */ presignedOverride?: PresignedUploadInfo + /** Required when context is `execution`; forwarded to the multipart route to scope the storage key. */ + workflowId?: string + /** Required when context is `execution`; forwarded to the multipart route to scope the storage key. */ + executionId?: string signal?: AbortSignal onProgress?: (event: UploadProgressEvent) => void } @@ -537,8 +557,17 @@ export interface RunUploadStrategyOptions { export const runUploadStrategy = async ( opts: RunUploadStrategyOptions ): Promise => { - const { file, presignedEndpoint, presignedOverride, workspaceId, context, signal, onProgress } = - opts + const { + file, + presignedEndpoint, + presignedOverride, + workspaceId, + context, + workflowId, + executionId, + signal, + onProgress, + } = opts const contentType = getFileContentType(file) if (presignedOverride && !presignedOverride.directUploadSupported) { @@ -550,6 +579,8 @@ export const runUploadStrategy = async ( file, workspaceId, context, + workflowId, + executionId, signal, onProgress, }) diff --git a/bun.lock b/bun.lock index 3c658d61d32..9c5e1dedc68 100644 --- a/bun.lock +++ b/bun.lock @@ -1,5 +1,6 @@ { "lockfileVersion": 1, + "configVersion": 0, "workspaces": { "": { "name": "simstudio", From 6cb779601a6fef192b04ab0d4942a7c45d3a7287 Mon Sep 17 00:00:00 2001 From: Vikhyath Mondreti Date: Thu, 7 May 2026 20:34:53 -0700 Subject: [PATCH 16/33] feat(search-replace): search & replace, cut, deploy modal ui flicker (#4507) * feat(search): workflow search and replace * fix alignment * fix hidden fields bug * fix loops/parallel badge case * resource resolver * add cut * update docs * address comments * make source code for func blocks dispay resolved code instead * fix match issue * fix padding --- .../docs/en/keyboard-shortcuts/index.mdx | 4 +- apps/realtime/src/database/operations.ts | 125 +++- 
apps/realtime/src/handlers/subblocks.ts | 37 +- .../src/middleware/permissions.test.ts | 15 + apps/realtime/src/middleware/permissions.ts | 1 + .../[workspaceId]/utils/commands-utils.ts | 8 +- .../components/block-menu/block-menu.tsx | 13 + .../components/canvas-menu/canvas-menu.tsx | 18 +- .../components/deploy-modal/deploy-modal.tsx | 11 +- .../panel/components/deploy/deploy.tsx | 22 +- .../editor/components/sub-block/sub-block.tsx | 21 +- .../subflow-editor/subflow-editor.tsx | 70 ++- .../panel/components/editor/editor.tsx | 67 ++- .../panel/components/toolbar/toolbar.tsx | 6 +- .../w/[workflowId]/components/panel/panel.tsx | 3 +- ...e-workflow-resource-replacement-options.ts | 51 ++ ...use-workflow-search-reference-hydration.ts | 106 ++++ .../search-replace/replacement-controls.tsx | 85 +++ .../workflow-search-replace.tsx | 562 ++++++++++++++++++ .../[workspaceId]/w/[workflowId]/workflow.tsx | 107 +++- apps/sim/executor/execution/block-executor.ts | 12 +- .../function/function-handler.test.ts | 10 +- .../handlers/function/function-handler.ts | 11 +- apps/sim/executor/variables/resolver.test.ts | 14 +- apps/sim/executor/variables/resolver.ts | 45 +- .../queries/workflow-search-replace.test.ts | 53 ++ .../hooks/queries/workflow-search-replace.ts | 290 +++++++++ apps/sim/hooks/use-collaborative-workflow.ts | 96 +++ apps/sim/hooks/use-undo-redo.ts | 224 ++++++- .../tools/handlers/platform-actions.ts | 3 +- .../workflows/search-replace/dependencies.ts | 17 + .../workflows/search-replace/indexer.test.ts | 356 +++++++++++ .../lib/workflows/search-replace/indexer.ts | 360 +++++++++++ .../search-replace/reference-registry.ts | 152 +++++ .../search-replace/replacement-validation.ts | 97 +++ .../search-replace/replacements.test.ts | 333 +++++++++++ .../workflows/search-replace/replacements.ts | 295 +++++++++ .../search-replace/resource-resolvers.ts | 108 ++++ .../search-replace/search-replace.fixtures.ts | 138 +++++ .../workflows/search-replace/state.test.ts | 46 ++ 
.../sim/lib/workflows/search-replace/state.ts | 17 + .../search-replace/subflow-fields.ts | 159 +++++ .../sim/lib/workflows/search-replace/types.ts | 151 +++++ .../workflows/search-replace/value-walker.ts | 77 +++ apps/sim/stores/panel/editor/store.ts | 23 +- apps/sim/stores/undo-redo/types.ts | 20 + apps/sim/stores/undo-redo/utils.test.ts | 57 +- apps/sim/stores/undo-redo/utils.ts | 23 + .../stores/workflow-search-replace/store.ts | 42 ++ .../stores/workflows/workflow/utils.test.ts | 54 +- apps/sim/stores/workflows/workflow/utils.ts | 18 +- packages/realtime-protocol/src/constants.ts | 2 + packages/realtime-protocol/src/schemas.ts | 19 + packages/workflow-types/src/workflow.ts | 49 ++ 54 files changed, 4526 insertions(+), 177 deletions(-) create mode 100644 apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/search-replace/hooks/use-workflow-resource-replacement-options.ts create mode 100644 apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/search-replace/hooks/use-workflow-search-reference-hydration.ts create mode 100644 apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/search-replace/replacement-controls.tsx create mode 100644 apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/search-replace/workflow-search-replace.tsx create mode 100644 apps/sim/hooks/queries/workflow-search-replace.test.ts create mode 100644 apps/sim/hooks/queries/workflow-search-replace.ts create mode 100644 apps/sim/lib/workflows/search-replace/dependencies.ts create mode 100644 apps/sim/lib/workflows/search-replace/indexer.test.ts create mode 100644 apps/sim/lib/workflows/search-replace/indexer.ts create mode 100644 apps/sim/lib/workflows/search-replace/reference-registry.ts create mode 100644 apps/sim/lib/workflows/search-replace/replacement-validation.ts create mode 100644 apps/sim/lib/workflows/search-replace/replacements.test.ts create mode 100644 apps/sim/lib/workflows/search-replace/replacements.ts create mode 100644 
apps/sim/lib/workflows/search-replace/resource-resolvers.ts create mode 100644 apps/sim/lib/workflows/search-replace/search-replace.fixtures.ts create mode 100644 apps/sim/lib/workflows/search-replace/state.test.ts create mode 100644 apps/sim/lib/workflows/search-replace/state.ts create mode 100644 apps/sim/lib/workflows/search-replace/subflow-fields.ts create mode 100644 apps/sim/lib/workflows/search-replace/types.ts create mode 100644 apps/sim/lib/workflows/search-replace/value-walker.ts create mode 100644 apps/sim/stores/workflow-search-replace/store.ts diff --git a/apps/docs/content/docs/en/keyboard-shortcuts/index.mdx b/apps/docs/content/docs/en/keyboard-shortcuts/index.mdx index ad9ece3b50f..c5459d6369b 100644 --- a/apps/docs/content/docs/en/keyboard-shortcuts/index.mdx +++ b/apps/docs/content/docs/en/keyboard-shortcuts/index.mdx @@ -31,10 +31,12 @@ Speed up your workflow building with these keyboard shortcuts and mouse controls | `Mod` + `Z` | Undo | | `Mod` + `Shift` + `Z` | Redo | | `Mod` + `C` | Copy selected blocks | +| `Mod` + `X` | Cut selected blocks | | `Mod` + `V` | Paste blocks | | `Delete` or `Backspace` | Delete selected blocks or edges | | `Shift` + `L` | Auto-layout canvas | | `Mod` + `Shift` + `F` | Fit to view | +| `Mod` + `F` | Open workflow search and replace | | `Mod` + `Shift` + `Enter` | Accept Copilot changes | ## Panel Navigation @@ -43,7 +45,7 @@ These shortcuts switch between panel tabs on the right side of the canvas. 
| Shortcut | Action | |----------|--------| -| `Mod` + `F` | Focus Toolbar search | +| `Mod` + `Alt` + `F` | Focus Toolbar search | ## Global Navigation diff --git a/apps/realtime/src/database/operations.ts b/apps/realtime/src/database/operations.ts index 279b56a9b03..14fa8639eaf 100644 --- a/apps/realtime/src/database/operations.ts +++ b/apps/realtime/src/database/operations.ts @@ -8,6 +8,7 @@ import { EDGE_OPERATIONS, EDGES_OPERATIONS, OPERATION_TARGETS, + SUBBLOCK_OPERATIONS, SUBFLOW_OPERATIONS, VARIABLE_OPERATIONS, WORKFLOW_OPERATIONS, @@ -15,6 +16,7 @@ import { import { getActiveWorkflowContext } from '@sim/workflow-authz' import { loadWorkflowFromNormalizedTablesRaw } from '@sim/workflow-persistence/load' import { mergeSubBlockValues } from '@sim/workflow-persistence/subblocks' +import { isWorkflowBlockProtected } from '@sim/workflow-types/workflow' import { and, eq, inArray, isNull, or, sql } from 'drizzle-orm' import { drizzle } from 'drizzle-orm/postgres-js' import postgres from 'postgres' @@ -46,26 +48,6 @@ interface DbBlockRef { data: unknown } -/** - * Checks if a block is protected (locked or inside a locked ancestor). - * Works with raw DB records. - */ -function isDbBlockProtected(blockId: string, blocksById: Record): boolean { - const block = blocksById[blockId] - if (!block) return false - if (block.locked) return true - const visited = new Set() - let parentId = (block.data as Record | null)?.parentId as string | undefined - while (parentId && !visited.has(parentId)) { - visited.add(parentId) - if (blocksById[parentId]?.locked) return true - parentId = (blocksById[parentId]?.data as Record | null)?.parentId as - | string - | undefined - } - return false -} - /** * Finds all descendant block IDs of a container (recursive). * Works with raw DB block arrays. 
@@ -251,6 +233,9 @@ export async function persistWorkflowOperation(workflowId: string, operation: an case OPERATION_TARGETS.SUBFLOW: await handleSubflowOperationTx(tx, workflowId, op, payload) break + case OPERATION_TARGETS.SUBBLOCK: + await handleSubblockOperationTx(tx, workflowId, op, payload) + break case OPERATION_TARGETS.VARIABLE: await handleVariableOperationTx(tx, workflowId, op, payload) break @@ -876,7 +861,7 @@ async function handleBlocksOperationTx( ) // Filter out protected blocks from deletion request - const deletableIds = ids.filter((id) => !isDbBlockProtected(id, blocksById)) + const deletableIds = ids.filter((id) => !isWorkflowBlockProtected(id, blocksById)) if (deletableIds.length === 0) { logger.info('All requested blocks are protected, skipping deletion') return @@ -991,14 +976,14 @@ async function handleBlocksOperationTx( // Collect all blocks to toggle including descendants of containers for (const id of blockIds) { const block = blocksById[id] - if (!block || isDbBlockProtected(id, blocksById)) continue + if (!block || isWorkflowBlockProtected(id, blocksById)) continue blocksToToggle.add(id) // If it's a loop or parallel, also include all non-locked descendants if (block.type === 'loop' || block.type === 'parallel') { for (const descId of findDbDescendants(id, allBlocks)) { - if (!isDbBlockProtected(descId, blocksById)) { + if (!isWorkflowBlockProtected(descId, blocksById)) { blocksToToggle.add(descId) } } @@ -1053,7 +1038,7 @@ async function handleBlocksOperationTx( // Filter to only toggle handles on unprotected blocks const blocksToToggle = blockIds.filter( - (id) => blocksById[id] && !isDbBlockProtected(id, blocksById) + (id) => blocksById[id] && !isWorkflowBlockProtected(id, blocksById) ) if (blocksToToggle.length === 0) { logger.info('All requested blocks are protected, skipping handles toggle') @@ -1165,13 +1150,13 @@ async function handleBlocksOperationTx( if (!id) continue // Skip protected blocks (locked or inside locked container) 
- if (isDbBlockProtected(id, blocksById)) { + if (isWorkflowBlockProtected(id, blocksById)) { logger.info(`Skipping block ${id} parent update - block is protected`) continue } // Skip if trying to move into a locked container (or any of its ancestors) - if (parentId && isDbBlockProtected(parentId, blocksById)) { + if (parentId && isWorkflowBlockProtected(parentId, blocksById)) { logger.info(`Skipping block ${id} parent update - target parent ${parentId} is protected`) continue } @@ -1295,7 +1280,7 @@ async function handleEdgeOperationTx(tx: any, workflowId: string, operation: str } } - if (isDbBlockProtected(payload.target, blocksById)) { + if (isWorkflowBlockProtected(payload.target, blocksById)) { logger.info(`Skipping edge add - target block is protected`) break } @@ -1383,7 +1368,7 @@ async function handleEdgeOperationTx(tx: any, workflowId: string, operation: str } } - if (isDbBlockProtected(edgeToRemove.targetBlockId, blocksById)) { + if (isWorkflowBlockProtected(edgeToRemove.targetBlockId, blocksById)) { logger.info(`Skipping edge remove - target block is protected`) break } @@ -1494,7 +1479,7 @@ async function handleEdgesOperationTx( } const safeEdgeIds = edgesToRemove - .filter((e: EdgeToRemove) => !isDbBlockProtected(e.targetBlockId, blocksById)) + .filter((e: EdgeToRemove) => !isWorkflowBlockProtected(e.targetBlockId, blocksById)) .map((e: EdgeToRemove) => e.id) if (safeEdgeIds.length === 0) { @@ -1581,7 +1566,7 @@ async function handleEdgesOperationTx( // Filter edges - only add edges where target block is not protected const safeEdges = (edges as Array>).filter( - (e) => !isDbBlockProtected(e.target as string, blocksById) + (e) => !isWorkflowBlockProtected(e.target as string, blocksById) ) if (safeEdges.length === 0) { @@ -1734,6 +1719,86 @@ async function handleSubflowOperationTx( } } +function valuesEqual(left: unknown, right: unknown): boolean { + return JSON.stringify(left) === JSON.stringify(right) +} + +// Subblock operations - targeted value 
updates without replacing workflow state +async function handleSubblockOperationTx( + tx: any, + workflowId: string, + operation: string, + payload: any +) { + switch (operation) { + case SUBBLOCK_OPERATIONS.BATCH_UPDATE: { + const updates = payload.updates + if (!Array.isArray(updates) || updates.length === 0) { + return + } + + const allBlocks = await tx + .select({ + id: workflowBlocks.id, + subBlocks: workflowBlocks.subBlocks, + locked: workflowBlocks.locked, + data: workflowBlocks.data, + }) + .from(workflowBlocks) + .where(eq(workflowBlocks.workflowId, workflowId)) + + type SubblockUpdateBlockRecord = (typeof allBlocks)[number] + const blocksById: Record = Object.fromEntries( + allBlocks.map((block: SubblockUpdateBlockRecord) => [block.id, block]) + ) + + for (const update of updates) { + const { blockId, subblockId, value, expectedValue } = update + if (!blockId || !subblockId) { + throw new Error('Missing required fields for subblock batch update') + } + + const block = blocksById[blockId] + if (!block) { + throw new Error(`Block ${blockId} not found`) + } + + if (isWorkflowBlockProtected(blockId, blocksById)) { + throw new Error(`Block ${blockId} is locked or inside a locked container`) + } + + const subBlocks = { ...((block.subBlocks as Record) || {}) } + const currentSubBlock = subBlocks[subblockId] + const currentValue = currentSubBlock?.value + if (expectedValue !== undefined && !valuesEqual(currentValue, expectedValue)) { + throw new Error(`Subblock ${blockId}.${subblockId} changed since replacement was planned`) + } + + subBlocks[subblockId] = currentSubBlock + ? 
{ ...currentSubBlock, value } + : { id: subblockId, type: 'unknown', value } + + await tx + .update(workflowBlocks) + .set({ + subBlocks, + updatedAt: new Date(), + }) + .where(and(eq(workflowBlocks.id, blockId), eq(workflowBlocks.workflowId, workflowId))) + + blocksById[blockId] = { ...block, subBlocks } + } + + logger.debug(`Batch updated ${updates.length} subblocks for workflow ${workflowId}`) + break + } + + default: + logger.warn(`Unknown subblock operation: ${operation}`) + throw new Error(`Unsupported subblock operation: ${operation}`) + } +} + // Variable operations - updates workflow.variables JSON field async function handleVariableOperationTx( tx: any, diff --git a/apps/realtime/src/handlers/subblocks.ts b/apps/realtime/src/handlers/subblocks.ts index b3be99e7457..4650f8487cc 100644 --- a/apps/realtime/src/handlers/subblocks.ts +++ b/apps/realtime/src/handlers/subblocks.ts @@ -3,6 +3,7 @@ import { workflow, workflowBlocks } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { SUBBLOCK_OPERATIONS } from '@sim/realtime-protocol/constants' import { assertWorkflowMutable, WorkflowLockedError } from '@sim/workflow-authz' +import { isWorkflowBlockProtected } from '@sim/workflow-types/workflow' import { and, eq } from 'drizzle-orm' import type { AuthenticatedSocket } from '@/middleware/auth' import { checkRolePermission } from '@/middleware/permissions' @@ -273,45 +274,33 @@ async function flushSubblockUpdate( let updateSuccessful = false let blockLocked = false await db.transaction(async (tx) => { - const [block] = await tx + const allBlocks = await tx .select({ + id: workflowBlocks.id, subBlocks: workflowBlocks.subBlocks, locked: workflowBlocks.locked, data: workflowBlocks.data, }) .from(workflowBlocks) - .where(and(eq(workflowBlocks.id, blockId), eq(workflowBlocks.workflowId, workflowId))) - .limit(1) + .where(eq(workflowBlocks.workflowId, workflowId)) + type SubblockUpdateBlockRecord = (typeof allBlocks)[number] + const blocksById: 
Record = Object.fromEntries( + allBlocks.map((block: SubblockUpdateBlockRecord) => [block.id, block]) + ) + const block = blocksById[blockId] if (!block) { return } - // Check if block is locked directly - if (block.locked) { - logger.info(`Skipping subblock update - block ${blockId} is locked`) + if (isWorkflowBlockProtected(blockId, blocksById)) { + logger.info( + `Skipping subblock update - block ${blockId} is locked or inside a locked container` + ) blockLocked = true return } - // Check if block is inside a locked parent container - const parentId = (block.data as Record | null)?.parentId as - | string - | undefined - if (parentId) { - const [parentBlock] = await tx - .select({ locked: workflowBlocks.locked }) - .from(workflowBlocks) - .where(and(eq(workflowBlocks.id, parentId), eq(workflowBlocks.workflowId, workflowId))) - .limit(1) - - if (parentBlock?.locked) { - logger.info(`Skipping subblock update - parent ${parentId} is locked`) - blockLocked = true - return - } - } - const subBlocks = (block.subBlocks as any) || {} if (!subBlocks[subblockId]) { subBlocks[subblockId] = { id: subblockId, type: 'unknown', value } diff --git a/apps/realtime/src/middleware/permissions.test.ts b/apps/realtime/src/middleware/permissions.test.ts index 2d8cd12999c..0aa9cada905 100644 --- a/apps/realtime/src/middleware/permissions.test.ts +++ b/apps/realtime/src/middleware/permissions.test.ts @@ -51,6 +51,11 @@ describe('checkRolePermission', () => { const result = checkRolePermission('admin', 'replace-state') expectPermissionAllowed(result) }) + + it('should allow subblock-batch-update operation', () => { + const result = checkRolePermission('admin', 'subblock-batch-update') + expectPermissionAllowed(result) + }) }) describe('write role', () => { @@ -77,6 +82,11 @@ describe('checkRolePermission', () => { const result = checkRolePermission('write', 'update-position') expectPermissionAllowed(result) }) + + it('should allow subblock-batch-update operation', () => { + const result 
= checkRolePermission('write', 'subblock-batch-update') + expectPermissionAllowed(result) + }) }) describe('read role', () => { @@ -111,6 +121,11 @@ describe('checkRolePermission', () => { expectPermissionDenied(result, 'read') }) + it('should deny subblock-batch-update operation for read role', () => { + const result = checkRolePermission('read', 'subblock-batch-update') + expectPermissionDenied(result, 'read') + }) + it('should deny toggle-enabled operation for read role', () => { const result = checkRolePermission('read', 'toggle-enabled') expectPermissionDenied(result, 'read') diff --git a/apps/realtime/src/middleware/permissions.ts b/apps/realtime/src/middleware/permissions.ts index dcc893b1478..661f4d52d44 100644 --- a/apps/realtime/src/middleware/permissions.ts +++ b/apps/realtime/src/middleware/permissions.ts @@ -46,6 +46,7 @@ const WRITE_OPERATIONS: string[] = [ SUBFLOW_OPERATIONS.UPDATE, // Subblock operations SUBBLOCK_OPERATIONS.UPDATE, + SUBBLOCK_OPERATIONS.BATCH_UPDATE, // Variable operations VARIABLE_OPERATIONS.UPDATE, // Workflow operations diff --git a/apps/sim/app/workspace/[workspaceId]/utils/commands-utils.ts b/apps/sim/app/workspace/[workspaceId]/utils/commands-utils.ts index 8bdbeac2577..bb237c157c9 100644 --- a/apps/sim/app/workspace/[workspaceId]/utils/commands-utils.ts +++ b/apps/sim/app/workspace/[workspaceId]/utils/commands-utils.ts @@ -14,6 +14,7 @@ export type CommandId = // | 'goto-templates' | 'goto-logs' | 'open-search' + | 'open-workflow-search-replace' | 'run-workflow' | 'clear-terminal-console' | 'focus-toolbar-search' @@ -79,6 +80,11 @@ export const COMMAND_DEFINITIONS: Record = { shortcut: 'Mod+K', allowInEditable: true, }, + 'open-workflow-search-replace': { + id: 'open-workflow-search-replace', + shortcut: 'Mod+F', + allowInEditable: true, + }, 'run-workflow': { id: 'run-workflow', shortcut: 'Mod+Enter', @@ -91,7 +97,7 @@ export const COMMAND_DEFINITIONS: Record = { }, 'focus-toolbar-search': { id: 'focus-toolbar-search', - 
shortcut: 'Mod+F', + shortcut: 'Mod+Alt+F', allowInEditable: false, }, 'clear-notifications': { diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/block-menu/block-menu.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/block-menu/block-menu.tsx index 79e8464bf49..f77a76394c7 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/block-menu/block-menu.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/block-menu/block-menu.tsx @@ -35,6 +35,7 @@ export interface BlockMenuProps { onClose: () => void selectedBlocks: BlockInfo[] onCopy: () => void + onCut: () => void onPaste: () => void onDuplicate: () => void onDelete: () => void @@ -74,6 +75,7 @@ export function BlockMenu({ onClose, selectedBlocks, onCopy, + onCut, onPaste, onDuplicate, onDelete, @@ -162,6 +164,17 @@ export function BlockMenu({ Copy ⌘C + { + onCut() + onClose() + }} + > + Cut + ⌘X + void onFitToView: () => void onOpenLogs: () => void + onOpenSearchReplace: () => void onToggleVariables: () => void onToggleChat: () => void onToggleWorkflowLock?: () => void @@ -59,6 +60,7 @@ export function CanvasMenu({ onAutoLayout, onFitToView, onOpenLogs, + onOpenSearchReplace, onToggleVariables, onToggleChat, onToggleWorkflowLock, @@ -114,9 +116,6 @@ export function CanvasMenu({ Redo ⌘⇧Z - - {/* Edit and creation actions */} - Paste ⌘V + + {/* Edit and creation actions */} + + { + onOpenSearchReplace() + onClose() + }} + > + Search and replace + ⌘F + { diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/deploy-modal.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/deploy-modal.tsx index 1103e3d7fe6..8e0b53a195f 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/deploy-modal.tsx +++ 
b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/deploy-modal.tsx @@ -1050,13 +1050,12 @@ export function DeployModal({ interface StatusBadgeProps { isWarning: boolean - isSyncing?: boolean } -function StatusBadge({ isWarning, isSyncing = false }: StatusBadgeProps) { - const label = isSyncing ? 'Syncing changes' : isWarning ? 'Update deployment' : 'Live' +function StatusBadge({ isWarning }: StatusBadgeProps) { + const label = isWarning ? 'Update deployment' : 'Live' return ( - + {label} ) @@ -1111,7 +1110,7 @@ function GeneralFooter({ const isDeployBlocked = deployReadiness.isBlocked || isDeploymentSettling || isSubmitting || isUndeploying const blockedMessage = - deployReadiness.isBlocked && !isDeploymentSettling && !isSubmitting && !isUndeploying + deployReadiness.isBlocked && !deployReadiness.isSyncing && !isSubmitting && !isUndeploying ? deployReadiness.tooltip : null const deployActionLoading = isSubmitting || isDeploymentSettling @@ -1133,7 +1132,7 @@ function GeneralFooter({ return (
    - + {blockedMessage && (
    {blockedMessage}
    )} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/deploy.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/deploy.tsx index ec630e08781..cc5a65825f0 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/deploy.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/deploy.tsx @@ -1,7 +1,7 @@ 'use client' import { useState } from 'react' -import { Button, Loader, Tooltip } from '@/components/emcn' +import { Button, Tooltip } from '@/components/emcn' import { DeployModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/deploy-modal' import { useChangeDetection, @@ -65,12 +65,16 @@ export function Deploy({ isDeploying || !canDeploy || isEmpty || - isDeploymentSettling || - (!isDeployed && deployReadiness.isBlocked) + (!isDeployed && deployReadiness.isBlocked && !deployReadiness.isSyncing) const onDeployClick = async () => { if (disabled || !canDeploy || !activeWorkflowId) return + if (isDeploymentSettling) { + setIsModalOpen(true) + return + } + const result = await handleDeployClick() if (result.shouldOpenModal) { setIsModalOpen(true) @@ -106,9 +110,6 @@ export function Deploy({ } const getButtonLabel = () => { - if (isDeployed && (changeDetected || isDeploymentSettling)) { - return 'Update' - } if (changeDetected) { return 'Update' } @@ -126,18 +127,11 @@ export function Deploy({ diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx index a354f177e08..db50e5b3200 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx +++ 
b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx @@ -72,6 +72,8 @@ const FOLDER_OVERRIDES: SelectorOverrides = { }, } +const WORKFLOW_SEARCH_CURRENT_MATCH_CLASS = 'rounded-md bg-orange-400 px-1 py-0.5' + /** * Interface for wand control handlers exposed by sub-block inputs */ @@ -103,6 +105,7 @@ interface SubBlockProps { labelSuffix?: React.ReactNode /** Provides sibling values for dependency resolution in non-preview contexts (e.g. tool-input) */ dependencyContext?: Record + isSearchHighlighted?: boolean } /** @@ -229,6 +232,7 @@ const renderLabel = ( onCopy: () => void }, labelSuffix?: React.ReactNode, + isSearchHighlighted?: boolean, externalLink?: { show: boolean onClick: () => void @@ -248,7 +252,11 @@ const renderLabel = ( return (
    onContextMenu(e, row)}> onContextMenu(e, row)}> + + {columns.map((column, colIndex) => { + const inRange = + sel !== null && + rowIndex >= sel.startRow && + rowIndex <= sel.endRow && + colIndex >= sel.startCol && + colIndex <= sel.endCol + const isAnchor = sel !== null && rowIndex === sel.anchorRow && colIndex === sel.anchorCol + const isEditing = editingColumnName === column.name + const isHighlighted = inRange || isRowChecked + + const isTopEdge = inRange ? rowIndex === sel!.startRow : isRowChecked + const isBottomEdge = inRange ? rowIndex === sel!.endRow : isRowChecked + const isLeftEdge = inRange ? colIndex === sel!.startCol : colIndex === 0 + const isRightEdge = inRange ? colIndex === sel!.endCol : colIndex === columns.length - 1 + + return ( + + ) + })} + + ) +}, dataRowPropsAreEqual) diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/table-grid.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/table-grid.tsx index 7e42350e6be..90dfb738a01 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/table-grid.tsx +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/table-grid.tsx @@ -1,16 +1,18 @@ 'use client' -import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react' +import type React from 'react' +import { useCallback, useEffect, useMemo, useRef, useState } from 'react' import { createLogger } from '@sim/logger' import { useParams } from 'next/navigation' import { usePostHog } from 'posthog-js/react' -import { Button, Checkbox, Skeleton, toast } from '@/components/emcn' -import { PlayOutline, Plus, Square, TableX } from '@/components/emcn/icons' +import { Skeleton, toast } from '@/components/emcn' +import { TableX } from '@/components/emcn/icons' import type { RunMode } from '@/lib/api/contracts/tables' import { cn } from '@/lib/core/utils/cn' import { captureEvent } from '@/lib/posthog/client' -import 
type { ColumnDefinition, TableRow as TableRowType, WorkflowGroup } from '@/lib/table' -import { getUnmetGroupDeps, isExecInFlight } from '@/lib/table/deps' +import type { ColumnDefinition, TableRow as TableRowType } from '@/lib/table' +import { TABLE_LIMITS } from '@/lib/table/constants' +import { isExecInFlight } from '@/lib/table/deps' import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider' import { useAddTableColumn, @@ -39,89 +41,45 @@ import { ContextMenu } from '../context-menu' import { NewColumnDropdown } from '../new-column-dropdown' import { RunStatusControl } from '../run-status-control' import type { WorkflowConfig } from '../workflow-sidebar' -import { CellContent, ExpandedCellPopover } from './cells' -import { COL_WIDTH, SELECTION_TINT_BG } from './constants' +import { ExpandedCellPopover } from './cells' +import { ADD_COL_WIDTH, CELL_HEADER_CHECKBOX, COL_WIDTH, SELECTION_TINT_BG } from './constants' +import { DataRow } from './data-row' import { ColumnHeaderMenu, WorkflowGroupMetaCell } from './headers' +import { + AddRowButton, + SelectAllCheckbox, + TableBodySkeleton, + TableColGroup, +} from './table-primitives' import type { DisplayColumn } from './types' import { buildHeaderGroups, type CellCoord, + checkboxColLayout, classifyExecStatusMix, collectRowSnapshots, computeNormalizedSelection, type ExecStatusMix, expandToDisplayColumns, moveCell, - type NormalizedSelection, - readExecution, + ROW_SELECTION_ALL, + ROW_SELECTION_NONE, + type RowSelection, + rowSelectionCoversAll, + rowSelectionIncludes, + rowSelectionIsEmpty, + rowSelectionMaterialize, } from './utils' const logger = createLogger('TableView') -type RowSelection = { kind: 'none' } | { kind: 'some'; ids: Set } | { kind: 'all' } - -const ROW_SELECTION_NONE: RowSelection = { kind: 'none' } -const ROW_SELECTION_ALL: RowSelection = { kind: 'all' } - -function rowSelectionIncludes(sel: RowSelection, id: string): boolean { - if 
(sel.kind === 'all') return true - if (sel.kind === 'some') return sel.ids.has(id) - return false -} - -function rowSelectionIsEmpty(sel: RowSelection): boolean { - if (sel.kind === 'none') return true - if (sel.kind === 'some') return sel.ids.size === 0 - return false -} - -function rowSelectionMaterialize(sel: RowSelection, rows: TableRowType[]): Set { - if (sel.kind === 'all') return new Set(rows.map((r) => r.id)) - if (sel.kind === 'some') return new Set(sel.ids) - return new Set() -} - -function rowSelectionCoversAll(sel: RowSelection, rows: TableRowType[]): boolean { - if (rows.length === 0) return false - if (sel.kind === 'all') return true - if (sel.kind === 'none') return false - if (sel.ids.size < rows.length) return false - for (const r of rows) if (!sel.ids.has(r.id)) return false - return true -} - const COL_WIDTH_MIN = 80 const COL_WIDTH_AUTO_FIT_MAX = 1000 -const ADD_COL_WIDTH = 120 - -/** Returns sticky row-number column dimensions sized to the digit count of `maxRows`. */ -function checkboxColLayout( - maxRows: number, - hasWorkflowCols: boolean -): { colWidth: number; numDivWidth: number } { - const digits = maxRows > 0 ? Math.floor(Math.log10(maxRows)) + 1 : 1 - const numDivWidth = Math.max(20, digits * 8 + 4) - const colWidth = Math.max(32, numDivWidth + 8) + (hasWorkflowCols ? 
16 : 0) - return { colWidth, numDivWidth } -} const SKELETON_COL_COUNT = 4 -const SKELETON_ROW_COUNT = 10 const ROW_HEIGHT_ESTIMATE = 35 -const CELL = 'border-[var(--border)] border-r border-b px-2 py-[7px] align-middle select-none' -const CELL_CHECKBOX = - 'sticky left-0 z-[6] border-[var(--border)] border-r border-b bg-[var(--bg)] px-1 py-[7px] align-middle select-none' const CELL_HEADER = 'border-[var(--border)] border-r border-b bg-[var(--bg)] px-2 py-[7px] text-left align-middle' -const CELL_HEADER_CHECKBOX = - 'sticky left-0 z-[12] border-[var(--border)] border-r border-b bg-[var(--bg)] px-1 py-[7px] text-center align-middle' -// Fixed height (not min-) so a Badge-rendered status pill doesn't make the row -// grow vs a plain-text neighbor. Sized to comfortably contain the badge; the -// flex centers plain text + badges on the same baseline. -const CELL_CONTENT = - 'relative flex h-[22px] min-w-0 items-center overflow-clip text-ellipsis whitespace-nowrap text-small' -const SELECTION_OVERLAY = - 'pointer-events-none absolute -top-px -right-px -bottom-px z-[5] border-[2px] border-[var(--selection)]' /** * Snapshot of grid selection state the wrapper needs to render ``. @@ -139,7 +97,7 @@ export interface SelectionSnapshot { hasWorkflowColumns: boolean /** Cells the Play / Refresh / Stop buttons act on. Null when the selection * contains no workflow output cells. */ - selectedRunScope: { groupIds: string[]; rowIds: string[] } | null + selectedRunScope: { groupIds: string[]; rowIds: string[]; allRows: boolean } | null /** Drives Play (`hasIncompleteOrFailed`) / Refresh (`hasCompleted`) / * Stop (`hasInFlight`) visibility on the action bar. */ selectionStats: ExecStatusMix @@ -234,6 +192,32 @@ interface TableGridProps { > } +/** + * Split updates into chunks bounded by the server batch-size limit, dispatching + * up to 3 chunks concurrently. 
Throws on first failure — `Promise.all` rejects + * immediately, so partial success cannot leave the table in an ambiguous state. + */ +async function chunkBatchUpdates( + updates: Array<{ rowId: string; data: Record }>, + mutateAsync: (args: { + updates: Array<{ rowId: string; data: Record }> + }) => Promise +): Promise { + const size = TABLE_LIMITS.MAX_BULK_OPERATION_SIZE + const chunks: Array }>> = [] + for (let i = 0; i < updates.length; i += size) { + chunks.push(updates.slice(i, i + size)) + } + let cursor = 0 + await Promise.all( + Array.from({ length: Math.min(3, chunks.length) }, async () => { + while (cursor < chunks.length) { + await mutateAsync({ updates: chunks[cursor++]! }) + } + }) + ) +} + export function TableGrid({ workspaceId: propWorkspaceId, tableId: propTableId, @@ -313,6 +297,7 @@ export function TableGrid({ tableWorkflowGroups, workflowStates, columnSourceInfo, + ensureAllRowsLoaded, } = useTable({ workspaceId, tableId, queryOptions }) const fetchNextPageRef = useRef(fetchNextPage) @@ -321,6 +306,8 @@ export function TableGrid({ hasNextPageRef.current = hasNextPage const isFetchingNextPageRef = useRef(isFetchingNextPage) isFetchingNextPageRef.current = isFetchingNextPage + const ensureAllRowsLoadedRef = useRef(ensureAllRowsLoaded) + ensureAllRowsLoadedRef.current = ensureAllRowsLoaded const isAppendingRowRef = useRef(false) const userPermissions = useUserPermissionsContext() @@ -350,12 +337,9 @@ export function TableGrid({ const deleteWorkflowGroupMutation = useDeleteWorkflowGroup({ workspaceId, tableId }) const updateWorkflowGroupMutation = useUpdateWorkflowGroup({ workspaceId, tableId }) - const handleRunColumn = useCallback( - (groupId: string, runMode: RunMode = 'all', rowIds?: string[]) => { - onRunColumn(groupId, runMode, rowIds) - }, - [onRunColumn] - ) + function handleRunColumn(groupId: string, runMode: RunMode = 'all', rowIds?: string[]) { + onRunColumn(groupId, runMode, rowIds) + } const handleViewWorkflow = useCallback( 
(workflowId: string) => { @@ -553,6 +537,12 @@ export function TableGrid({ selectionAnchorRef.current = selectionAnchor selectionFocusRef.current = selectionFocus isColumnSelectionRef.current = isColumnSelection + anchorRowIdRef.current = selectionAnchor + ? (rowsRef.current[selectionAnchor.rowIndex]?.id ?? null) + : null + focusRowIdRef.current = selectionFocus + ? (rowsRef.current[selectionFocus.rowIndex]?.id ?? null) + : null const columnRename = useInlineRename({ onSave: (columnName, newName) => { @@ -577,7 +567,7 @@ export function TableGrid({ [] ) - const handleContextMenuEditCell = useCallback(() => { + function handleContextMenuEditCell() { if (contextMenu.row && contextMenu.columnName) { const column = columnsRef.current.find((c) => c.name === contextMenu.columnName) if (column?.type === 'boolean') { @@ -592,9 +582,9 @@ export function TableGrid({ } } closeContextMenu() - }, [contextMenu.row, contextMenu.columnName, closeContextMenu]) + } - const handleContextMenuDelete = useCallback(() => { + function handleContextMenuDelete() { const contextRow = contextMenu.row if (!contextRow) { closeContextMenu() @@ -631,68 +621,54 @@ export function TableGrid({ } closeContextMenu() - }, [contextMenu.row, closeContextMenu, onRequestDeleteRows]) - - const handleInsertRow = useCallback( - (offset: 0 | 1) => { - if (!contextMenu.row) return - const position = contextMenu.row.position + offset - createRef.current( - { data: {}, position }, - { - onSuccess: (response: Record) => { - const newRowId = extractCreatedRowId(response) - if (newRowId) { - pushUndoRef.current({ type: 'create-row', rowId: newRowId, position }) - } - }, - } - ) - closeContextMenu() - }, - [contextMenu.row, closeContextMenu] - ) + } - const handleInsertRowAbove = useCallback(() => handleInsertRow(0), [handleInsertRow]) - const handleInsertRowBelow = useCallback(() => handleInsertRow(1), [handleInsertRow]) + function handleInsertRow(offset: 0 | 1) { + if (!contextMenu.row) return + const position = 
contextMenu.row.position + offset + createRef.current( + { data: {}, position }, + { + onSuccess: (response: Record) => { + const newRowId = extractCreatedRowId(response) + if (newRowId) { + pushUndoRef.current({ type: 'create-row', rowId: newRowId, position }) + } + }, + } + ) + closeContextMenu() + } - const contextMenuColumnInfo = useMemo<{ - isWorkflowColumn: boolean - executionId: string | null - hasStartedRun: boolean - }>(() => { - if (!contextMenu.row || !contextMenu.columnName) { - return { isWorkflowColumn: false, executionId: null, hasStartedRun: false } - } - const column = columnsRef.current.find((c) => c.name === contextMenu.columnName) - const groupId = column?.workflowGroupId - if (!column || !groupId) { - return { isWorkflowColumn: false, executionId: null, hasStartedRun: false } - } - const exec = contextMenu.row.executions?.[groupId] - // Only `completed` / `error` / `running` cells are guaranteed to have a - // server-side execution log. `queued` / `pending` haven't started yet; - // `cancelled` may have been cancelled before the worker ever picked the - // job up, so its executionId can't be relied on either. - const hasStartedRun = - exec?.status === 'completed' || exec?.status === 'error' || exec?.status === 'running' - return { - isWorkflowColumn: true, - executionId: exec?.executionId ?? null, - hasStartedRun, + const handleInsertRowAbove = () => handleInsertRow(0) + const handleInsertRowBelow = () => handleInsertRow(1) + + let contextMenuExecutionId: string | null = null + let contextMenuIsWorkflowColumn = false + let contextMenuHasStartedRun = false + if (contextMenu.row && contextMenu.columnName) { + const _col = columnsRef.current.find((c) => c.name === contextMenu.columnName) + const _gid = _col?.workflowGroupId + if (_col && _gid) { + const _exec = contextMenu.row.executions?.[_gid] + contextMenuIsWorkflowColumn = true + // Only `completed` / `error` / `running` cells are guaranteed to have a + // server-side execution log. 
`queued` / `pending` haven't started yet; + // `cancelled` may have been cancelled before the worker ever picked the + // job up, so its executionId can't be relied on either. + contextMenuHasStartedRun = + _exec?.status === 'completed' || _exec?.status === 'error' || _exec?.status === 'running' + contextMenuExecutionId = _exec?.executionId ?? null } - }, [contextMenu.row, contextMenu.columnName]) - const contextMenuExecutionId = contextMenuColumnInfo.executionId - const contextMenuIsWorkflowColumn = contextMenuColumnInfo.isWorkflowColumn - const contextMenuHasStartedRun = contextMenuColumnInfo.hasStartedRun + } - const handleViewExecution = useCallback(() => { + function handleViewExecution() { if (!contextMenuExecutionId) return onOpenExecutionDetails(contextMenuExecutionId) closeContextMenu() - }, [contextMenuExecutionId, onOpenExecutionDetails, closeContextMenu]) + } - const handleDuplicateRow = useCallback(() => { + function handleDuplicateRow() { const contextRow = contextMenu.row if (!contextRow) return const rowData = { ...contextRow.data } @@ -720,7 +696,7 @@ export function TableGrid({ }, } ) - }, [contextMenu.row, closeContextMenu]) + } const handleAppendRow = useCallback(async () => { if (isAppendingRowRef.current) return @@ -1373,18 +1349,6 @@ export function TableGrid({ } }, []) - useEffect(() => { - anchorRowIdRef.current = selectionAnchor - ? (rowsRef.current[selectionAnchor.rowIndex]?.id ?? null) - : null - }, [selectionAnchor]) - - useEffect(() => { - focusRowIdRef.current = selectionFocus - ? (rowsRef.current[selectionFocus.rowIndex]?.id ?? 
null) - : null - }, [selectionFocus]) - useEffect(() => { // Skip during transient empty-rows state (initial load of a new sort/filter // before keepPreviousData kicks in) — clearing here would lose the user's @@ -1464,10 +1428,6 @@ export function TableGrid({ [] ) - // The cell has `select-none` which suppresses programmatic selection, so we - // override `user-select` on the inner element until the next click. The popover - // only opens when the leaf's scroll dimensions exceed its client dimensions - // (workflow cells nest text inside a span with its own `overflow-clip`). const handleCellDoubleClick = useCallback( (rowId: string, columnName: string, columnKey: string) => { const column = columnsRef.current.find((c) => c.key === columnKey) @@ -1476,54 +1436,27 @@ export function TableGrid({ setSelectionFocus(null) setIsColumnSelection(false) - const rowArrayIndex = rowsRef.current.findIndex((r) => r.id === rowId) - const row = rowArrayIndex !== -1 ? rowsRef.current[rowArrayIndex] : null - - // Workflow-output cell with no value (status pill showing) → enter edit - // mode with a blank input so the user can write a value over the status. - // Escape cancels without persisting. - if (column?.workflowGroupId && row && canEditRef.current) { - const cellValue = row.data[columnName] - if (cellValue === null || cellValue === undefined || cellValue === '') { - setEditingCell({ rowId, columnName }) - setInitialCharacter('') - return - } + // Date/number: use inline editor (calendar picker / numeric input). 
+ if ((column?.type === 'date' || column?.type === 'number') && canEditRef.current) { + setEditingCell({ rowId, columnName }) + setInitialCharacter(null) + return } - const colIndex = columnsRef.current.findIndex((c) => c.key === columnKey) - let overflows = true - if (row && colIndex !== -1) { - const td = document.querySelector( - `[data-table-scroll] [data-row="${rowArrayIndex}"][data-col="${colIndex}"]` - ) - const inner = td?.querySelector(':scope > div:last-child') - if (inner) { - const candidates: HTMLElement[] = [inner] - const descendants = inner.querySelectorAll('*') - for (const el of descendants) candidates.push(el) - overflows = candidates.some( - (el) => el.scrollWidth > el.clientWidth + 1 || el.scrollHeight > el.clientHeight + 1 - ) - - inner.style.userSelect = 'text' - const clear = () => { - inner.style.userSelect = '' - window.removeEventListener('mousedown', clear, true) - } - window.addEventListener('mousedown', clear, true) - - const selection = window.getSelection() - if (selection) { - const range = document.createRange() - range.selectNodeContents(inner) - selection.removeAllRanges() - selection.addRange(range) + // Workflow-output cell with no value → let the user write over the status pill. 
+ if (column?.workflowGroupId && canEditRef.current) { + const row = rowsRef.current.find((r) => r.id === rowId) + if (row) { + const cellValue = row.data[columnName] + if (cellValue === null || cellValue === undefined || cellValue === '') { + setEditingCell({ rowId, columnName }) + setInitialCharacter('') + return } } } - if (overflows) setExpandedCell({ rowId, columnName, columnKey }) + setExpandedCell({ rowId, columnName, columnKey }) }, [] ) @@ -1539,10 +1472,18 @@ export function TableGrid({ const batchUpdateRef = useRef(batchUpdateRowsMutation.mutate) batchUpdateRef.current = batchUpdateRowsMutation.mutate + const batchUpdateAsyncRef = useRef(batchUpdateRowsMutation.mutateAsync) + batchUpdateAsyncRef.current = batchUpdateRowsMutation.mutateAsync const updateMetadataRef = useRef(updateMetadataMutation.mutate) updateMetadataRef.current = updateMetadataMutation.mutate + const deleteWorkflowGroupRef = useRef(deleteWorkflowGroupMutation.mutate) + deleteWorkflowGroupRef.current = deleteWorkflowGroupMutation.mutate + + const updateWorkflowGroupRef = useRef(updateWorkflowGroupMutation.mutate) + updateWorkflowGroupRef.current = updateWorkflowGroupMutation.mutate + const toggleBooleanCellRef = useRef(toggleBooleanCell) toggleBooleanCellRef.current = toggleBooleanCell @@ -1641,27 +1582,30 @@ export function TableGrid({ if (!canEditRef.current) return e.preventDefault() const rowSel = rowSelectionRef.current - const currentRows = rowsRef.current - const currentCols = columnsRef.current - const undoCells: Array<{ rowId: string; data: Record }> = [] - const batchUpdates: Array<{ rowId: string; data: Record }> = [] - for (const row of currentRows) { - if (!rowSelectionIncludes(rowSel, row.id)) continue - const updates: Record = {} - const previousData: Record = {} - for (const col of currentCols) { - previousData[col.name] = row.data[col.name] ?? 
null - updates[col.name] = null + void (async () => { + const allRows = await ensureAllRowsLoadedRef.current() + const currentCols = columnsRef.current + const undoCells: Array<{ rowId: string; data: Record }> = [] + const batchUpdates: Array<{ rowId: string; data: Record }> = [] + for (const row of allRows) { + if (!rowSelectionIncludes(rowSel, row.id)) continue + const updates: Record = {} + const previousData: Record = {} + for (const col of currentCols) { + previousData[col.name] = row.data[col.name] ?? null + updates[col.name] = null + } + undoCells.push({ rowId: row.id, data: previousData }) + batchUpdates.push({ rowId: row.id, data: updates }) } - undoCells.push({ rowId: row.id, data: previousData }) - batchUpdates.push({ rowId: row.id, data: updates }) - } - if (batchUpdates.length > 0) { - batchUpdateRef.current({ updates: batchUpdates }) - } - if (undoCells.length > 0) { - pushUndoRef.current({ type: 'clear-cells', cells: undoCells }) - } + if (undoCells.length > 0) { + pushUndoRef.current({ type: 'clear-cells', cells: undoCells }) + } + await chunkBatchUpdates(batchUpdates, batchUpdateAsyncRef.current) + })().catch((error) => { + logger.error('Failed to clear selected cells', { error }) + toast.error('Failed to clear cells — please try again') + }) return } @@ -1863,6 +1807,35 @@ export function TableGrid({ e.preventDefault() const sel = computeNormalizedSelection(anchor, selectionFocusRef.current) if (!sel) return + + if (isColumnSelectionRef.current) { + // Column-header selection spans all rows — selection bounds are capped + // to the loaded page count, so drain first then walk the full set. 
+ void (async () => { + const allRows = await ensureAllRowsLoadedRef.current() + const undoCells: Array<{ rowId: string; data: Record }> = [] + const batchUpdates: Array<{ rowId: string; data: Record }> = [] + for (const row of allRows) { + const updates: Record = {} + const previousData: Record = {} + for (let c = sel.startCol; c <= sel.endCol; c++) { + const colName = cols[c]?.name + if (!colName) continue + previousData[colName] = row.data[colName] ?? null + updates[colName] = null + } + undoCells.push({ rowId: row.id, data: previousData }) + batchUpdates.push({ rowId: row.id, data: updates }) + } + if (undoCells.length > 0) pushUndoRef.current({ type: 'clear-cells', cells: undoCells }) + await chunkBatchUpdates(batchUpdates, batchUpdateAsyncRef.current) + })().catch((error) => { + logger.error('Failed to clear column values', { error }) + toast.error('Failed to clear column values — please try again') + }) + return + } + const undoCells: Array<{ rowId: string; data: Record }> = [] const batchUpdates: Array<{ rowId: string; data: Record }> = [] for (let r = sel.startRow; r <= sel.endRow; r++) { @@ -1880,9 +1853,10 @@ export function TableGrid({ undoCells.push({ rowId: row.id, data: previousData }) batchUpdates.push({ rowId: row.id, data: updates }) } - if (batchUpdates.length > 0) { - batchUpdateRef.current({ updates: batchUpdates }) - } + void chunkBatchUpdates(batchUpdates, batchUpdateAsyncRef.current).catch((error) => { + logger.error('Failed to clear selected cells', { error }) + toast.error('Failed to clear cells — please try again') + }) if (undoCells.length > 0) { pushUndoRef.current({ type: 'clear-cells', cells: undoCells }) } @@ -1919,17 +1893,37 @@ export function TableGrid({ if (!rowSelectionIsEmpty(rowSel)) { e.preventDefault() - const lines: string[] = [] - for (const row of currentRows) { - if (!rowSelectionIncludes(rowSel, row.id)) continue - const cells: string[] = cols.map((col) => { - const value: unknown = row.data[col.name] - if (value === 
null || value === undefined) return '' - return typeof value === 'object' ? JSON.stringify(value) : String(value) - }) - lines.push(cells.join('\t')) - } - e.clipboardData?.setData('text/plain', lines.join('\n')) + void (async () => { + const allRows = await ensureAllRowsLoadedRef.current() + const lines: string[] = [] + for (const row of allRows) { + if (!rowSelectionIncludes(rowSel, row.id)) continue + const cells: string[] = cols.map((col) => { + const value: unknown = row.data[col.name] + if (value === null || value === undefined) return '' + return typeof value === 'object' ? JSON.stringify(value) : String(value) + }) + lines.push(cells.join('\t')) + } + if (!navigator.clipboard) { + toast.error('Clipboard access is unavailable in this context') + return + } + try { + await navigator.clipboard.writeText(lines.join('\n')) + } catch (err) { + if (err instanceof DOMException && err.name === 'NotAllowedError') { + toast.error( + 'Clipboard permission expired — press Cmd+C again immediately after selecting' + ) + } else { + throw err + } + } + })().catch((error) => { + logger.error('Failed to copy selected rows', { error }) + toast.error('Failed to copy — please try again') + }) return } @@ -1940,6 +1934,51 @@ export function TableGrid({ if (!sel) return e.preventDefault() + + if (isColumnSelectionRef.current) { + // Column-header copy spans all rows — drain pages first, then use async + // clipboard so we don't block the event before the drain completes. + void (async () => { + const allRows = await ensureAllRowsLoadedRef.current() + const lines: string[] = [] + for (const row of allRows) { + const cells: string[] = [] + for (let c = sel.startCol; c <= sel.endCol; c++) { + const colName = cols[c]?.name + if (!colName) continue + const value: unknown = row.data[colName] + cells.push( + value === null || value === undefined + ? '' + : typeof value === 'object' + ? 
JSON.stringify(value) + : String(value) + ) + } + lines.push(cells.join('\t')) + } + if (!navigator.clipboard) { + toast.error('Clipboard access is unavailable in this context') + return + } + try { + await navigator.clipboard.writeText(lines.join('\n')) + } catch (err) { + if (err instanceof DOMException && err.name === 'NotAllowedError') { + toast.error( + 'Clipboard permission expired — press Cmd+C again immediately after selecting' + ) + } else { + throw err + } + } + })().catch((error) => { + logger.error('Failed to copy column cells', { error }) + toast.error('Failed to copy — please try again') + }) + return + } + const lines: string[] = [] for (let r = sel.startRow; r <= sel.endRow; r++) { const cells: string[] = [] @@ -1967,68 +2006,154 @@ export function TableGrid({ const rowSel = rowSelectionRef.current const cols = columnsRef.current const currentRows = rowsRef.current - const undoCells: Array<{ rowId: string; data: Record }> = [] - const batchUpdates: Array<{ rowId: string; data: Record }> = [] if (!rowSelectionIsEmpty(rowSel)) { e.preventDefault() - const lines: string[] = [] - for (const row of currentRows) { - if (!rowSelectionIncludes(rowSel, row.id)) continue - const cells: string[] = cols.map((col) => { - const value: unknown = row.data[col.name] - if (value === null || value === undefined) return '' - return typeof value === 'object' ? JSON.stringify(value) : String(value) - }) - lines.push(cells.join('\t')) - const updates: Record = {} - const previousData: Record = {} - for (const col of cols) { - previousData[col.name] = row.data[col.name] ?? 
null - updates[col.name] = null + void (async () => { + const allRows = await ensureAllRowsLoadedRef.current() + const lines: string[] = [] + const cutUpdates: Array<{ rowId: string; data: Record }> = [] + const cutUndo: Array<{ rowId: string; data: Record }> = [] + for (const row of allRows) { + if (!rowSelectionIncludes(rowSel, row.id)) continue + const cells: string[] = cols.map((col) => { + const value: unknown = row.data[col.name] + if (value === null || value === undefined) return '' + return typeof value === 'object' ? JSON.stringify(value) : String(value) + }) + lines.push(cells.join('\t')) + const updates: Record = {} + const previousData: Record = {} + for (const col of cols) { + previousData[col.name] = row.data[col.name] ?? null + updates[col.name] = null + } + cutUndo.push({ rowId: row.id, data: previousData }) + cutUpdates.push({ rowId: row.id, data: updates }) } - undoCells.push({ rowId: row.id, data: previousData }) - batchUpdates.push({ rowId: row.id, data: updates }) - } - e.clipboardData?.setData('text/plain', lines.join('\n')) - } else { - const anchor = selectionAnchorRef.current - if (!anchor) return + if (!navigator.clipboard) { + toast.error('Clipboard access is unavailable in this context') + return + } + try { + await navigator.clipboard.writeText(lines.join('\n')) + } catch (err) { + if (err instanceof DOMException && err.name === 'NotAllowedError') { + toast.error( + 'Clipboard permission expired — press Cmd+X again immediately after selecting' + ) + return + } + throw err + } + if (cutUndo.length > 0) { + pushUndoRef.current({ type: 'clear-cells', cells: cutUndo }) + } + if (cutUpdates.length > 0) { + await chunkBatchUpdates(cutUpdates, batchUpdateAsyncRef.current) + } + })().catch((error) => { + logger.error('Failed to cut selected rows', { error }) + toast.error('Failed to cut — please try again') + }) + return + } - const sel = computeNormalizedSelection(anchor, selectionFocusRef.current) - if (!sel) return + const anchor = 
selectionAnchorRef.current + if (!anchor) return - e.preventDefault() - const lines: string[] = [] - for (let r = sel.startRow; r <= sel.endRow; r++) { - const row = currentRows[r] - if (!row) continue - const cells: string[] = [] - const updates: Record = {} - const previousData: Record = {} - for (let c = sel.startCol; c <= sel.endCol; c++) { - if (c < cols.length) { - const colName = cols[c].name + const sel = computeNormalizedSelection(anchor, selectionFocusRef.current) + if (!sel) return + + e.preventDefault() + + if (isColumnSelectionRef.current) { + // Column-header cut spans all rows — drain pages first, then use async + // clipboard so we don't block the event before the drain completes. + void (async () => { + const allRows = await ensureAllRowsLoadedRef.current() + const lines: string[] = [] + const undoCells: Array<{ rowId: string; data: Record }> = [] + const batchUpdates: Array<{ rowId: string; data: Record }> = [] + for (const row of allRows) { + const cells: string[] = [] + const updates: Record = {} + const previousData: Record = {} + for (let c = sel.startCol; c <= sel.endCol; c++) { + const colName = cols[c]?.name + if (!colName) continue const value: unknown = row.data[colName] - if (value === null || value === undefined) { - cells.push('') - } else { - cells.push(typeof value === 'object' ? JSON.stringify(value) : String(value)) - } + cells.push( + value === null || value === undefined + ? '' + : typeof value === 'object' + ? JSON.stringify(value) + : String(value) + ) previousData[colName] = row.data[colName] ?? 
null updates[colName] = null } + lines.push(cells.join('\t')) + undoCells.push({ rowId: row.id, data: previousData }) + batchUpdates.push({ rowId: row.id, data: updates }) } - lines.push(cells.join('\t')) - undoCells.push({ rowId: row.id, data: previousData }) - batchUpdates.push({ rowId: row.id, data: updates }) - } - e.clipboardData?.setData('text/plain', lines.join('\n')) + if (!navigator.clipboard) { + toast.error('Clipboard access is unavailable in this context') + return + } + try { + await navigator.clipboard.writeText(lines.join('\n')) + } catch (err) { + if (err instanceof DOMException && err.name === 'NotAllowedError') { + toast.error( + 'Clipboard permission expired — press Cmd+X again immediately after selecting' + ) + return + } + throw err + } + if (undoCells.length > 0) { + pushUndoRef.current({ type: 'clear-cells', cells: undoCells }) + } + await chunkBatchUpdates(batchUpdates, batchUpdateAsyncRef.current) + })().catch((error) => { + logger.error('Failed to cut column cells', { error }) + toast.error('Failed to cut — please try again') + }) + return } - if (batchUpdates.length > 0) { - batchUpdateRef.current({ updates: batchUpdates }) + const lines: string[] = [] + const undoCells: Array<{ rowId: string; data: Record }> = [] + const batchUpdates: Array<{ rowId: string; data: Record }> = [] + for (let r = sel.startRow; r <= sel.endRow; r++) { + const row = currentRows[r] + if (!row) continue + const cells: string[] = [] + const updates: Record = {} + const previousData: Record = {} + for (let c = sel.startCol; c <= sel.endCol; c++) { + if (c < cols.length) { + const colName = cols[c].name + const value: unknown = row.data[colName] + if (value === null || value === undefined) { + cells.push('') + } else { + cells.push(typeof value === 'object' ? JSON.stringify(value) : String(value)) + } + previousData[colName] = row.data[colName] ?? 
null + updates[colName] = null + } + } + lines.push(cells.join('\t')) + undoCells.push({ rowId: row.id, data: previousData }) + batchUpdates.push({ rowId: row.id, data: updates }) } + e.clipboardData?.setData('text/plain', lines.join('\n')) + void chunkBatchUpdates(batchUpdates, batchUpdateAsyncRef.current).catch((error) => { + logger.error('Failed to cut selected cells', { error }) + toast.error('Failed to cut — please try again') + }) if (undoCells.length > 0) { pushUndoRef.current({ type: 'clear-cells', cells: undoCells }) } @@ -2299,17 +2424,14 @@ export function TableGrid({ * Open the column-config sidebar pre-seeded with the chosen scalar type. * Nothing is persisted until the user fills in the name and hits Save. */ - const handleAddColumnOfType = useCallback( - (type: ColumnDefinition['type']) => { - onOpenColumnConfig({ mode: 'create', proposedName: generateColumnName(), type }) - }, - [generateColumnName, onOpenColumnConfig] - ) + function handleAddColumnOfType(type: ColumnDefinition['type']) { + onOpenColumnConfig({ mode: 'create', proposedName: generateColumnName(), type }) + } /** Open the workflow-config sidebar to spawn a brand-new workflow group. */ - const handleAddWorkflowColumn = useCallback(() => { + function handleAddWorkflowColumn() { onOpenWorkflowConfig({ mode: 'create', proposedName: generateColumnName() }) - }, [generateColumnName, onOpenWorkflowConfig]) + } const handleConfigureColumn = useCallback( (columnName: string) => { @@ -2331,12 +2453,9 @@ export function TableGrid({ [onOpenWorkflowConfig] ) - const handleDeleteWorkflowGroup = useCallback( - (groupId: string) => { - deleteWorkflowGroupMutation.mutate({ groupId }) - }, - [deleteWorkflowGroupMutation] - ) + const handleDeleteWorkflowGroup = useCallback((groupId: string) => { + deleteWorkflowGroupRef.current({ groupId }) + }, []) /** * Computes the names slated for deletion given a click on `columnName` and @@ -2370,35 +2489,32 @@ export function TableGrid({ * logs. 
Only valid when removing the columns leaves every affected group * with at least one surviving output — caller must check first. */ - const hideWorkflowOutputColumns = useCallback( - (names: string[]) => { - const schemaCols = schemaColumnsRef.current - const groups = workflowGroupsRef.current - const removalsByGroup = new Map>() - for (const name of names) { - const def = schemaCols.find((c) => c.name === name) - if (!def?.workflowGroupId) return false - const set = removalsByGroup.get(def.workflowGroupId) ?? new Set() - set.add(name) - removalsByGroup.set(def.workflowGroupId, set) - } - for (const [groupId, removed] of removalsByGroup) { - const group = groups.find((g) => g.id === groupId) - if (!group) return false - const remaining = group.outputs.filter((o) => !removed.has(o.columnName)) - if (remaining.length === 0) return false - updateWorkflowGroupMutation.mutate({ - groupId: group.id, - workflowId: group.workflowId, - name: group.name, - dependencies: group.dependencies, - outputs: remaining, - }) - } - return true - }, - [updateWorkflowGroupMutation] - ) + const hideWorkflowOutputColumns = useCallback((names: string[]) => { + const schemaCols = schemaColumnsRef.current + const groups = workflowGroupsRef.current + const removalsByGroup = new Map>() + for (const name of names) { + const def = schemaCols.find((c) => c.name === name) + if (!def?.workflowGroupId) return false + const set = removalsByGroup.get(def.workflowGroupId) ?? 
new Set() + set.add(name) + removalsByGroup.set(def.workflowGroupId, set) + } + for (const [groupId, removed] of removalsByGroup) { + const group = groups.find((g) => g.id === groupId) + if (!group) return false + const remaining = group.outputs.filter((o) => !removed.has(o.columnName)) + if (remaining.length === 0) return false + updateWorkflowGroupRef.current({ + groupId: group.id, + workflowId: group.workflowId, + name: group.name, + dependencies: group.dependencies, + outputs: remaining, + }) + } + return true + }, []) const handleDeleteColumn = useCallback( (columnName: string) => { @@ -2688,12 +2804,12 @@ export function TableGrid({ const selectedRunScope = useMemo(() => { if (tableWorkflowGroupIds.length === 0) return null if (!rowSelectionIsEmpty(rowSelection)) { - const rowIds: string[] = [] - for (const row of rows) { - if (rowSelectionIncludes(rowSelection, row.id)) rowIds.push(row.id) + if (rowSelection.kind === 'all') { + return { groupIds: tableWorkflowGroupIds, rowIds: rows.map((r) => r.id), allRows: true } } + const rowIds = rows.filter((r) => rowSelectionIncludes(rowSelection, r.id)).map((r) => r.id) if (rowIds.length === 0) return null - return { groupIds: tableWorkflowGroupIds, rowIds } + return { groupIds: tableWorkflowGroupIds, rowIds, allRows: false } } const sel = normalizedSelection if (!sel) return null @@ -2711,7 +2827,7 @@ export function TableGrid({ if (row) rowIds.push(row.id) } if (rowIds.length === 0) return null - return { groupIds: [...groupIdsInRect], rowIds } + return { groupIds: [...groupIdsInRect], rowIds, allRows: false } }, [rowSelection, normalizedSelection, rows, displayColumns, tableWorkflowGroupIds]) const selectionStats = useMemo(() => { @@ -2788,13 +2904,6 @@ export function TableGrid({ singleWorkflowCell, ]) - const handleRunRow = useCallback( - (rowId: string) => { - onRunRow(rowId) - }, - [onRunRow] - ) - if (!isLoadingTable && !tableData) { return (
    @@ -3036,7 +3145,7 @@ export function TableGrid({ hasWorkflowColumns={hasWorkflowColumns} numDivWidth={numDivWidth} onStopRow={onStopRow} - onRunRow={handleRunRow} + onRunRow={onRunRow} workflowGroups={tableWorkflowGroups} /> ))} @@ -3116,403 +3225,3 @@ export function TableGrid({
    ) } - -const TableColGroup = React.memo(function TableColGroup({ - columns, - columnWidths, - checkboxColWidth, -}: { - columns: DisplayColumn[] - columnWidths: Record - checkboxColWidth: number -}) { - return ( -
    - - {columns.map((col) => ( - - ))} - - - ) -}) - -interface DataRowProps { - row: TableRowType - columns: DisplayColumn[] - rowIndex: number - isFirstRow: boolean - editingColumnName: string | null - initialCharacter: string | null - pendingCellValue: Record | null - normalizedSelection: NormalizedSelection | null - onClick: (rowId: string, columnName: string, options?: { toggleBoolean?: boolean }) => void - onDoubleClick: (rowId: string, columnName: string, columnKey: string) => void - onSave: (rowId: string, columnName: string, value: unknown, reason: SaveReason) => void - onCancel: () => void - onContextMenu: (e: React.MouseEvent, row: TableRowType) => void - onCellMouseDown: (rowIndex: number, colIndex: number, shiftKey: boolean) => void - onCellMouseEnter: (rowIndex: number, colIndex: number) => void - isRowChecked: boolean - onRowToggle: (rowIndex: number, shiftKey: boolean) => void - /** Number of workflow cells in this row currently in a running/queued state. */ - runningCount: number - /** Whether the table has at least one workflow column — controls whether a run/stop icon is rendered. */ - hasWorkflowColumns: boolean - /** Width of the row-number inner div in px, derived from the table's maxRows digit count. */ - numDivWidth: number - onStopRow: (rowId: string) => void - onRunRow: (rowId: string) => void - /** - * The table's workflow groups, used to compute per-row "Waiting on …" labels - * for empty workflow-output cells whose group has unmet dependencies. 
- */ - workflowGroups: WorkflowGroup[] -} - -function cellRangeRowChanged( - rowIndex: number, - colCount: number, - prev: NormalizedSelection | null, - next: NormalizedSelection | null -): boolean { - const pIn = prev !== null && rowIndex >= prev.startRow && rowIndex <= prev.endRow - const nIn = next !== null && rowIndex >= next.startRow && rowIndex <= next.endRow - const pAnchor = prev !== null && rowIndex === prev.anchorRow - const nAnchor = next !== null && rowIndex === next.anchorRow - - if (!pIn && !nIn && !pAnchor && !nAnchor) return false - if (pIn !== nIn || pAnchor !== nAnchor) return true - - if (pIn && nIn) { - if (prev!.startCol !== next!.startCol || prev!.endCol !== next!.endCol) return true - if ((rowIndex === prev!.startRow) !== (rowIndex === next!.startRow)) return true - if ((rowIndex === prev!.endRow) !== (rowIndex === next!.endRow)) return true - const pMulti = prev!.startRow !== prev!.endRow || prev!.startCol !== prev!.endCol - const nMulti = next!.startRow !== next!.endRow || next!.startCol !== next!.endCol - if (pMulti !== nMulti) return true - const pFull = prev!.startCol === 0 && prev!.endCol === colCount - 1 - const nFull = next!.startCol === 0 && next!.endCol === colCount - 1 - if (pFull !== nFull) return true - } - - if (pAnchor && nAnchor && prev!.anchorCol !== next!.anchorCol) return true - - return false -} - -function dataRowPropsAreEqual(prev: DataRowProps, next: DataRowProps): boolean { - if ( - prev.row !== next.row || - prev.columns !== next.columns || - prev.rowIndex !== next.rowIndex || - prev.isFirstRow !== next.isFirstRow || - prev.editingColumnName !== next.editingColumnName || - prev.pendingCellValue !== next.pendingCellValue || - prev.onClick !== next.onClick || - prev.onDoubleClick !== next.onDoubleClick || - prev.onSave !== next.onSave || - prev.onCancel !== next.onCancel || - prev.onContextMenu !== next.onContextMenu || - prev.onCellMouseDown !== next.onCellMouseDown || - prev.onCellMouseEnter !== next.onCellMouseEnter 
|| - prev.isRowChecked !== next.isRowChecked || - prev.onRowToggle !== next.onRowToggle || - prev.runningCount !== next.runningCount || - prev.hasWorkflowColumns !== next.hasWorkflowColumns || - prev.numDivWidth !== next.numDivWidth || - prev.onStopRow !== next.onStopRow || - prev.onRunRow !== next.onRunRow || - prev.workflowGroups !== next.workflowGroups - ) { - return false - } - if ( - (prev.editingColumnName !== null || next.editingColumnName !== null) && - prev.initialCharacter !== next.initialCharacter - ) { - return false - } - - return !cellRangeRowChanged( - prev.rowIndex, - prev.columns.length, - prev.normalizedSelection, - next.normalizedSelection - ) -} - -const DataRow = React.memo(function DataRow({ - row, - columns, - rowIndex, - isFirstRow, - editingColumnName, - initialCharacter, - pendingCellValue, - normalizedSelection, - isRowChecked, - onClick, - onDoubleClick, - onSave, - onCancel, - onContextMenu, - onCellMouseDown, - onCellMouseEnter, - onRowToggle, - runningCount, - hasWorkflowColumns, - numDivWidth, - onStopRow, - onRunRow, - workflowGroups, -}: DataRowProps) { - const sel = normalizedSelection - /** - * Per-row "Waiting on …" labels keyed by group id. A group has labels iff - * at least one of its dependencies is unmet for this row — drives the - * "Waiting" pill rendered by `CellContent` for empty workflow-output cells. - * Computed once per render rather than per cell so all cells in a group - * share the same array reference. - */ - const waitingByGroupId = React.useMemo(() => { - if (workflowGroups.length === 0) return null - const map = new Map() - for (const group of workflowGroups) { - // autoRun=false groups never fire from the scheduler — there's nothing - // to wait on. The cell stays empty until the user clicks Run manually. 
- if (group.autoRun === false) continue - const unmet = getUnmetGroupDeps(group, row) - if (unmet.columns.length === 0) continue - map.set(group.id, unmet.columns) - } - return map - }, [workflowGroups, row]) - const isMultiCell = sel !== null && (sel.startRow !== sel.endRow || sel.startCol !== sel.endCol) - const isRowSelected = isRowChecked - - return ( - onContextMenu(e, row)}> - - {columns.map((column, colIndex) => { - const inRange = - sel !== null && - rowIndex >= sel.startRow && - rowIndex <= sel.endRow && - colIndex >= sel.startCol && - colIndex <= sel.endCol - const isAnchor = sel !== null && rowIndex === sel.anchorRow && colIndex === sel.anchorCol - const isEditing = editingColumnName === column.name - const isHighlighted = inRange || isRowChecked - - const isTopEdge = inRange ? rowIndex === sel!.startRow : isRowChecked - const isBottomEdge = inRange ? rowIndex === sel!.endRow : isRowChecked - const isLeftEdge = inRange ? colIndex === sel!.startCol : colIndex === 0 - const isRightEdge = inRange ? colIndex === sel!.endCol : colIndex === columns.length - 1 - - return ( - - ) - })} - - ) -}, dataRowPropsAreEqual) - -const TableBodySkeleton = React.memo(function TableBodySkeleton({ - colCount, -}: { - colCount: number -}) { - return ( - <> - {Array.from({ length: SKELETON_ROW_COUNT }).map((_, rowIndex) => ( - - - {Array.from({ length: colCount }).map((_, colIndex) => { - const width = 72 + ((rowIndex + colIndex) % 4) * 24 - return ( - - ) - })} - - ))} - - ) -}) - -const SelectAllCheckbox = React.memo(function SelectAllCheckbox({ - checked, - onCheckedChange, -}: { - checked: boolean - onCheckedChange: () => void -}) { - return ( - - ) -}) - -const AddRowButton = React.memo(function AddRowButton({ onClick }: { onClick: () => void }) { - return ( -
    - -
    - ) -}) diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/table-primitives.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/table-primitives.tsx new file mode 100644 index 00000000000..20b8d02db89 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/table-primitives.tsx @@ -0,0 +1,114 @@ +'use client' + +import React from 'react' +import { Button, Checkbox, Skeleton } from '@/components/emcn' +import { Plus } from '@/components/emcn/icons' +import { cn } from '@/lib/core/utils/cn' +import { + ADD_COL_WIDTH, + CELL, + CELL_CHECKBOX, + CELL_HEADER_CHECKBOX, + COL_WIDTH, + SKELETON_ROW_COUNT, +} from './constants' +import type { DisplayColumn } from './types' + +export const TableColGroup = React.memo(function TableColGroup({ + columns, + columnWidths, + checkboxColWidth, +}: { + columns: DisplayColumn[] + columnWidths: Record + checkboxColWidth: number +}) { + return ( +
    + + {columns.map((col) => ( + + ))} + + + ) +}) + +export const TableBodySkeleton = React.memo(function TableBodySkeleton({ + colCount, +}: { + colCount: number +}) { + return ( + <> + {Array.from({ length: SKELETON_ROW_COUNT }).map((_, rowIndex) => ( + + + {Array.from({ length: colCount }).map((_, colIndex) => { + const width = 72 + ((rowIndex + colIndex) % 4) * 24 + return ( + + ) + })} + + ))} + + ) +}) + +export const SelectAllCheckbox = React.memo(function SelectAllCheckbox({ + checked, + onCheckedChange, +}: { + checked: boolean + onCheckedChange: () => void +}) { + return ( + + ) +}) + +export const AddRowButton = React.memo(function AddRowButton({ onClick }: { onClick: () => void }) { + return ( +
    + +
    + ) +}) diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/utils.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/utils.ts index 73fa8db9c49..a0efce3b877 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/utils.ts +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/utils.ts @@ -8,6 +8,49 @@ import type { import type { DeletedRowSnapshot } from '@/stores/table/types' import type { DisplayColumn } from './types' +export type RowSelection = { kind: 'none' } | { kind: 'some'; ids: Set } | { kind: 'all' } + +export const ROW_SELECTION_NONE: RowSelection = { kind: 'none' } +export const ROW_SELECTION_ALL: RowSelection = { kind: 'all' } + +export function rowSelectionIncludes(sel: RowSelection, id: string): boolean { + if (sel.kind === 'all') return true + if (sel.kind === 'some') return sel.ids.has(id) + return false +} + +export function rowSelectionIsEmpty(sel: RowSelection): boolean { + if (sel.kind === 'none') return true + if (sel.kind === 'some') return sel.ids.size === 0 + return false +} + +export function rowSelectionMaterialize(sel: RowSelection, rows: TableRowType[]): Set { + if (sel.kind === 'all') return new Set(rows.map((r) => r.id)) + if (sel.kind === 'some') return new Set(sel.ids) + return new Set() +} + +export function rowSelectionCoversAll(sel: RowSelection, rows: TableRowType[]): boolean { + if (rows.length === 0) return false + if (sel.kind === 'all') return true + if (sel.kind === 'none') return false + if (sel.ids.size < rows.length) return false + for (const r of rows) if (!sel.ids.has(r.id)) return false + return true +} + +/** Returns sticky row-number column dimensions sized to the digit count of `maxRows`. */ +export function checkboxColLayout( + maxRows: number, + hasWorkflowCols: boolean +): { colWidth: number; numDivWidth: number } { + const digits = maxRows > 0 ? 
Math.floor(Math.log10(maxRows)) + 1 : 1 + const numDivWidth = Math.max(20, digits * 8 + 4) + const colWidth = Math.max(32, numDivWidth + 8) + (hasWorkflowCols ? 16 : 0) + return { colWidth, numDivWidth } +} + export interface CellCoord { rowIndex: number colIndex: number diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-table.test.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-table.test.ts new file mode 100644 index 00000000000..6283732dafc --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-table.test.ts @@ -0,0 +1,238 @@ +/** + * @vitest-environment node + */ +import { beforeEach, describe, expect, it, vi } from 'vitest' + +// Capture useEffect calls so tests can trigger them manually. +const capturedEffects: Array<() => undefined | (() => void)> = [] + +// Mock React hooks to be passthrough so useTable() can be called without a +// React root. useCallback returns its function arg; useMemo executes +// immediately; useEffect is captured for manual triggering. 
+vi.mock('react', () => ({ + useCallback: (fn: unknown) => fn, + useMemo: (fn: () => unknown) => fn(), + useEffect: (fn: () => undefined | (() => void)) => { + capturedEffects.push(fn) + }, + useRef: (init: unknown) => ({ current: init }), +})) + +const mockGetQueryData = vi.fn() +const mockFetchNextPage = vi.fn() +const mockQueryClient = { + getQueryData: mockGetQueryData, +} + +vi.mock('@tanstack/react-query', () => ({ + useQueryClient: vi.fn(() => mockQueryClient), +})) + +vi.mock('@/hooks/queries/tables', () => ({ + tableRowsInfiniteOptions: vi.fn(({ tableId, pageSize, filter, sort }) => ({ + queryKey: [ + 'tables', + 'detail', + tableId, + 'rows', + 'infinite', + JSON.stringify({ pageSize, filter, sort }), + ], + queryFn: vi.fn(), + initialPageParam: 0, + staleTime: 30000, + })), + useInfiniteTableRows: vi.fn(() => ({ + data: { pages: [] }, + isLoading: false, + refetch: vi.fn().mockResolvedValue(undefined), + fetchNextPage: mockFetchNextPage, + hasNextPage: false, + isFetchingNextPage: false, + })), + useTable: vi.fn(() => ({ + data: undefined, + isLoading: false, + })), +})) + +vi.mock('@/hooks/queries/workflows', () => ({ + useWorkflows: vi.fn(() => ({ data: undefined })), + useWorkflowStates: vi.fn(() => new Map()), +})) + +vi.mock('@/blocks', () => ({ + getBlock: vi.fn(() => undefined), +})) + +vi.mock('@/lib/table/constants', () => ({ + TABLE_LIMITS: { MAX_QUERY_LIMIT: 1000 }, +})) + +import { useTable } from '@/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-table' + +const WORKSPACE_ID = 'ws-1' +const TABLE_ID = 'tbl-1' +const QUERY_OPTIONS = { filter: null, sort: null } + +function makeRow(id: string, position: number) { + return { id, data: { name: `Row ${id}` }, position, executions: {} } +} + +function makePages(rowsPerPage: number[], totalCount: number) { + return rowsPerPage.map((count, pageIdx) => ({ + rows: Array.from({ length: count }, (_, i) => + makeRow(`r${pageIdx * 1000 + i}`, pageIdx * 1000 + i) + ), + totalCount, + })) +} + +const 
OK = { status: 'success', hasNextPage: false } as const + +beforeEach(() => { + capturedEffects.length = 0 + vi.clearAllMocks() + mockGetQueryData.mockReturnValue(undefined) + mockFetchNextPage.mockResolvedValue(OK) +}) + +describe('useTable – ensureAllRowsLoaded', () => { + it('returns an empty array when cache is empty', async () => { + mockGetQueryData.mockReturnValue(undefined) + const { ensureAllRowsLoaded } = useTable({ + workspaceId: WORKSPACE_ID, + tableId: TABLE_ID, + queryOptions: QUERY_OPTIONS, + }) + const rows = await ensureAllRowsLoaded() + expect(rows).toEqual([]) + expect(mockFetchNextPage).not.toHaveBeenCalled() + }) + + it('returns rows from cache immediately when last page is partial (< 1 page)', async () => { + const [page] = makePages([3], 3) + mockGetQueryData.mockReturnValue({ pages: [page] }) + const { ensureAllRowsLoaded } = useTable({ + workspaceId: WORKSPACE_ID, + tableId: TABLE_ID, + queryOptions: QUERY_OPTIONS, + }) + const rows = await ensureAllRowsLoaded() + expect(rows).toHaveLength(3) + expect(rows.map((r) => r.id)).toEqual(['r0', 'r1', 'r2']) + // Cache already complete — no HTTP request needed. + expect(mockFetchNextPage).not.toHaveBeenCalled() + }) + + it('returns rows from cache immediately when last page is exactly one full page', async () => { + // A full page means getNextPageParam returns the next offset, so we must + // fetch once to confirm there is no page 2 (which returns 0 rows). After + // that empty page the last page is partial (0 < 1000) and the loop breaks. 
+ const [page0] = makePages([1000], 1000) + const emptyPage = { rows: [], totalCount: 1000 } + mockGetQueryData + .mockReturnValueOnce({ pages: [page0] }) // loop iter 1: full → fetch + .mockReturnValueOnce({ pages: [page0, emptyPage] }) // loop iter 2: empty → break + .mockReturnValue({ pages: [page0, emptyPage] }) // final read + const { ensureAllRowsLoaded } = useTable({ + workspaceId: WORKSPACE_ID, + tableId: TABLE_ID, + queryOptions: QUERY_OPTIONS, + }) + const rows = await ensureAllRowsLoaded() + expect(rows).toHaveLength(1000) + expect(rows[0].id).toBe('r0') + expect(rows[999].id).toBe('r999') + expect(mockFetchNextPage).toHaveBeenCalledTimes(1) + }) + + it('fetches one page when last cached page is full and there is more data', async () => { + const [page0, page1] = makePages([1000, 500], 1500) + mockGetQueryData + .mockReturnValueOnce({ pages: [page0] }) // loop iter 1: full → fetch + .mockReturnValueOnce({ pages: [page0, page1] }) // loop iter 2: partial → break + .mockReturnValue({ pages: [page0, page1] }) // final read + const { ensureAllRowsLoaded } = useTable({ + workspaceId: WORKSPACE_ID, + tableId: TABLE_ID, + queryOptions: QUERY_OPTIONS, + }) + const rows = await ensureAllRowsLoaded() + expect(rows).toHaveLength(1500) + expect(rows[0].id).toBe('r0') + expect(rows[1000].id).toBe('r1000') + expect(mockFetchNextPage).toHaveBeenCalledTimes(1) + }) + + it('fetches multiple pages for a large table until a partial page terminates the drain', async () => { + const [page0, page1, page2] = makePages([1000, 1000, 500], 2500) + mockGetQueryData + .mockReturnValueOnce({ pages: [page0] }) // iter 1: full → fetch + .mockReturnValueOnce({ pages: [page0, page1] }) // iter 2: full → fetch + .mockReturnValueOnce({ pages: [page0, page1, page2] }) // iter 3: partial → break + .mockReturnValue({ pages: [page0, page1, page2] }) // final read + const { ensureAllRowsLoaded } = useTable({ + workspaceId: WORKSPACE_ID, + tableId: TABLE_ID, + queryOptions: QUERY_OPTIONS, + }) 
+ const rows = await ensureAllRowsLoaded() + expect(rows).toHaveLength(2500) + expect(rows[0].id).toBe('r0') + expect(rows[1000].id).toBe('r1000') + expect(rows[2499].id).toBe('r2499') + expect(mockFetchNextPage).toHaveBeenCalledTimes(2) + }) + + it('throws when fetchNextPage returns an error status', async () => { + const [page0] = makePages([1000], 2000) + mockGetQueryData.mockReturnValue({ pages: [page0] }) + const error = new Error('Network failure') + mockFetchNextPage.mockResolvedValueOnce({ status: 'error', error }) + const { ensureAllRowsLoaded } = useTable({ + workspaceId: WORKSPACE_ID, + tableId: TABLE_ID, + queryOptions: QUERY_OPTIONS, + }) + await expect(ensureAllRowsLoaded()).rejects.toThrow('Network failure') + }) + + it('does not call fetchNextPage or getQueryData when workspaceId is empty', async () => { + const { ensureAllRowsLoaded } = useTable({ + workspaceId: '', + tableId: TABLE_ID, + queryOptions: QUERY_OPTIONS, + }) + const rows = await ensureAllRowsLoaded() + expect(rows).toEqual([]) + expect(mockFetchNextPage).not.toHaveBeenCalled() + expect(mockGetQueryData).not.toHaveBeenCalled() + }) + + it('does not call fetchNextPage or getQueryData when tableId is empty', async () => { + const { ensureAllRowsLoaded } = useTable({ + workspaceId: WORKSPACE_ID, + tableId: '', + queryOptions: QUERY_OPTIONS, + }) + const rows = await ensureAllRowsLoaded() + expect(rows).toEqual([]) + expect(mockFetchNextPage).not.toHaveBeenCalled() + expect(mockGetQueryData).not.toHaveBeenCalled() + }) + + it('encodes queryOptions.filter into the queryKey passed to getQueryData', async () => { + const filter = { column: 'name', operator: 'eq', value: 'Alice' } as never + const [page] = makePages([3], 3) + mockGetQueryData.mockReturnValue({ pages: [page] }) + const { ensureAllRowsLoaded } = useTable({ + workspaceId: WORKSPACE_ID, + tableId: TABLE_ID, + queryOptions: { filter, sort: null }, + }) + await ensureAllRowsLoaded() + const queryKey = 
mockGetQueryData.mock.calls[0][0] as unknown[] + expect(JSON.stringify(queryKey)).toContain('Alice') + }) +}) diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-table.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-table.ts index b3d5b311ad4..2b36bda1a9b 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-table.ts +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-table.ts @@ -1,11 +1,16 @@ 'use client' import { useCallback, useMemo } from 'react' +import { useQueryClient } from '@tanstack/react-query' import type { ColumnDefinition, TableDefinition, TableRow, WorkflowGroup } from '@/lib/table' import { TABLE_LIMITS } from '@/lib/table/constants' import type { FlattenOutputsBlockInput } from '@/lib/workflows/blocks/flatten-outputs' import { getBlock } from '@/blocks' -import { useInfiniteTableRows, useTable as useTableQuery } from '@/hooks/queries/tables' +import { + tableRowsInfiniteOptions, + useInfiniteTableRows, + useTable as useTableQuery, +} from '@/hooks/queries/tables' import { useWorkflowStates, useWorkflows } from '@/hooks/queries/workflows' import type { WorkflowMetadata } from '@/stores/workflows/registry/types' import type { WorkflowState } from '@/stores/workflows/workflow/types' @@ -26,46 +31,41 @@ interface FetchNextPageResult { } export interface UseTableReturn { - /** Table definition (name, schema, metadata, etc.). */ tableData: TableDefinition | undefined isLoadingTable: boolean - /** Flattened rows across every fetched page. */ + /** Flattened across every fetched infinite-query page. */ rows: TableRow[] isLoadingRows: boolean refetchRows: () => void /** - * Fetch the next page of rows. The resolved value's `hasNextPage` reflects - * the post-fetch cache state — read from this rather than the parent's - * `hasNextPage` state, which only updates on the next React render. 
+ * The resolved value's `hasNextPage` reflects the post-fetch cache state — + * read from this rather than the hook's `hasNextPage`, which only updates on + * the next React render. */ fetchNextPage: () => Promise hasNextPage: boolean isFetchingNextPage: boolean - /** Workspace-wide workflow metadata used by header chips and the column sidebar. */ workflows: WorkflowMetadata[] | undefined - /** Stable reference to `tableData?.schema?.columns ?? []`. */ columns: ColumnDefinition[] - /** Stable reference to `tableData?.schema?.workflowGroups ?? []`. */ tableWorkflowGroups: WorkflowGroup[] - /** Pre-fetched live state for every unique workflow id used by the table. */ workflowStates: Map - /** Pre-resolved icon + block-name info per output column name. Headers read - * from this map instead of each subscribing to its own workflow-state query. */ + /** Headers read from this map instead of each subscribing to its own workflow-state query. */ columnSourceInfo: Map + /** + * Fetches any missing pages then returns the full flat row list from cache. + * Safe to read immediately — no React re-render required. Gate bulk ops that + * need the complete row set behind this. + */ + ensureAllRowsLoaded: () => Promise } /** - * Coordinator hook for the table view's data layer. Wraps row/schema/workflow - * fetching and exposes the derived collections every consumer needs (display - * columns, source-info map, workflow-name lookup). Mirrors the shape of - * `use-chat`'s coordinator: one hook returning a typed bundle the surface - * component destructures. - * - * Local interaction state (drag, resize, selection, editing) stays in the - * `Table` component — moving that here would push every keystroke through a - * single hook return and re-render the world. + * Local interaction state (drag, resize, selection, editing) intentionally + * stays in the `Table` component — moving it here would push every keystroke + * through this hook's return value and re-render everything. 
*/ export function useTable({ workspaceId, tableId, queryOptions }: UseTableParams): UseTableReturn { + const queryClient = useQueryClient() const { data: tableData, isLoading: isLoadingTable } = useTableQuery(workspaceId, tableId) const { @@ -93,6 +93,32 @@ export function useTable({ workspaceId, tableId, queryOptions }: UseTableParams) void refetch() }, [refetch]) + const ensureAllRowsLoaded = useCallback(async (): Promise => { + if (!workspaceId || !tableId) return [] + + const opts = tableRowsInfiniteOptions({ + workspaceId, + tableId, + pageSize: TABLE_LIMITS.MAX_QUERY_LIMIT, + filter: queryOptions.filter, + sort: queryOptions.sort, + }) + + // getQueryData bypasses React's render cycle — pages added by fetchNextPage + // are visible synchronously after each await without waiting for a re-render. + while (true) { + const data = queryClient.getQueryData(opts.queryKey) + const lastPage = data?.pages[data.pages.length - 1] + if (!lastPage || lastPage.rows.length < TABLE_LIMITS.MAX_QUERY_LIMIT) break + const result = await fetchNextPage() + if (result.status === 'error') { + throw result.error ?? new Error('Failed to load table rows') + } + } + + return queryClient.getQueryData(opts.queryKey)?.pages.flatMap((p) => p.rows) ?? 
[] + }, [workspaceId, tableId, queryOptions.filter, queryOptions.sort, queryClient, fetchNextPage]) + const fetchNextPageWrapped = useCallback(async () => { const result = await fetchNextPage() if (result.status === 'error') { @@ -149,5 +175,6 @@ export function useTable({ workspaceId, tableId, queryOptions }: UseTableParams) tableWorkflowGroups, workflowStates, columnSourceInfo, + ensureAllRowsLoaded, } } diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/table.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/table.tsx index 91eeb93595e..9ce165deb3f 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/table.tsx +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/table.tsx @@ -513,17 +513,29 @@ export function Table({ hasWorkflowColumns={selection.hasWorkflowColumns} showPlay={selection.selectionStats.hasIncompleteOrFailed} showRefresh={selection.selectionStats.hasCompleted} - onPlay={() => - selection.selectedRunScope && - runScope({ ...selection.selectedRunScope, runMode: 'incomplete' }) - } - onRefresh={() => - selection.selectedRunScope && - runScope({ ...selection.selectedRunScope, runMode: 'all' }) - } - onStopWorkflows={() => - selection.selectedRunScope && onStopRows(selection.selectedRunScope.rowIds) - } + onPlay={() => { + const scope = selection.selectedRunScope + if (!scope) return + runScope({ + groupIds: scope.groupIds, + rowIds: scope.allRows ? undefined : scope.rowIds, + runMode: 'incomplete', + }) + }} + onRefresh={() => { + const scope = selection.selectedRunScope + if (!scope) return + runScope({ + groupIds: scope.groupIds, + rowIds: scope.allRows ? undefined : scope.rowIds, + runMode: 'all', + }) + }} + onStopWorkflows={() => { + const scope = selection.selectedRunScope + if (!scope) return + scope.allRows ? 
onStopAll() : onStopRows(scope.rowIds) + }} onViewExecution={ selection.singleWorkflowCell?.canViewExecution && selection.singleWorkflowCell.executionId diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/utils.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/utils.ts index 75c57b61999..55e310c3630 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/utils.ts +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/utils.ts @@ -45,7 +45,7 @@ export function cleanCellValue(value: unknown, column: ColumnDefinition): unknow if (column.type === 'number') { if (value === '') return null const num = Number(value) - return Number.isNaN(num) ? 0 : num + return Number.isNaN(num) ? null : num } if (column.type === 'json') { if (typeof value === 'string') { diff --git a/apps/sim/app/workspace/[workspaceId]/tables/tables.tsx b/apps/sim/app/workspace/[workspaceId]/tables/tables.tsx index 3389ab35fcf..9070acbc23c 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/tables.tsx +++ b/apps/sim/app/workspace/[workspaceId]/tables/tables.tsx @@ -399,7 +399,6 @@ export function Tables() { } setUploadProgress({ completed: 0, total: csvFiles.length }) - const failed: string[] = [] for (let i = 0; i < csvFiles.length; i++) { try { @@ -412,23 +411,13 @@ export function Tables() { } } } catch (err) { - failed.push(csvFiles[i].name) logger.error('Error uploading CSV:', err) } finally { setUploadProgress({ completed: i + 1, total: csvFiles.length }) } } - - if (failed.length > 0) { - toast.error( - failed.length === 1 - ? `Failed to import ${failed[0]}` - : `Failed to import ${failed.length} file${failed.length > 1 ? 
's' : ''}: ${failed.join(', ')}` - ) - } } catch (err) { logger.error('Error uploading CSV:', err) - toast.error('Failed to import CSV') } finally { setUploading(false) setUploadProgress({ completed: 0, total: 0 }) diff --git a/apps/sim/hooks/queries/tables.test.ts b/apps/sim/hooks/queries/tables.test.ts index c2c8a12724b..70e8411df54 100644 --- a/apps/sim/hooks/queries/tables.test.ts +++ b/apps/sim/hooks/queries/tables.test.ts @@ -31,6 +31,7 @@ const { queryClient, cacheStore } = vi.hoisted(() => { vi.mock('@tanstack/react-query', () => ({ keepPreviousData: {}, + infiniteQueryOptions: (opts: unknown) => opts, useQuery: vi.fn(), useInfiniteQuery: vi.fn(), useQueryClient: vi.fn(() => queryClient), @@ -80,7 +81,13 @@ vi.mock('@/components/emcn', () => ({ toast: { error: vi.fn(), success: vi.fn() }, })) -import { tableKeys, useDeleteColumn, useUpdateColumn } from '@/hooks/queries/tables' +import { + tableKeys, + tableRowsInfiniteOptions, + tableRowsParamsKey, + useDeleteColumn, + useUpdateColumn, +} from '@/hooks/queries/tables' const TABLE_ID = 'tbl-1' const WORKSPACE_ID = 'ws-1' @@ -249,3 +256,117 @@ describe('useDeleteColumn case-insensitive row cleanup', () => { expect(rows?.rows[0]?.data).toEqual({ name: 'a' }) }) }) + +describe('tableRowsParamsKey', () => { + it('produces the same key for identical params', () => { + const k1 = tableRowsParamsKey({ pageSize: 1000, filter: null, sort: null }) + const k2 = tableRowsParamsKey({ pageSize: 1000, filter: null, sort: null }) + expect(k1).toBe(k2) + }) + + it('treats undefined filter and sort as null', () => { + const withUndefined = tableRowsParamsKey({ pageSize: 1000, filter: undefined, sort: undefined }) + const withNull = tableRowsParamsKey({ pageSize: 1000, filter: null, sort: null }) + expect(withUndefined).toBe(withNull) + }) + + it('produces different keys for different filters', () => { + const k1 = tableRowsParamsKey({ pageSize: 1000, filter: null, sort: null }) + const k2 = tableRowsParamsKey({ + pageSize: 
1000, + filter: { column: 'name', operator: 'eq', value: 'Alice' } as never, + sort: null, + }) + expect(k1).not.toBe(k2) + }) + + it('produces different keys for different page sizes', () => { + const k1 = tableRowsParamsKey({ pageSize: 1000, filter: null, sort: null }) + const k2 = tableRowsParamsKey({ pageSize: 500, filter: null, sort: null }) + expect(k1).not.toBe(k2) + }) + + it('produces different keys for different sorts', () => { + const k1 = tableRowsParamsKey({ pageSize: 1000, filter: null, sort: null }) + const k2 = tableRowsParamsKey({ + pageSize: 1000, + filter: null, + sort: { column: 'name', direction: 'asc' } as never, + }) + expect(k1).not.toBe(k2) + }) +}) + +describe('tableRowsInfiniteOptions', () => { + const PAGE_SIZE = 1000 + + function makeOpts(pageSize = PAGE_SIZE) { + return tableRowsInfiniteOptions({ + workspaceId: WORKSPACE_ID, + tableId: TABLE_ID, + pageSize, + filter: null, + sort: null, + }) as { + queryKey: readonly unknown[] + getNextPageParam: ( + lastPage: { rows: unknown[] }, + allPages: unknown[], + lastPageParam: unknown + ) => number | undefined + } + } + + it('getNextPageParam returns undefined for a partial page (drain terminates)', () => { + const opts = makeOpts() + const lastPage = { rows: Array.from({ length: 500 }, (_, i) => ({ id: `r${i}` })) } + expect(opts.getNextPageParam(lastPage, [], 0)).toBeUndefined() + }) + + it('getNextPageParam returns undefined for an empty page', () => { + const opts = makeOpts() + expect(opts.getNextPageParam({ rows: [] }, [], 0)).toBeUndefined() + }) + + it('getNextPageParam returns next offset for a full page', () => { + const opts = makeOpts() + const fullPage = { rows: Array.from({ length: PAGE_SIZE }, (_, i) => ({ id: `r${i}` })) } + expect(opts.getNextPageParam(fullPage, [], 0)).toBe(PAGE_SIZE) + expect(opts.getNextPageParam(fullPage, [], PAGE_SIZE)).toBe(PAGE_SIZE * 2) + }) + + it('getNextPageParam advances correctly across three pages of 1000', () => { + const opts = makeOpts() + 
const fullPage = { rows: Array.from({ length: PAGE_SIZE }, (_, i) => ({ id: `r${i}` })) } + const lastPartialPage = { rows: Array.from({ length: 200 }, (_, i) => ({ id: `r${i}` })) } + + expect(opts.getNextPageParam(fullPage, [], 0)).toBe(1000) + expect(opts.getNextPageParam(fullPage, [], 1000)).toBe(2000) + expect(opts.getNextPageParam(lastPartialPage, [], 2000)).toBeUndefined() + }) + + it('queryKey includes the result of tableRowsParamsKey', () => { + const paramsKey = tableRowsParamsKey({ pageSize: PAGE_SIZE, filter: null, sort: null }) + const opts = makeOpts(PAGE_SIZE) + // queryKey is a tuple; one element must be exactly the paramsKey string + expect(opts.queryKey).toContain(paramsKey) + }) + + it('queryKey differs when filter changes', () => { + const opts1 = tableRowsInfiniteOptions({ + workspaceId: WORKSPACE_ID, + tableId: TABLE_ID, + pageSize: PAGE_SIZE, + filter: null, + sort: null, + }) as { queryKey: readonly unknown[] } + const opts2 = tableRowsInfiniteOptions({ + workspaceId: WORKSPACE_ID, + tableId: TABLE_ID, + pageSize: PAGE_SIZE, + filter: { column: 'name', operator: 'eq', value: 'Alice' } as never, + sort: null, + }) as { queryKey: readonly unknown[] } + expect(JSON.stringify(opts1.queryKey)).not.toBe(JSON.stringify(opts2.queryKey)) + }) +}) diff --git a/apps/sim/hooks/queries/tables.ts b/apps/sim/hooks/queries/tables.ts index 6ebd37ce26b..e53b77be0c8 100644 --- a/apps/sim/hooks/queries/tables.ts +++ b/apps/sim/hooks/queries/tables.ts @@ -4,10 +4,10 @@ * React Query hooks for managing user-defined tables. */ -import { useMemo } from 'react' import { createLogger } from '@sim/logger' import { type InfiniteData, + infiniteQueryOptions, keepPreviousData, useInfiniteQuery, useMutation, @@ -210,11 +210,6 @@ interface InfiniteTableRowsParams { enabled?: boolean } -/** - * Fetch a single page of rows for a table with pagination/filter/sort. Polls - * while any cell is in flight so cells reach their terminal state without a - * manual refresh. 
- */ export function useTableRows({ workspaceId, tableId, @@ -243,36 +238,30 @@ export function useTableRows({ }) } -/** - * Paginated row fetching with `useInfiniteQuery`. Each page requests `pageSize` - * rows at the next offset; `getNextPageParam` returns `undefined` once the last - * page comes back short, signalling end-of-list. - * - * Page 0 includes a server `COUNT(*)`; subsequent pages skip it. - */ -export function useInfiniteTableRows({ +export function tableRowsParamsKey({ + pageSize, + filter, + sort, +}: Pick): string { + return JSON.stringify({ pageSize, filter: filter ?? null, sort: sort ?? null }) +} + +export function tableRowsInfiniteOptions({ workspaceId, tableId, pageSize, filter, sort, - enabled = true, -}: InfiniteTableRowsParams) { - const paramsKey = JSON.stringify({ - pageSize, - filter: filter ?? null, - sort: sort ?? null, - }) - const queryKey = useMemo(() => tableKeys.infiniteRows(tableId, paramsKey), [tableId, paramsKey]) - - return useInfiniteQuery({ - queryKey, +}: Omit) { + const paramsKey = tableRowsParamsKey({ pageSize, filter, sort }) + return infiniteQueryOptions({ + queryKey: tableKeys.infiniteRows(tableId, paramsKey), queryFn: ({ pageParam, signal }) => fetchTableRows({ workspaceId, tableId, limit: pageSize, - offset: pageParam, + offset: pageParam as number, filter, sort, includeTotal: pageParam === 0, @@ -281,13 +270,27 @@ export function useInfiniteTableRows({ initialPageParam: 0, getNextPageParam: (lastPage, _allPages, lastPageParam) => { if (lastPage.rows.length < pageSize) return undefined - return lastPageParam + pageSize + return (lastPageParam as number) + pageSize }, - enabled: Boolean(workspaceId && tableId) && enabled, staleTime: 30 * 1000, }) } +/** Page 0 fetches a server-side `COUNT(*)`; subsequent pages skip it. 
*/ +export function useInfiniteTableRows({ + workspaceId, + tableId, + pageSize, + filter, + sort, + enabled = true, +}: InfiniteTableRowsParams) { + return useInfiniteQuery({ + ...tableRowsInfiniteOptions({ workspaceId, tableId, pageSize, filter, sort }), + enabled: Boolean(workspaceId && tableId) && enabled, + }) +} + /** * Create a new table in a workspace. */ @@ -408,7 +411,6 @@ export function useCreateTableRow({ workspaceId, tableId }: RowMutationContext) reconcileCreatedRow(queryClient, tableId, row) }, onError: (error) => { - // Validation errors are surfaced inline by the caller (see useUpdateColumn). if (isValidationError(error)) return toast.error(error.message, { duration: 5000 }) }, @@ -763,7 +765,6 @@ export function useUpdateColumn({ workspaceId, tableId }: RowMutationContext) { queryClient.setQueryData(key, data) } } - // Validation errors are surfaced as inline FieldErrors by the caller. if (isValidationError(error)) return toast.error(error.message, { duration: 5000 }) }, @@ -924,6 +925,7 @@ export function useUploadCsvToTable() { }, onError: (error) => { logger.error('Failed to upload CSV:', error) + toast.error(error.message, { duration: 5000 }) }, onSettled: () => { queryClient.invalidateQueries({ queryKey: tableKeys.lists() }) @@ -1000,9 +1002,9 @@ export function useImportCsvIntoTable() { }, onError: (error) => { logger.error('Failed to import CSV into table:', error) + toast.error(error.message, { duration: 5000 }) }, onSettled: (_data, _error, variables) => { - if (!variables) return invalidateRowCount(queryClient, variables.tableId) }, }) @@ -1282,8 +1284,6 @@ export function useRunColumn({ workspaceId, tableId }: RowMutationContext) { }) } -// ───────────────────────── Workflow group mutations ───────────────────────── - interface AddWorkflowGroupVariables { group: WorkflowGroup outputColumns: AddWorkflowGroupBodyInput['outputColumns'] diff --git a/apps/sim/hooks/use-table-undo.test.ts b/apps/sim/hooks/use-table-undo.test.ts new file mode 
100644 index 00000000000..de9a1019eb2 --- /dev/null +++ b/apps/sim/hooks/use-table-undo.test.ts @@ -0,0 +1,230 @@ +/** + * @vitest-environment node + */ +import { beforeEach, describe, expect, it, vi } from 'vitest' + +// Passthrough React hooks so the hook can run outside a React root. +vi.mock('react', () => ({ + useCallback: (fn: unknown) => fn, + useEffect: (fn: () => void) => fn(), + useRef: (init: unknown) => ({ current: init }), +})) + +const mockMutate = vi.fn() +const mockMutateAsync = vi.fn() + +vi.mock('@/hooks/queries/tables', () => ({ + useUpdateTableRow: vi.fn(() => ({ mutate: mockMutate })), + useCreateTableRow: vi.fn(() => ({ mutate: mockMutate })), + useBatchCreateTableRows: vi.fn(() => ({ mutate: mockMutate, mutateAsync: mockMutateAsync })), + useBatchUpdateTableRows: vi.fn(() => ({ mutate: mockMutate, mutateAsync: mockMutateAsync })), + useDeleteTableRow: vi.fn(() => ({ mutate: mockMutate })), + useDeleteTableRows: vi.fn(() => ({ mutate: mockMutate })), + useAddTableColumn: vi.fn(() => ({ mutate: mockMutate })), + useUpdateColumn: vi.fn(() => ({ mutate: mockMutate })), + useDeleteColumn: vi.fn(() => ({ mutate: mockMutate })), + useRenameTable: vi.fn(() => ({ mutate: mockMutate })), + useUpdateTableMetadata: vi.fn(() => ({ mutate: mockMutate })), +})) + +vi.mock('@/lib/table/constants', () => ({ + TABLE_LIMITS: { MAX_BULK_OPERATION_SIZE: 3 }, // small limit so tests don't need 1000 items +})) + +const mockPopUndo = vi.fn() +const mockPopRedo = vi.fn() +const mockPush = vi.fn() +const mockPatchRedoRowId = vi.fn() +const mockPatchUndoRowId = vi.fn() +const mockClear = vi.fn() + +const storeState = { + stacks: {}, + push: mockPush, + popUndo: mockPopUndo, + popRedo: mockPopRedo, + patchRedoRowId: mockPatchRedoRowId, + patchUndoRowId: mockPatchUndoRowId, + clear: mockClear, +} + +vi.mock('@/stores/table/store', () => ({ + useTableUndoStore: vi.fn((selector: (s: typeof storeState) => unknown) => selector(storeState)), + runWithoutRecording: (fn: () => 
unknown) => Promise.resolve(fn()), +})) + +import { useTableUndo } from '@/hooks/use-table-undo' +import type { TableUndoAction } from '@/stores/table/types' + +const WORKSPACE_ID = 'ws-1' +const TABLE_ID = 'tbl-1' + +function makeHook() { + return useTableUndo({ workspaceId: WORKSPACE_ID, tableId: TABLE_ID }) +} + +function makeEntry(action: TableUndoAction) { + return { id: 'e1', action, timestamp: Date.now() } +} + +function makeCellsForClear(count: number) { + return Array.from({ length: count }, (_, i) => ({ + rowId: `row-${i}`, + data: { col: `val-${i}` }, + })) +} + +/** Drain the microtask queue so all async chunks in executeAction finish. */ +async function flush() { + await new Promise((r) => setTimeout(r, 0)) +} + +beforeEach(() => { + vi.clearAllMocks() + mockMutateAsync.mockResolvedValue({}) +}) + +describe('useTableUndo – clear-cells chunking (via undo)', () => { + it('sends a single mutateAsync call when cells fit in one chunk', async () => { + const cells = makeCellsForClear(2) + mockPopUndo.mockReturnValueOnce(makeEntry({ type: 'clear-cells', cells })) + const { undo } = makeHook() + ;(undo as () => void)() + await flush() + expect(mockMutateAsync).toHaveBeenCalledTimes(1) + expect(mockMutateAsync.mock.calls[0][0].updates).toHaveLength(2) + }) + + it('splits into multiple chunks when cells exceed the limit', async () => { + const cells = makeCellsForClear(7) // limit=3 → [3,3,1] + mockPopUndo.mockReturnValueOnce(makeEntry({ type: 'clear-cells', cells })) + const { undo } = makeHook() + ;(undo as () => void)() + await flush() + expect(mockMutateAsync).toHaveBeenCalledTimes(3) + expect(mockMutateAsync.mock.calls[0][0].updates).toHaveLength(3) + expect(mockMutateAsync.mock.calls[1][0].updates).toHaveLength(3) + expect(mockMutateAsync.mock.calls[2][0].updates).toHaveLength(1) + }) + + it('sends original data values for undo direction', async () => { + const cells = makeCellsForClear(1) + mockPopUndo.mockReturnValueOnce(makeEntry({ type: 'clear-cells', 
cells })) + const { undo } = makeHook() + ;(undo as () => void)() + await flush() + expect(mockMutateAsync.mock.calls[0][0].updates[0].data.col).toBe('val-0') + }) + + it('sends null values for redo direction', async () => { + const cells = makeCellsForClear(1) + mockPopRedo.mockReturnValueOnce(makeEntry({ type: 'clear-cells', cells })) + const { redo } = makeHook() + ;(redo as () => void)() + await flush() + expect(mockMutateAsync.mock.calls[0][0].updates[0].data.col).toBeNull() + }) + + it('does not call mutateAsync when cells is empty', async () => { + mockPopUndo.mockReturnValueOnce(makeEntry({ type: 'clear-cells', cells: [] })) + const { undo } = makeHook() + ;(undo as () => void)() + await flush() + expect(mockMutateAsync).not.toHaveBeenCalled() + }) + + it('stops processing after the first failing chunk', async () => { + mockMutateAsync.mockRejectedValueOnce(new Error('Network error')) + const cells = makeCellsForClear(5) // limit=3 → would be [3,2] but stops at chunk 1 + mockPopUndo.mockReturnValueOnce(makeEntry({ type: 'clear-cells', cells })) + const { undo } = makeHook() + // executeAction catches the error internally via logger — undo itself doesn't re-throw. 
+ ;(undo as () => void)() + await flush() + expect(mockMutateAsync).toHaveBeenCalledTimes(1) + }) +}) + +describe('useTableUndo – update-cells chunking (via undo)', () => { + function makeCellsForUpdate(count: number) { + return Array.from({ length: count }, (_, i) => ({ + rowId: `row-${i}`, + oldData: { col: `old-${i}` }, + newData: { col: `new-${i}` }, + })) + } + + it('sends a single call when cells fit within limit', async () => { + const cells = makeCellsForUpdate(2) + mockPopUndo.mockReturnValueOnce(makeEntry({ type: 'update-cells', cells })) + const { undo } = makeHook() + ;(undo as () => void)() + await flush() + expect(mockMutateAsync).toHaveBeenCalledTimes(1) + expect(mockMutateAsync.mock.calls[0][0].updates[0].data.col).toBe('old-0') + }) + + it('chunks across multiple calls and picks the correct direction data', async () => { + const cells = makeCellsForUpdate(8) // limit=3 → [3,3,2] + mockPopRedo.mockReturnValueOnce(makeEntry({ type: 'update-cells', cells })) + const { redo } = makeHook() + ;(redo as () => void)() + await flush() + expect(mockMutateAsync).toHaveBeenCalledTimes(3) + const lastChunk = mockMutateAsync.mock.calls[2][0].updates + expect(lastChunk).toHaveLength(2) + // redo direction → newData + expect(lastChunk[0].data.col).toBe('new-6') + }) +}) + +describe('useTableUndo – delete-column undo cell restore chunking', () => { + const baseAction: TableUndoAction = { + type: 'delete-column', + columnName: 'col', + columnType: 'string' as const, + columnPosition: 0, + columnUnique: false, + columnRequired: false, + cellData: [], + previousOrder: null, + previousWidth: null, + } + + it('does not call mutateAsync when cellData is empty', async () => { + mockPopUndo.mockReturnValueOnce(makeEntry(baseAction)) + const { undo } = makeHook() + ;(undo as () => void)() + await flush() + // addColumnMutation.mutate fires but the cell-restore block should not. 
+ expect(mockMutateAsync).not.toHaveBeenCalled() + }) + + it('fires chunked mutateAsync calls via the onSuccess IIFE when cellData exceeds limit', async () => { + const cellData = Array.from({ length: 5 }, (_, i) => ({ rowId: `row-${i}`, value: i })) + const action: TableUndoAction = { ...baseAction, cellData } + mockPopUndo.mockReturnValueOnce(makeEntry(action)) + + // addColumnMutation.mutate is the first mockMutate call. + // Capture its onSuccess and invoke it to simulate column creation completing. + let capturedOnSuccess: (() => void) | undefined + mockMutate.mockImplementationOnce((_: unknown, opts: { onSuccess?: () => void }) => { + capturedOnSuccess = opts?.onSuccess + }) + + const { undo } = makeHook() + ;(undo as () => void)() + await flush() + + // At this point executeAction has returned, but the restore happens in the + // addColumn onSuccess callback — fire it now. + capturedOnSuccess?.() + // Allow the void IIFE's microtasks to drain. + await new Promise((r) => setTimeout(r, 0)) + + // limit=3 → [3, 2] + expect(mockMutateAsync).toHaveBeenCalledTimes(2) + expect(mockMutateAsync.mock.calls[0][0].updates).toHaveLength(3) + expect(mockMutateAsync.mock.calls[1][0].updates).toHaveLength(2) + }) +}) diff --git a/apps/sim/hooks/use-table-undo.ts b/apps/sim/hooks/use-table-undo.ts index accdd79b599..8a364d54691 100644 --- a/apps/sim/hooks/use-table-undo.ts +++ b/apps/sim/hooks/use-table-undo.ts @@ -1,9 +1,6 @@ -/** - * Hook that connects the table undo/redo store to React Query mutations. - */ - import { useCallback, useEffect, useRef } from 'react' import { createLogger } from '@sim/logger' +import { TABLE_LIMITS } from '@/lib/table/constants' import { useAddTableColumn, useBatchCreateTableRows, @@ -22,9 +19,6 @@ import type { TableUndoAction } from '@/stores/table/types' const logger = createLogger('useTableUndo') -/** - * Extract the row ID from a create-row API response. 
- */ export function extractCreatedRowId(response: Record): string | undefined { const data = response?.data as Record | undefined const row = data?.row as Record | undefined @@ -90,7 +84,7 @@ export function useTableUndo({ ) const executeAction = useCallback( - (action: TableUndoAction, direction: 'undo' | 'redo') => { + async (action: TableUndoAction, direction: 'undo' | 'redo') => { try { switch (action.type) { case 'update-cell': { @@ -110,7 +104,11 @@ export function useTableUndo({ ? cell.data : Object.fromEntries(Object.keys(cell.data).map((k) => [k, null])), })) - batchUpdateRowsMutation.mutate({ updates }) + for (let i = 0; i < updates.length; i += TABLE_LIMITS.MAX_BULK_OPERATION_SIZE) { + await batchUpdateRowsMutation.mutateAsync({ + updates: updates.slice(i, i + TABLE_LIMITS.MAX_BULK_OPERATION_SIZE), + }) + } break } @@ -119,7 +117,11 @@ export function useTableUndo({ rowId: cell.rowId, data: direction === 'undo' ? cell.oldData : cell.newData, })) - batchUpdateRowsMutation.mutate({ updates }) + for (let i = 0; i < updates.length; i += TABLE_LIMITS.MAX_BULK_OPERATION_SIZE) { + await batchUpdateRowsMutation.mutateAsync({ + updates: updates.slice(i, i + TABLE_LIMITS.MAX_BULK_OPERATION_SIZE), + }) + } break } @@ -239,17 +241,24 @@ export function useTableUndo({ rowId: c.rowId, data: { [action.columnName]: c.value }, })) - batchUpdateRowsMutation.mutate( - { updates }, - { - onError: (error) => { - logger.error('Failed to restore cell data on delete-column undo', { - columnName: action.columnName, - error, + void (async () => { + try { + for ( + let i = 0; + i < updates.length; + i += TABLE_LIMITS.MAX_BULK_OPERATION_SIZE + ) { + await batchUpdateRowsMutation.mutateAsync({ + updates: updates.slice(i, i + TABLE_LIMITS.MAX_BULK_OPERATION_SIZE), }) - }, + } + } catch (error) { + logger.error('Failed to restore cell data on delete-column undo', { + columnName: action.columnName, + error, + }) } - ) + })() } const metadata: Record = {} if (action.previousOrder) { @@ 
-346,19 +355,13 @@ export function useTableUndo({ const undo = useCallback(() => { const entry = popUndo(tableId) if (!entry) return - - runWithoutRecording(() => { - executeAction(entry.action, 'undo') - }) + void runWithoutRecording(() => executeAction(entry.action, 'undo')) }, [popUndo, tableId, executeAction]) const redo = useCallback(() => { const entry = popRedo(tableId) if (!entry) return - - runWithoutRecording(() => { - executeAction(entry.action, 'redo') - }) + void runWithoutRecording(() => executeAction(entry.action, 'redo')) }, [popRedo, tableId, executeAction]) return { pushUndo, undo, redo, canUndo, canRedo } diff --git a/apps/sim/lib/table/constants.ts b/apps/sim/lib/table/constants.ts index fd165163f20..67f01eb4190 100644 --- a/apps/sim/lib/table/constants.ts +++ b/apps/sim/lib/table/constants.ts @@ -9,7 +9,7 @@ export const TABLE_LIMITS = { MAX_ROWS_PER_TABLE: 10000, MAX_ROW_SIZE_BYTES: 100 * 1024, // 100KB MAX_COLUMNS_PER_TABLE: 50, - MAX_TABLE_NAME_LENGTH: 50, + MAX_TABLE_NAME_LENGTH: 128, MAX_COLUMN_NAME_LENGTH: 50, MAX_STRING_VALUE_LENGTH: 10000, MAX_DESCRIPTION_LENGTH: 500, diff --git a/apps/sim/package.json b/apps/sim/package.json index dab333326b0..766b04cb567 100644 --- a/apps/sim/package.json +++ b/apps/sim/package.json @@ -195,6 +195,7 @@ "tailwind-merge": "^2.6.0", "tailwindcss-animate": "^1.0.7", "three": "0.177.0", + "tldts": "7.0.30", "twilio": "5.9.0", "unified": "11.0.5", "unpdf": "1.4.0", diff --git a/apps/sim/stores/table/store.ts b/apps/sim/stores/table/store.ts index 47d1e973f67..fc4df2808c7 100644 --- a/apps/sim/stores/table/store.ts +++ b/apps/sim/stores/table/store.ts @@ -81,13 +81,14 @@ function patchRowIdInEntry(entry: UndoEntry, oldRowId: string, newRowId: string) } /** - * Run a function without recording undo entries. - * Used by the hook when executing undo/redo mutations to prevent recursive recording. + * Run a function without recording undo entries. 
Supports async functions — + * `undoRedoInProgress` stays true until the returned Promise settles, so + * mutations inside `executeAction` don't accidentally push new undo entries. */ -export function runWithoutRecording(fn: () => T): T { +export async function runWithoutRecording(fn: () => T | Promise): Promise { undoRedoInProgress = true try { - return fn() + return await fn() } finally { undoRedoInProgress = false } diff --git a/bun.lock b/bun.lock index 29c91ea34bd..d682cb37cdf 100644 --- a/bun.lock +++ b/bun.lock @@ -1,6 +1,6 @@ { "lockfileVersion": 1, - "configVersion": 0, + "configVersion": 1, "workspaces": { "": { "name": "simstudio", @@ -250,6 +250,7 @@ "tailwind-merge": "^2.6.0", "tailwindcss-animate": "^1.0.7", "three": "0.177.0", + "tldts": "7.0.30", "twilio": "5.9.0", "unified": "11.0.5", "unpdf": "1.4.0", @@ -492,39 +493,39 @@ "@adobe/css-tools": ["@adobe/css-tools@4.4.4", "", {}, "sha512-Elp+iwUx5rN5+Y8xLt5/GRoG20WGoDCQ/1Fb+1LiGtvwbDavuSk0jhD/eZdckHAuzcDzccnkv+rEjyWfRx18gg=="], - "@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@3.0.98", "", { "dependencies": { "@ai-sdk/anthropic": "2.0.78", "@ai-sdk/provider": "2.0.2", "@ai-sdk/provider-utils": "3.0.24", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-zVMBSYVWYxTK7PM2LCnyRc1YGUHvOcDXGz2Xvsb4FvNFpFGTVdxp9PMFYkM12IXzpYtMNn7z6H9WRZeZ4BnPyQ=="], + "@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@3.0.99", "", { "dependencies": { "@ai-sdk/anthropic": "2.0.79", "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-d/WsYOlqjQeEwTewawjrlhoWfHt3q1vRT5/XdFJ6U+KYd/3HnAlrA3rg0+T7xMk98XmctaILJb45Ct/8zrGxSA=="], - "@ai-sdk/anthropic": ["@ai-sdk/anthropic@2.0.78", "", { "dependencies": { "@ai-sdk/provider": "2.0.2", 
"@ai-sdk/provider-utils": "3.0.24" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-88zglac49Dyf0osHVKX8Cgim5mo+xfVACSMbNlVt7q97S2hWTuMeVSPCyDM8UCJQ6m3jNNLlMZLEwVdOdxZjBg=="], + "@ai-sdk/anthropic": ["@ai-sdk/anthropic@2.0.79", "", { "dependencies": { "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-K0U09FPDO1kmLPjRLXFcNSvmnKHJBMARCb8r3Ulw7wU6/+Zh9djWcFDiPPNsklg6yAezcdLTcYPszgWJJ6iOTA=="], - "@ai-sdk/azure": ["@ai-sdk/azure@2.0.106", "", { "dependencies": { "@ai-sdk/openai": "2.0.104", "@ai-sdk/provider": "2.0.2", "@ai-sdk/provider-utils": "3.0.24" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-mJ3ZJag/A0Al1dTLOHAIQT0jep6Bh4pNWsmu4es0cYfFEQWPkFS+zKhi29dYikicmb50Gy3GKDJGXazWdHQWuA=="], + "@ai-sdk/azure": ["@ai-sdk/azure@2.0.108", "", { "dependencies": { "@ai-sdk/openai": "2.0.106", "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-/F+lx3glCDiqJfqkZP9IOCubYlWABX2Jg9Yzm/JIxZR5qHfo9rsLwS4zVtghbELVbEjxakaFlDT/c6uTBj0uug=="], - "@ai-sdk/cerebras": ["@ai-sdk/cerebras@1.0.42", "", { "dependencies": { "@ai-sdk/openai-compatible": "1.0.37", "@ai-sdk/provider": "2.0.2", "@ai-sdk/provider-utils": "3.0.24" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-zKubak4yiU4jcCyVNgUxvsc2Hj1fBC+LRfGNcPuN/3A8juHyp67rQ714tuJR04mOhtTOFNipa1A/fN8hDsH5Uw=="], + "@ai-sdk/cerebras": ["@ai-sdk/cerebras@1.0.44", "", { "dependencies": { "@ai-sdk/openai-compatible": "1.0.39", "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-2w7+jq0bWEF6McgWPb2gjaEx1TpqdUq4eyX/gPLTp7HzfDZKEVmmVXRvnKvjzBP/VH7xW4OT5jhTpTPTfYNYYQ=="], - "@ai-sdk/deepseek": ["@ai-sdk/deepseek@1.0.38", "", { "dependencies": { "@ai-sdk/provider": "2.0.2", "@ai-sdk/provider-utils": "3.0.24" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, 
"sha512-mxwHlryNx2r7k/w5ftiDnt+RRjzmsQ/Lx92DFFMbYlzO7fZvHwZKGsnbrm5DxpjDDVDKAsDKAGcT1NROQUI/sA=="], + "@ai-sdk/deepseek": ["@ai-sdk/deepseek@1.0.39", "", { "dependencies": { "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-5TXw7Pm0+/YL2WdnZpXBgruPayhqBgBMNDL95V14Sf4MQz+RmNMhansvK8Fv9Dcgp3Y0p7EasNsPWYJOfj0zoA=="], - "@ai-sdk/gateway": ["@ai-sdk/gateway@2.0.83", "", { "dependencies": { "@ai-sdk/provider": "2.0.2", "@ai-sdk/provider-utils": "3.0.24", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-qgxu2++9tJTPZtC+VGczu21YNXTtzfrLQunqh7xcCaWSogAluchrGiKFS3IZkX7Se9dEt1yYZ6+d+cGo4cko6Q=="], + "@ai-sdk/gateway": ["@ai-sdk/gateway@2.0.87", "", { "dependencies": { "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-9aPUt/pJb2NY1HPeJIGHBPUxZiZu+EX1aNyBCGDynHtLzCBaZCANMWUxrluxmGLpoYTRik+WxLzUMSZS/FEGew=="], - "@ai-sdk/google": ["@ai-sdk/google@2.0.71", "", { "dependencies": { "@ai-sdk/provider": "2.0.2", "@ai-sdk/provider-utils": "3.0.24" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-98KQxlPEU1zL0wp/098EQZou36C6hIhiXnhGEatqb1l0xGsta/DsQ614GpRK8nCfeqAO2Q27vLcgX3oltujBbA=="], + "@ai-sdk/google": ["@ai-sdk/google@2.0.72", "", { "dependencies": { "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-BjDY6l+rV4CmHKjZe4H0uRXW3M2o+g7PaYM8oFpW+9PP1qKNEybnJ6//Si7BSf6DT+86dKARrtEl09lxSSaMaA=="], - "@ai-sdk/google-vertex": ["@ai-sdk/google-vertex@3.0.133", "", { "dependencies": { "@ai-sdk/anthropic": "2.0.78", "@ai-sdk/google": "2.0.71", "@ai-sdk/openai-compatible": "1.0.37", "@ai-sdk/provider": "2.0.2", "@ai-sdk/provider-utils": "3.0.24", "google-auth-library": "^10.5.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, 
"sha512-jlM/a/lKfXwAbU29TQf5hCt4fnROlE4G/Yk+DmP2mTbWvAgHpSYx7CwMjldvUoTTpyNHk6yg8FxkTSOLA0Stcg=="], + "@ai-sdk/google-vertex": ["@ai-sdk/google-vertex@3.0.137", "", { "dependencies": { "@ai-sdk/anthropic": "2.0.79", "@ai-sdk/google": "2.0.72", "@ai-sdk/openai-compatible": "1.0.39", "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25", "google-auth-library": "^10.5.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-vDtCmwMy4CzVsv3PESmkE96qDSqnsArDDEc22eggujZI/WxmIeKa+8vyUYjJUx9HZLOCPo7HhYDXjH0R2mcM+Q=="], - "@ai-sdk/groq": ["@ai-sdk/groq@2.0.39", "", { "dependencies": { "@ai-sdk/provider": "2.0.2", "@ai-sdk/provider-utils": "3.0.24" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-S3fp415JjLvoe//bzIWWT0wEz+C9dEFx5vhJ5QS1RjuWSHNXfp46xkpS3fvHl3y6uVnxzsgghm5C1V002NGDIA=="], + "@ai-sdk/groq": ["@ai-sdk/groq@2.0.40", "", { "dependencies": { "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-1EL8D1tyjOKjCFUt8XspDoA6zxDcalMsLR2O56ji8QklWsAPaf4TuMJAvf5x5KDrkuJaSAjk94KvPH5hOX+VNQ=="], - "@ai-sdk/mistral": ["@ai-sdk/mistral@2.0.32", "", { "dependencies": { "@ai-sdk/provider": "2.0.2", "@ai-sdk/provider-utils": "3.0.24" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hs4CCm4OrVhNA2F766CPV5LUt0RlL4oOOvNXLpVZnbLzvkUDx2oLHQU/xERX1fxHzdegv8TEgB+VuUKp9TGhlg=="], + "@ai-sdk/mistral": ["@ai-sdk/mistral@2.0.33", "", { "dependencies": { "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-oBR9nJQ8TRFU0JIIXF+0cFTo8VVEreA1V8AMD3c77BJj/1NUSBLrhyqAbX9k7YAtztvZHUdFcm3+vK8KIx0sUQ=="], - "@ai-sdk/openai": ["@ai-sdk/openai@2.0.104", "", { "dependencies": { "@ai-sdk/provider": "2.0.2", "@ai-sdk/provider-utils": "3.0.24" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-pRK41FjpaHvq0tNO0c+Ye9WfrdBLmPVMPivXC4Z9fpqBKcUsmh9bo0xdPt6dEQtD/si8ygoMn4Q+6yj83NwiRg=="], + 
"@ai-sdk/openai": ["@ai-sdk/openai@2.0.106", "", { "dependencies": { "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-EFC0rpo1wfe4HIz5KZCE72edP2J7fOeR7wPXzjCDljaTRB1wectKDIKRLowpU4F0mbcJ+XScAsoYNPK/Z20aVQ=="], - "@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@1.0.37", "", { "dependencies": { "@ai-sdk/provider": "2.0.2", "@ai-sdk/provider-utils": "3.0.24" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-DZzoZHeCYGG4IBujzT+aSjodxecp1NxNZYIrfSzCzTOWnvvuZgzLE5Hwi3ZWANwRnxBbsVXVQ1b5ghxtRbkoxg=="], + "@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@1.0.39", "", { "dependencies": { "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-001hdQPPXxYBWrz5d+eAmBVYmwzsB+guIey1DFXi1ZEE5H3j7fRrhPpX55MdM9Fle2DS7WZ8b3qkumCIWE92YQ=="], - "@ai-sdk/perplexity": ["@ai-sdk/perplexity@2.0.29", "", { "dependencies": { "@ai-sdk/provider": "2.0.2", "@ai-sdk/provider-utils": "3.0.24" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-n1BX39P2fSKjQvEXWnMb6VbBq3kiKNYPaS1/Q+7hnR2Su9FR3juW7oKa0wEV+shldQKRpcZzcX14MhBeD0g37A=="], + "@ai-sdk/perplexity": ["@ai-sdk/perplexity@2.0.30", "", { "dependencies": { "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ymXWoItR4tRCIQlJcpn0zk4jBUU+j4SDnliz/z1f5U6rWxNY1ttxFCk4uZ+6Zt9e3VjQTpA9FK6cOJt18JRrKQ=="], - "@ai-sdk/provider": ["@ai-sdk/provider@2.0.2", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-Epf0oKdUxNRK97Qm4l/Sp05TnwzE8FsyRF5p6nncOp8zH0GTuwK2uZoyzE/3uVjRdZNLyQ6Jw/SBjlOScMQy1Q=="], + "@ai-sdk/provider": ["@ai-sdk/provider@2.0.3", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-h88OPkavHTiN9tMn2l5awAznGB0lXzjcLhgR1/rvjB2zlLprsNxbM2tt6OJsHUxduLC3klq0/eqaSf6fX5XVww=="], - "@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@3.0.24", "", { 
"dependencies": { "@ai-sdk/provider": "2.0.2", "@standard-schema/spec": "^1.0.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-Zq6olgYvpMgfstQNpDwgqDC2wBEE+OnMnMuq4JyIu+aWjL8JJl+6u1sbKJNPxASErWrRlmOPIkat2fHiN4puhA=="], + "@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@3.0.25", "", { "dependencies": { "@ai-sdk/provider": "2.0.3", "@standard-schema/spec": "^1.0.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-CvsRu+32Y8a167s+lrIBtsybvgTHp8j9y+6BeTvLeoW3Q+okw/b4CnNUFOLIXsRaKHQKAH+IHNJPYWywfpw0LA=="], - "@ai-sdk/togetherai": ["@ai-sdk/togetherai@1.0.40", "", { "dependencies": { "@ai-sdk/openai-compatible": "1.0.37", "@ai-sdk/provider": "2.0.2", "@ai-sdk/provider-utils": "3.0.24" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-p/tUlmcV/xKEElQliPnWn/TcqaxuC++HmZXICcfzqooMXyDfrxBBS55XWJl6QZ3IwfniDr8PrnsheYD4ECdkJg=="], + "@ai-sdk/togetherai": ["@ai-sdk/togetherai@1.0.42", "", { "dependencies": { "@ai-sdk/openai-compatible": "1.0.39", "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-V9reHPfWeaIt6fu03lVbjZDuxfdplS5jdmzVchVBeUug9VqIK+9KQELcPvdWKdxf+ov+sCoShN/O6dYfPPD5Ng=="], - "@ai-sdk/xai": ["@ai-sdk/xai@2.0.69", "", { "dependencies": { "@ai-sdk/openai-compatible": "1.0.37", "@ai-sdk/provider": "2.0.2", "@ai-sdk/provider-utils": "3.0.24" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-xnbMYsbqBawFP/fbqCswcHXaU3GB3EcuENc+EpCCTEZi6yRj4XgRs2RFRkgmmb6Pwl2XcohCj/7+SMf/VeZnGA=="], + "@ai-sdk/xai": ["@ai-sdk/xai@2.0.72", "", { "dependencies": { "@ai-sdk/openai-compatible": "1.0.39", "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-RXpfCTliybesXOmc+jGB7NhobJzzZc2rr7gSy7kGj0eHDYXkCmoo4/llpE8yKIUJMwU098DP1cBGdltPezNRiw=="], "@alloc/quick-lru": ["@alloc/quick-lru@5.2.0", "", {}, 
"sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw=="], @@ -586,31 +587,29 @@ "@aws-sdk/client-sqs": ["@aws-sdk/client-sqs@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-sdk-sqs": "^3.972.20", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/md5-js": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-n102sARTLi53Da0JT/2Kvg/bQ4bv+JqA+YQ8OlaM4CgsPn61sMv0x9PxdF6s/KbgZ2HMwYBszNzuvUttN+Beqg=="], - "@aws-sdk/client-sso": ["@aws-sdk/client-sso@3.940.0", "", { "dependencies": { 
"@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-SdqJGWVhmIURvCSgkDditHRO+ozubwZk9aCX9MK8qxyOndhobCndW1ozl3hX9psvMAo9Q4bppjuqy/GHWpjB+A=="], - "@aws-sdk/client-sso-admin": ["@aws-sdk/client-sso-admin@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.31", 
"@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-nQZibW7Uwflbn7wC3CnA1hXIo34f2oxvPsfmAnzqyOhSHL6v1LNwElQywmZStiFjmATIlJVQKvCaj+/MAKscNw=="], "@aws-sdk/client-sts": ["@aws-sdk/client-sts@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/signature-v4-multi-region": "^3.996.18", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", 
"@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-FCLc5VWb+yz1xb/Jv0sXFGqIIs+bHZQWBKbPQKCuypF3wU/7UFygXuSXo9uJfwISKNGVHJwp+0136f8mqmzRcA=="], - "@aws-sdk/core": ["@aws-sdk/core@3.974.6", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@aws-sdk/xml-builder": "^3.972.20", "@smithy/core": "^3.23.17", "@smithy/node-config-provider": "^4.3.14", "@smithy/property-provider": "^4.2.14", "@smithy/protocol-http": "^5.3.14", "@smithy/signature-v4": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.5", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-8Vu7zGxu+39ChR/s5J7nXBw3a2kMHAi0OfKT8ohgTVjX0qYed/8mIfdBb638oBmKrWCwwKjYAM5J/4gMJ8nAJA=="], + "@aws-sdk/core": ["@aws-sdk/core@3.974.8", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@aws-sdk/xml-builder": "^3.972.22", "@smithy/core": "^3.23.17", "@smithy/node-config-provider": "^4.3.14", "@smithy/property-provider": "^4.2.14", "@smithy/protocol-http": 
"^5.3.14", "@smithy/signature-v4": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.6", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-njR2qoG6ZuB0kvAS2FyICsFZJ6gmCcf2X/7JcD14sUvGDm26wiZ5BrA6LOiUxKFEF+IVe7kdroxyE00YlkiYsw=="], "@aws-sdk/crc64-nvme": ["@aws-sdk/crc64-nvme@3.972.7", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-QUagVVBbC8gODCF6e1aV0mE2TXWB9Opz4k8EJFdNrujUVQm5R4AjJa1mpOqzwOuROBzqJU9zawzig7M96L8Ejg=="], - "@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.972.32", "", { "dependencies": { "@aws-sdk/core": "^3.974.6", "@aws-sdk/types": "^3.973.8", "@smithy/property-provider": "^4.2.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-7vA4GHg8NSmQxquJHSBcSM3RgB4ZaaRi6u4+zGFKOmOH6aqlgr2Sda46clkZDYzlirgfY96w15Zj0jh6PT48ng=="], + "@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.972.34", "", { "dependencies": { "@aws-sdk/core": "^3.974.8", "@aws-sdk/types": "^3.973.8", "@smithy/property-provider": "^4.2.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-XT0jtf8Fw9JE6ppsQeoNnZRiG+jqRixMT1v1ZR17G60UvVdsQmTG8nbEyHuEPfMxDXEhfdARaM/XiEhca4lGHQ=="], - "@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.972.34", "", { "dependencies": { "@aws-sdk/core": "^3.974.6", "@aws-sdk/types": "^3.973.8", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/node-http-handler": "^4.6.1", "@smithy/property-provider": "^4.2.14", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/util-stream": "^4.5.25", "tslib": "^2.6.2" } }, "sha512-vBrhWujFCLp1u8ptJRWYlipMutzPptb8pDQ00rKVH9q67T7rGd3VTWIj63aKrlLuY6qSsw1Rt5F/D/7wnNgryA=="], + "@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.972.36", "", { "dependencies": { "@aws-sdk/core": "^3.974.8", 
"@aws-sdk/types": "^3.973.8", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/node-http-handler": "^4.6.1", "@smithy/property-provider": "^4.2.14", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/util-stream": "^4.5.25", "tslib": "^2.6.2" } }, "sha512-DPoGWfy7J7RKxvbf5kOKIGQkD2ek3dbKgzKIGrnLuvZBz5myU+Im/H6pmc14QcnFbqHMqxvtWSgRDSJW3qXLQg=="], - "@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.972.36", "", { "dependencies": { "@aws-sdk/core": "^3.974.6", "@aws-sdk/credential-provider-env": "^3.972.32", "@aws-sdk/credential-provider-http": "^3.972.34", "@aws-sdk/credential-provider-login": "^3.972.36", "@aws-sdk/credential-provider-process": "^3.972.32", "@aws-sdk/credential-provider-sso": "^3.972.36", "@aws-sdk/credential-provider-web-identity": "^3.972.36", "@aws-sdk/nested-clients": "^3.997.4", "@aws-sdk/types": "^3.973.8", "@smithy/credential-provider-imds": "^4.2.14", "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-FBHyCmV8EB0gUvh1d+CZm87zt2PrdC7OyWexLRoH3I5zWSOUGa+9t58Y5jbxRfwUp3AWpHAFvKY6YzgR845sVA=="], + "@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.972.38", "", { "dependencies": { "@aws-sdk/core": "^3.974.8", "@aws-sdk/credential-provider-env": "^3.972.34", "@aws-sdk/credential-provider-http": "^3.972.36", "@aws-sdk/credential-provider-login": "^3.972.38", "@aws-sdk/credential-provider-process": "^3.972.34", "@aws-sdk/credential-provider-sso": "^3.972.38", "@aws-sdk/credential-provider-web-identity": "^3.972.38", "@aws-sdk/nested-clients": "^3.997.6", "@aws-sdk/types": "^3.973.8", "@smithy/credential-provider-imds": "^4.2.14", "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, 
"sha512-oDzUBu2MGJFgoar05sPMCwSrhw44ASyccrHzj66vO69OZqi7I6hZZxXfuPLC8OCzW7C+sU+bI73XHij41yekgQ=="], - "@aws-sdk/credential-provider-login": ["@aws-sdk/credential-provider-login@3.972.36", "", { "dependencies": { "@aws-sdk/core": "^3.974.6", "@aws-sdk/nested-clients": "^3.997.4", "@aws-sdk/types": "^3.973.8", "@smithy/property-provider": "^4.2.14", "@smithy/protocol-http": "^5.3.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-IFap01lJKxQc0C/OHmZwZQr/cKq0DhrcmKedRrdnnl42D+P0SImnnnWQjv07uIPqpEdtqmkPXb9TiPYTU+prxQ=="], + "@aws-sdk/credential-provider-login": ["@aws-sdk/credential-provider-login@3.972.38", "", { "dependencies": { "@aws-sdk/core": "^3.974.8", "@aws-sdk/nested-clients": "^3.997.6", "@aws-sdk/types": "^3.973.8", "@smithy/property-provider": "^4.2.14", "@smithy/protocol-http": "^5.3.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-g1NosS8qe4OF++G2UFCM5ovSkgipC7YYor5KCWatG0UoMSO5YFj9C8muePlyVmOBV/WTI16Jo3/s1NUo/o1Bww=="], - "@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.972.37", "", { "dependencies": { "@aws-sdk/credential-provider-env": "^3.972.32", "@aws-sdk/credential-provider-http": "^3.972.34", "@aws-sdk/credential-provider-ini": "^3.972.36", "@aws-sdk/credential-provider-process": "^3.972.32", "@aws-sdk/credential-provider-sso": "^3.972.36", "@aws-sdk/credential-provider-web-identity": "^3.972.36", "@aws-sdk/types": "^3.973.8", "@smithy/credential-provider-imds": "^4.2.14", "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-/WFixFAAiw8WpmjZcI0l4t3DerXLmVinOIfuotmRZnu2qmsFPoqqmstASz0z8bi1pGdFXzeLzf6bwucM3mZcUQ=="], + "@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.972.39", "", { "dependencies": { "@aws-sdk/credential-provider-env": "^3.972.34", "@aws-sdk/credential-provider-http": 
"^3.972.36", "@aws-sdk/credential-provider-ini": "^3.972.38", "@aws-sdk/credential-provider-process": "^3.972.34", "@aws-sdk/credential-provider-sso": "^3.972.38", "@aws-sdk/credential-provider-web-identity": "^3.972.38", "@aws-sdk/types": "^3.973.8", "@smithy/credential-provider-imds": "^4.2.14", "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-HEswDQyxUtadoZ/bJsPPENHg7R0Lzym5LuMksJeHvqhCOpP+rtkDLKI4/ZChH4w3cf5kG8n6bZuI8PzajoiqMg=="], - "@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.972.32", "", { "dependencies": { "@aws-sdk/core": "^3.974.6", "@aws-sdk/types": "^3.973.8", "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-uZp4tlGbpczV8QxmtIwOpSkcyGtBRR8/T4BAumRKfAt1nwCig3FSCZvrKl6ARDIDVRYn5p2oRcAsfFR01EgMGA=="], + "@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.972.34", "", { "dependencies": { "@aws-sdk/core": "^3.974.8", "@aws-sdk/types": "^3.973.8", "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-T3IFs4EVmVi1dVN5RciFnklCANSzvrQd/VuHY9ThHSQmYkTogjcGkoJEr+oNUPQZnso52183088NqysMPji1/Q=="], - "@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.972.36", "", { "dependencies": { "@aws-sdk/core": "^3.974.6", "@aws-sdk/nested-clients": "^3.997.4", "@aws-sdk/token-providers": "3.1038.0", "@aws-sdk/types": "^3.973.8", "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-DsLr0UHMyKzRJKe2bjlwU8q1cfoXg8TIJKV/xwvnalAemiZLOZunFzj/whGnFDZIBVLdnbLiwv5SvRf1+CSwkg=="], + "@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.972.38", "", { "dependencies": { "@aws-sdk/core": "^3.974.8", "@aws-sdk/nested-clients": 
"^3.997.6", "@aws-sdk/token-providers": "3.1041.0", "@aws-sdk/types": "^3.973.8", "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-5ZxG+t0+3Q3QPh8KEjX6syskhgNf7I0MN7oGioTf6Lm1NTjfP7sIcYGNsthXC2qR8vcD3edNZwCr2ovfSSWuRA=="], - "@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.972.36", "", { "dependencies": { "@aws-sdk/core": "^3.974.6", "@aws-sdk/nested-clients": "^3.997.4", "@aws-sdk/types": "^3.973.8", "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-uzrURO7frJhHQVVNR5zBJcCYeMYflmXcWBK1+MiBym2Dfjh6nXATrMixrmGZi+97Q7ETZ+y/4lUwAy0Nfnznjw=="], + "@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.972.38", "", { "dependencies": { "@aws-sdk/core": "^3.974.8", "@aws-sdk/nested-clients": "^3.997.6", "@aws-sdk/types": "^3.973.8", "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-lYHFF30DGI20jZcYX8cm6Ns0V7f1dDN6g/MBDLTyD/5iw+bXs3yBr2iAiHDkx4RFU5JgsnZvCHYKiRVPRdmOgw=="], "@aws-sdk/dynamodb-codec": ["@aws-sdk/dynamodb-codec@3.973.8", "", { "dependencies": { "@aws-sdk/core": "^3.974.8", "@smithy/core": "^3.23.17", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "tslib": "^2.6.2" } }, "sha512-dYQ/cQqHZd23hcl8oEGwPphTqyGnmvf2HrVmz4J90Q5Bv89oJjlwcBcifiiTvApqsVpx7Pr0IebMpkYwWJvZlQ=="], @@ -628,7 +627,7 @@ "@aws-sdk/middleware-expect-continue": ["@aws-sdk/middleware-expect-continue@3.972.10", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-2Yn0f1Qiq/DjxYR3wfI3LokXnjOhFM7Ssn4LTdFDIxRMCE6I32MAsVnhPX1cUZsuVA9tiZtwwhlSLAtFGxAZlQ=="], - "@aws-sdk/middleware-flexible-checksums": 
["@aws-sdk/middleware-flexible-checksums@3.974.14", "", { "dependencies": { "@aws-crypto/crc32": "5.2.0", "@aws-crypto/crc32c": "5.2.0", "@aws-crypto/util": "5.2.0", "@aws-sdk/core": "^3.974.6", "@aws-sdk/crc64-nvme": "^3.972.7", "@aws-sdk/types": "^3.973.8", "@smithy/is-array-buffer": "^4.2.2", "@smithy/node-config-provider": "^4.3.14", "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-stream": "^4.5.25", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-mhTO3amGzYv/DQNbbqZo6UkHquBHlEEVRZwXmjeRqLmy1l9z3xCiFzglPL7n9JpVc2DZc9kjaraAn3JQrueZbw=="], + "@aws-sdk/middleware-flexible-checksums": ["@aws-sdk/middleware-flexible-checksums@3.974.16", "", { "dependencies": { "@aws-crypto/crc32": "5.2.0", "@aws-crypto/crc32c": "5.2.0", "@aws-crypto/util": "5.2.0", "@aws-sdk/core": "^3.974.8", "@aws-sdk/crc64-nvme": "^3.972.7", "@aws-sdk/types": "^3.973.8", "@smithy/is-array-buffer": "^4.2.2", "@smithy/node-config-provider": "^4.3.14", "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-stream": "^4.5.25", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-6ru8doI0/XzszqLIPXf0E/V7HhAw1Pu94010XCKYtBUfD0LxF0BuOzrUf8OQGR6j2o6wgKTHUniOmndQycHwCA=="], "@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.972.10", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-IJSsIMeVQ8MMCPbuh1AbltkFhLBLXn7aejzfX5YKT/VLDHn++Dcz8886tXckE+wQssyPUhaXrJhdakO2VilRhg=="], @@ -638,23 +637,23 @@ "@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.972.11", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@aws/lambda-invoke-store": "^0.2.2", "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, 
"sha512-+zz6f79Kj9V5qFK2P+D8Ehjnw4AhphAlCAsPjUqEcInA9umtSSKMrHbSagEeOIsDNuvVrH98bjRHcyQukTrhaQ=="], - "@aws-sdk/middleware-sdk-s3": ["@aws-sdk/middleware-sdk-s3@3.972.35", "", { "dependencies": { "@aws-sdk/core": "^3.974.6", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-arn-parser": "^3.972.3", "@smithy/core": "^3.23.17", "@smithy/node-config-provider": "^4.3.14", "@smithy/protocol-http": "^5.3.14", "@smithy/signature-v4": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/util-config-provider": "^4.2.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-stream": "^4.5.25", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-lLppaNTAz+wNgLdi4FtHzrlwrGF0ODTnBWHBaFg85SKs0eJ+M+tP5ifrA8f/0lNd+Ak3MC1NGC6RavV3ny4HTg=="], + "@aws-sdk/middleware-sdk-s3": ["@aws-sdk/middleware-sdk-s3@3.972.37", "", { "dependencies": { "@aws-sdk/core": "^3.974.8", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-arn-parser": "^3.972.3", "@smithy/core": "^3.23.17", "@smithy/node-config-provider": "^4.3.14", "@smithy/protocol-http": "^5.3.14", "@smithy/signature-v4": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/util-config-provider": "^4.2.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-stream": "^4.5.25", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-Km7M+i8DrLArVzrid1gfxeGhYHBd3uxvE77g0s5a52zPSVosxzQBnJ0gwWb6NIp/DOk8gsBMhi7V+cpJG0ndTA=="], "@aws-sdk/middleware-sdk-sqs": ["@aws-sdk/middleware-sdk-sqs@3.972.22", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/util-hex-encoding": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-DtR3mEiOUJcnEX/QuXmvbJto6xvQzp2ftnHb29c0aQYdmmzbKf0gsu9ovx1i/yy4ZR6m0rttTucS0iiP32dlGA=="], "@aws-sdk/middleware-ssec": ["@aws-sdk/middleware-ssec@3.972.10", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, 
"sha512-Gli9A0u8EVVb+5bFDGS/QbSVg28w/wpEidg1ggVcSj65BDTdGR6punsOcVjqdiu1i42WHWo51MCvARPIIz9juw=="], - "@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.972.36", "", { "dependencies": { "@aws-sdk/core": "^3.974.6", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.8", "@smithy/core": "^3.23.17", "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "@smithy/util-retry": "^4.3.5", "tslib": "^2.6.2" } }, "sha512-O2beToxguBvrZFFZ+fFgPbbae8MvyIBjQ6lImee4APHEXXNAD5ZJ2ayLF1mb7rsKw86TM81y5czg82bZncjSjg=="], + "@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.972.38", "", { "dependencies": { "@aws-sdk/core": "^3.974.8", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.8", "@smithy/core": "^3.23.17", "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "@smithy/util-retry": "^4.3.6", "tslib": "^2.6.2" } }, "sha512-iz+B29TXcAZsJpwB+AwG/TTGA5l/VnmMZ2UxtiySOZjI6gCdmviXPwdgzcmuazMy16rXoPY4mYCGe7zdNKfx5A=="], "@aws-sdk/middleware-websocket": ["@aws-sdk/middleware-websocket@3.972.16", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-format-url": "^3.972.10", "@smithy/eventstream-codec": "^4.2.14", "@smithy/eventstream-serde-browser": "^4.2.14", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/protocol-http": "^5.3.14", "@smithy/signature-v4": "^5.3.14", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-hex-encoding": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-86+S9oCyRVGzoMRpQhxkArp7kD2K75GPmaNevd9B6EyNhWoNvnCZZ3WbgN4j7ZT+jvtvBCGZvI2XHsWZJ+BRIg=="], - "@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.997.4", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.6", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": 
"^3.972.36", "@aws-sdk/region-config-resolver": "^3.972.13", "@aws-sdk/signature-v4-multi-region": "^3.996.23", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.8", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.22", "@smithy/config-resolver": "^4.4.17", "@smithy/core": "^3.23.17", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.32", "@smithy/middleware-retry": "^4.5.6", "@smithy/middleware-serde": "^4.2.20", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.6.1", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.49", "@smithy/util-defaults-mode-node": "^4.2.54", "@smithy/util-endpoints": "^3.4.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.5", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-4Sf+WY1lMJzXlw5MiyCMe/UzdILCwvuaHThbqMXS6dfh9gZy3No360I42RXquOI/ULUOhWy2HCyU0Fp20fQGPQ=="], + "@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.997.6", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.8", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.38", "@aws-sdk/region-config-resolver": "^3.972.13", "@aws-sdk/signature-v4-multi-region": "^3.996.25", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.8", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": 
"^3.973.24", "@smithy/config-resolver": "^4.4.17", "@smithy/core": "^3.23.17", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.32", "@smithy/middleware-retry": "^4.5.7", "@smithy/middleware-serde": "^4.2.20", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.6.1", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.49", "@smithy/util-defaults-mode-node": "^4.2.54", "@smithy/util-endpoints": "^3.4.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.6", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-WBDnqatJl+kGObpfmfSxqnXeYTu3Me8wx8WCtvoxX3pfWrrTv8I4WTMSSs7PZqcRcVh8WeUKMgGFjMG+52SR1w=="], "@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.972.13", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@smithy/config-resolver": "^4.4.17", "@smithy/node-config-provider": "^4.3.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-CvJ2ZIjK/jVD/lbOpowBVElJyC1YxLTIJ13yM0AEo0t2v7swOzGjSA6lJGH+DwZXQhcjUjoYwc8bVYCX5MDr1A=="], "@aws-sdk/s3-request-presigner": ["@aws-sdk/s3-request-presigner@3.1032.0", "", { "dependencies": { "@aws-sdk/signature-v4-multi-region": "^3.996.18", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-format-url": "^3.972.10", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-LFaI5JQhiOmJDjKK02ir9oERU9AmxdyEvzv332oPDzAzWeNH06sZ1WsF3xRBBE5tbEH2jIc79N8EqDCY0s5kKQ=="], - "@aws-sdk/signature-v4-multi-region": 
["@aws-sdk/signature-v4-multi-region@3.996.23", "", { "dependencies": { "@aws-sdk/middleware-sdk-s3": "^3.972.35", "@aws-sdk/types": "^3.973.8", "@smithy/protocol-http": "^5.3.14", "@smithy/signature-v4": "^5.3.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-wBbys3Y53Ikly556vyADurKpYQHXS7Jjaskbz+Ga9PZCz7PB/9f3VdKbDlz7dqIzn+xwz7L/a6TR4iXcOi8IRw=="], + "@aws-sdk/signature-v4-multi-region": ["@aws-sdk/signature-v4-multi-region@3.996.25", "", { "dependencies": { "@aws-sdk/middleware-sdk-s3": "^3.972.37", "@aws-sdk/types": "^3.973.8", "@smithy/protocol-http": "^5.3.14", "@smithy/signature-v4": "^5.3.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-+CMIt3e1VzlklAECmG+DtP1sV8iKq25FuA0OKpnJ4KA0kxUtd7CgClY7/RU6VzJBQwbN4EJ9Ue6plvqx1qGadw=="], "@aws-sdk/token-providers": ["@aws-sdk/token-providers@3.1032.0", "", { "dependencies": { "@aws-sdk/core": "^3.974.1", "@aws-sdk/nested-clients": "^3.996.21", "@aws-sdk/types": "^3.973.8", "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-n+PU8Z+gll7p3wDrH+Wo6fkt8sPrVnq30YYM6Ryga95oJlEneNMEbDHj0iqjMX3V7gaGdJo/hJWyPo4lscP+mA=="], @@ -672,9 +671,9 @@ "@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.972.10", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@smithy/types": "^4.14.1", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-FAzqXvfEssGdSIz8ejatan0bOdx1qefBWKF/gWmVBXIP1HkS7v/wjjaqrAGGKvyihrXTXW00/2/1nTJtxpXz7g=="], - "@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.973.22", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "^3.972.36", "@aws-sdk/types": "^3.973.8", "@smithy/node-config-provider": "^4.3.14", "@smithy/types": "^4.14.1", "@smithy/util-config-provider": "^4.2.2", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, 
"sha512-YTYqTmOUrwbm1h99Ee4y/mVYpFRl0oSO/amtP5cc1BZZWdaAVWs9zj3TkyRHWvR9aI/ZS8m3mS6awXtYUlWyaw=="], + "@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.973.24", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "^3.972.38", "@aws-sdk/types": "^3.973.8", "@smithy/node-config-provider": "^4.3.14", "@smithy/types": "^4.14.1", "@smithy/util-config-provider": "^4.2.2", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-ZWwlkjcIp7cEL8ZfTpTAPNkwx25p7xol0xlKoWVVf22+nsjwmLcHYtTPjIV1cSpmB/b6DaK4cb1fSkvCXHgRdw=="], - "@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.21", "", { "dependencies": { "@nodable/entities": "2.1.0", "@smithy/types": "^4.14.1", "fast-xml-parser": "5.7.2", "tslib": "^2.6.2" } }, "sha512-qxNiHUtlrsjTeSlrPWiFkWps7uD6YB4eKzg7eLAFH8jbiHTlt0ePNlo2Xu+WlftP38JIcMaIX4jTUjOlE2ySWw=="], + "@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.22", "", { "dependencies": { "@nodable/entities": "2.1.0", "@smithy/types": "^4.14.1", "fast-xml-parser": "5.7.2", "tslib": "^2.6.2" } }, "sha512-PMYKKtJd70IsSG0yHrdAbxBr+ZWBKLvzFZfD3/urxgf6hXVMzuU5M+3MJ5G67RpOmLBu1fAUN65SbWuKUCOlAA=="], "@aws/lambda-invoke-store": ["@aws/lambda-invoke-store@0.2.4", "", {}, "sha512-iY8yvjE0y651BixKNPgmv1WrQc+GZ142sb0z4gYnChDDY2YqI4P/jsSopBWrKfAt7LOJAkOXt7rC/hms+WclQQ=="], @@ -710,7 +709,7 @@ "@babel/code-frame": ["@babel/code-frame@7.29.0", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.28.5", "js-tokens": "^4.0.0", "picocolors": "^1.1.1" } }, "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw=="], - "@babel/compat-data": ["@babel/compat-data@7.29.0", "", {}, "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg=="], + "@babel/compat-data": ["@babel/compat-data@7.29.3", "", {}, "sha512-LIVqM46zQWZhj17qA8wb4nW/ixr2y1Nw+r1etiAWgRM6U1IqP+LNhL1yg440jYZR72jCWcWbLWzIosH+uP1fqg=="], "@babel/core": 
["@babel/core@7.29.0", "", { "dependencies": { "@babel/code-frame": "^7.29.0", "@babel/generator": "^7.29.0", "@babel/helper-compilation-targets": "^7.28.6", "@babel/helper-module-transforms": "^7.28.6", "@babel/helpers": "^7.28.6", "@babel/parser": "^7.29.0", "@babel/template": "^7.28.6", "@babel/traverse": "^7.29.0", "@babel/types": "^7.29.0", "@jridgewell/remapping": "^2.3.5", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA=="], @@ -734,7 +733,7 @@ "@babel/helpers": ["@babel/helpers@7.29.2", "", { "dependencies": { "@babel/template": "^7.28.6", "@babel/types": "^7.29.0" } }, "sha512-HoGuUs4sCZNezVEKdVcwqmZN8GoHirLUcLaYVNBK2J0DadGtdcqgr3BCbvH8+XUo4NGjNl3VOtSjEKNzqfFgKw=="], - "@babel/parser": ["@babel/parser@7.29.2", "", { "dependencies": { "@babel/types": "^7.29.0" }, "bin": "./bin/babel-parser.js" }, "sha512-4GgRzy/+fsBa72/RZVJmGKPmZu9Byn8o4MoLpmNe1m8ZfYnz5emHLQz3U4gLud6Zwl0RZIcgiLD7Uq7ySFuDLA=="], + "@babel/parser": ["@babel/parser@7.29.3", "", { "dependencies": { "@babel/types": "^7.29.0" }, "bin": "./bin/babel-parser.js" }, "sha512-b3ctpQwp+PROvU/cttc4OYl4MzfJUWy6FZg+PMXfzmt/+39iHVF0sDfqay8TQM3JA2EUOyKcFZt75jWriQijsA=="], "@babel/plugin-transform-react-jsx-self": ["@babel/plugin-transform-react-jsx-self@7.27.1", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.27.1" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw=="], @@ -820,7 +819,7 @@ "@drizzle-team/brocli": ["@drizzle-team/brocli@0.10.2", "", {}, "sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w=="], - "@e2b/code-interpreter": ["@e2b/code-interpreter@2.4.1", "", { "dependencies": { "e2b": "^2.19.2" } }, 
"sha512-9T+NcQPtB3Utm0KAB3vdhx6vC1X+Y3cV6oydk2GnVuEqn0lUAY+9/8WdHuh/0l4L15aO2JynufP5oQwub7gDhw=="], + "@e2b/code-interpreter": ["@e2b/code-interpreter@2.4.2", "", { "dependencies": { "e2b": "^2.19.4" } }, "sha512-udLYysT+Jrue5citQc6Wr6N7Et9Eiw8FTeQpf6NQdLEg4RM6aZJoi7QFC0oqr+rv6g+I4W1KGdrxW1eBtKbnRw=="], "@electric-sql/client": ["@electric-sql/client@1.0.14", "", { "dependencies": { "@microsoft/fetch-event-source": "^2.0.1" }, "optionalDependencies": { "@rollup/rollup-darwin-arm64": "^4.18.1" } }, "sha512-LtPAfeMxXRiYS0hyDQ5hue2PjljUiK9stvzsVyVb4nwxWQxfOWTSF42bHTs/o5i3x1T4kAQ7mwHpxa4A+f8X7Q=="], @@ -916,7 +915,7 @@ "@iconify/types": ["@iconify/types@2.0.0", "", {}, "sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg=="], - "@iconify/utils": ["@iconify/utils@3.1.1", "", { "dependencies": { "@antfu/install-pkg": "^1.1.0", "@iconify/types": "^2.0.0", "mlly": "^1.8.2" } }, "sha512-MwzoDtw9rO1x+qfgLTV/IVXsHDBqeYZoMIQC8SfxfYSlaSUG+oWiAcoiB1yajAda6mqblm4/1/w2E8tRu7a7Tw=="], + "@iconify/utils": ["@iconify/utils@3.1.2", "", { "dependencies": { "@antfu/install-pkg": "^1.1.0", "@iconify/types": "^2.0.0", "import-meta-resolve": "^4.2.0" } }, "sha512-jVf75icVVgSVGf9+QWBeCHdFL35yZ06HMHl9sCa059pITTP781lOacvRazfwAmXDKiBiUdQQMWVnuiw/RaQNhQ=="], "@img/colour": ["@img/colour@1.1.0", "", {}, "sha512-Td76q7j57o/tLVdgS746cYARfSyxk8iEfRxewL9h4OMzYhbW4TAcppl0mT4eyqXddh6L/jwoM75mo7ixa/pCeQ=="], @@ -994,8 +993,6 @@ "@jsonhero/path": ["@jsonhero/path@1.0.21", "", {}, "sha512-gVUDj/92acpVoJwsVJ/RuWOaHyG4oFzn898WNGQItLCTQ+hOaVlEaImhwE1WqOTf+l3dGOUkbSiVKlb3q1hd1Q=="], - "@kurkle/color": ["@kurkle/color@0.3.4", "", {}, "sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w=="], - "@langchain/core": ["@langchain/core@0.3.80", "", { "dependencies": { "@cfworker/json-schema": "^4.0.2", "ansi-styles": "^5.0.0", "camelcase": "6", "decamelize": "1.2.0", "js-tiktoken": "^1.0.12", "langsmith": "^0.3.67", "mustache": 
"^4.2.0", "p-queue": "^6.6.2", "p-retry": "4", "uuid": "^10.0.0", "zod": "^3.25.32", "zod-to-json-schema": "^3.22.3" } }, "sha512-vcJDV2vk1AlCwSh3aBm/urQ1ZrlXFFBocv11bz/NBUfLWD5/UDNMzwPdaAd2dKvNmTWa9FM2lirLU3+JCf4cRA=="], "@langchain/openai": ["@langchain/openai@0.4.9", "", { "dependencies": { "js-tiktoken": "^1.0.12", "openai": "^4.87.3", "zod": "^3.22.4", "zod-to-json-schema": "^3.22.3" }, "peerDependencies": { "@langchain/core": ">=0.3.39 <0.4.0" } }, "sha512-NAsaionRHNdqaMjVLPkFCyjUDze+OqRHghA1Cn4fPoAafz+FXcl9c7LlEl9Xo0FH6/8yiCl7Rw2t780C/SBVxQ=="], @@ -1020,7 +1017,7 @@ "@monaco-editor/react": ["@monaco-editor/react@4.7.0", "", { "dependencies": { "@monaco-editor/loader": "^1.5.0" }, "peerDependencies": { "monaco-editor": ">= 0.25.0 < 1", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-cyzXQCtO47ydzxpQtCGSQGOC8Gk3ZUeBXFAxD+CWXYFo5OqZyZUonFl0DwUlTyAfRHntBfw2p3w4s9R6oe1eCA=="], - "@mongodb-js/saslprep": ["@mongodb-js/saslprep@1.4.9", "", { "dependencies": { "sparse-bitfield": "^3.0.3" } }, "sha512-RXSxsokhAF/4nWys8An8npsqOI33Ex1Hlzqjw2pZOO+GKtMAR2noGnUdsFiGwsaO/xXI+56mtjTmDA3JXJsvmA=="], + "@mongodb-js/saslprep": ["@mongodb-js/saslprep@1.4.11", "", { "dependencies": { "sparse-bitfield": "^3.0.3" } }, "sha512-o9rAHc0IpIjuPSxRutWpE1F62x7n+4mVS4rCNHkzhIUMQcc18bb6xEq5wd2NdN0WjepIyXIppRshYI2kQDOZVA=="], "@napi-rs/canvas": ["@napi-rs/canvas@0.1.100", "", { "optionalDependencies": { "@napi-rs/canvas-android-arm64": "0.1.100", "@napi-rs/canvas-darwin-arm64": "0.1.100", "@napi-rs/canvas-darwin-x64": "0.1.100", "@napi-rs/canvas-linux-arm-gnueabihf": "0.1.100", "@napi-rs/canvas-linux-arm64-gnu": "0.1.100", "@napi-rs/canvas-linux-arm64-musl": "0.1.100", "@napi-rs/canvas-linux-riscv64-gnu": "0.1.100", "@napi-rs/canvas-linux-x64-gnu": "0.1.100", "@napi-rs/canvas-linux-x64-musl": "0.1.100", "@napi-rs/canvas-win32-arm64-msvc": "0.1.100", "@napi-rs/canvas-win32-x64-msvc": "0.1.100" } }, 
"sha512-xglYA6q3XO5P3BNJYxVZ1IV7DLVjp1Py6nwag88YntrS+3vKHyYcMqXVS4ZztJmwz2uGvz1FWhI/4LgbR5uQDA=="], @@ -1144,7 +1141,7 @@ "@opentelemetry/propagator-jaeger": ["@opentelemetry/propagator-jaeger@2.0.0", "", { "dependencies": { "@opentelemetry/core": "2.0.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-Mbm/LSFyAtQKP0AQah4AfGgsD+vsZcyreZoQ5okFBk33hU7AquU4TltgyL9dvaO8/Zkoud8/0gEvwfOZ5d7EPA=="], - "@opentelemetry/resources": ["@opentelemetry/resources@2.7.0", "", { "dependencies": { "@opentelemetry/core": "2.7.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-K+oi0hNMv94EpZbnW3eyu2X6SGVpD3O5DhG2NIp65Hc7lhAj9brRXTAVzh3wB82+q3ThakEf7Zd7RsFUqcTc7A=="], + "@opentelemetry/resources": ["@opentelemetry/resources@2.7.1", "", { "dependencies": { "@opentelemetry/core": "2.7.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-DeT6KKolmC4e/dRQvMQ/RwlnzhaqeiFOXY5ngoOPJ07GgVVKxZOg9EcrNZb5aTzUn+iCrJldAgOfQm1O/QfPAQ=="], "@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.200.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.200.0", "@opentelemetry/core": "2.0.0", "@opentelemetry/resources": "2.0.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-VZG870063NLfObmQQNtCVcdXXLzI3vOjjrRENmU37HYiPFa0ZXpXVDsTD02Nh3AT3xYJzQaWKl2X2lQ2l7TWJA=="], @@ -1164,27 +1161,29 @@ "@pdf-lib/upng": ["@pdf-lib/upng@1.0.1", "", { "dependencies": { "pako": "^1.0.10" } }, "sha512-dQK2FUMQtowVP00mtIksrlZhdFXQZPC+taih1q4CvPZ5vqdxR/LKBaFg0oAfzd1GlHZXXSPdQfzQnt+ViGvEIQ=="], - "@peculiar/asn1-android": ["@peculiar/asn1-android@2.6.0", "", { "dependencies": { "@peculiar/asn1-schema": "^2.6.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-cBRCKtYPF7vJGN76/yG8VbxRcHLPF3HnkoHhKOZeHpoVtbMYfY9ROKtH3DtYUY9m8uI1Mh47PRhHf2hSK3xcSQ=="], + "@peculiar/asn1-android": 
["@peculiar/asn1-android@2.7.0", "", { "dependencies": { "@peculiar/asn1-schema": "^2.7.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-iD3VskhVQnM4nE3PN9cBdPTR7JrqZy3FYk+uD2CeG6DUqKoANqaEfx0f7izPmW+Qm5JBM35ek+viLCmjy18ByQ=="], - "@peculiar/asn1-cms": ["@peculiar/asn1-cms@2.6.1", "", { "dependencies": { "@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.1", "@peculiar/asn1-x509-attr": "^2.6.1", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-vdG4fBF6Lkirkcl53q6eOdn3XYKt+kJTG59edgRZORlg/3atWWEReRCx5rYE1ZzTTX6vLK5zDMjHh7vbrcXGtw=="], + "@peculiar/asn1-cms": ["@peculiar/asn1-cms@2.7.0", "", { "dependencies": { "@peculiar/asn1-schema": "^2.7.0", "@peculiar/asn1-x509": "^2.7.0", "@peculiar/asn1-x509-attr": "^2.7.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-hew63shtzzvBcSHbhm+cyAmKe6AIfinT9hzEqSPjDC6opTTMKmTkQ0gHuN2KsWlvqiKw1S/fS94fhag/FJkioQ=="], - "@peculiar/asn1-csr": ["@peculiar/asn1-csr@2.6.1", "", { "dependencies": { "@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.1", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-WRWnKfIocHyzFYQTka8O/tXCiBquAPSrRjXbOkHbO4qdmS6loffCEGs+rby6WxxGdJCuunnhS2duHURhjyio6w=="], + "@peculiar/asn1-csr": ["@peculiar/asn1-csr@2.7.0", "", { "dependencies": { "@peculiar/asn1-schema": "^2.7.0", "@peculiar/asn1-x509": "^2.7.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-VVsAyGqErT9D1SY4aEqozThXMVI+ssVRiv2DDeYuvpBKLIgZ3hYs3Ay3u/VSoKq6ESFi9cf6rf3IOOzfwh7oMA=="], - "@peculiar/asn1-ecc": ["@peculiar/asn1-ecc@2.6.1", "", { "dependencies": { "@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.1", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-+Vqw8WFxrtDIN5ehUdvlN2m73exS2JVG0UAyfVB31gIfor3zWEAQPD+K9ydCxaj3MLen9k0JhKpu9LqviuCE1g=="], + "@peculiar/asn1-ecc": ["@peculiar/asn1-ecc@2.7.0", "", { "dependencies": { "@peculiar/asn1-schema": "^2.7.0", "@peculiar/asn1-x509": "^2.7.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, 
"sha512-n7KEs/Q/wrB415cxy4fHOBhegp4NdJ15fkJPwcB/3/8iNBQC2L/N7SChJPKDJPZGYH0jD4Tg4/0vnHmwghnbKw=="], - "@peculiar/asn1-pfx": ["@peculiar/asn1-pfx@2.6.1", "", { "dependencies": { "@peculiar/asn1-cms": "^2.6.1", "@peculiar/asn1-pkcs8": "^2.6.1", "@peculiar/asn1-rsa": "^2.6.1", "@peculiar/asn1-schema": "^2.6.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-nB5jVQy3MAAWvq0KY0R2JUZG8bO/bTLpnwyOzXyEh/e54ynGTatAR+csOnXkkVD9AFZ2uL8Z7EV918+qB1qDvw=="], + "@peculiar/asn1-pfx": ["@peculiar/asn1-pfx@2.7.0", "", { "dependencies": { "@peculiar/asn1-cms": "^2.7.0", "@peculiar/asn1-pkcs8": "^2.7.0", "@peculiar/asn1-rsa": "^2.7.0", "@peculiar/asn1-schema": "^2.7.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-V/nrlQVmhg7lYAsM7E13UDL5erAwFv6kCIVFqNaMIHSVi7dngcT839JkRTkQBqznMG98l2XjxYk74ZztAohZzA=="], - "@peculiar/asn1-pkcs8": ["@peculiar/asn1-pkcs8@2.6.1", "", { "dependencies": { "@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.1", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-JB5iQ9Izn5yGMw3ZG4Nw3Xn/hb/G38GYF3lf7WmJb8JZUydhVGEjK/ZlFSWhnlB7K/4oqEs8HnfFIKklhR58Tw=="], + "@peculiar/asn1-pkcs8": ["@peculiar/asn1-pkcs8@2.7.0", "", { "dependencies": { "@peculiar/asn1-schema": "^2.7.0", "@peculiar/asn1-x509": "^2.7.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-9GTl1nE8Mx1kTZ+7QyYatDyKsm34QcWRBFkY1iPvWC3X4Dona5s/tlLiQsx5WzVdZqiMBZNYT0buyw4/vbhnjw=="], - "@peculiar/asn1-pkcs9": ["@peculiar/asn1-pkcs9@2.6.1", "", { "dependencies": { "@peculiar/asn1-cms": "^2.6.1", "@peculiar/asn1-pfx": "^2.6.1", "@peculiar/asn1-pkcs8": "^2.6.1", "@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.1", "@peculiar/asn1-x509-attr": "^2.6.1", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-5EV8nZoMSxeWmcxWmmcolg22ojZRgJg+Y9MX2fnE2bGRo5KQLqV5IL9kdSQDZxlHz95tHvIq9F//bvL1OeNILw=="], + "@peculiar/asn1-pkcs9": ["@peculiar/asn1-pkcs9@2.7.0", "", { "dependencies": { "@peculiar/asn1-cms": "^2.7.0", "@peculiar/asn1-pfx": "^2.7.0", "@peculiar/asn1-pkcs8": "^2.7.0", 
"@peculiar/asn1-schema": "^2.7.0", "@peculiar/asn1-x509": "^2.7.0", "@peculiar/asn1-x509-attr": "^2.7.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-Bh7m+OuIaSEllPQcSd9OSp93F4ROWH7sbITWV8MI+8dwsjE5111/87VxiWVvYFKyww3vp39geLv9ENqhwWHcew=="], - "@peculiar/asn1-rsa": ["@peculiar/asn1-rsa@2.6.1", "", { "dependencies": { "@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.1", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-1nVMEh46SElUt5CB3RUTV4EG/z7iYc7EoaDY5ECwganibQPkZ/Y2eMsTKB/LeyrUJ+W/tKoD9WUqIy8vB+CEdA=="], + "@peculiar/asn1-rsa": ["@peculiar/asn1-rsa@2.7.0", "", { "dependencies": { "@peculiar/asn1-schema": "^2.7.0", "@peculiar/asn1-x509": "^2.7.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-/qvENQrXyTZURjMqSeofHul0JJt2sNSzSwk36pl2olkHbaioMQgrASDZAlHXl0xUlnVbHj0uGgOrBMTb5x2aJQ=="], - "@peculiar/asn1-schema": ["@peculiar/asn1-schema@2.6.0", "", { "dependencies": { "asn1js": "^3.0.6", "pvtsutils": "^1.3.6", "tslib": "^2.8.1" } }, "sha512-xNLYLBFTBKkCzEZIw842BxytQQATQv+lDTCEMZ8C196iJcJJMBUZxrhSTxLaohMyKK8QlzRNTRkUmanucnDSqg=="], + "@peculiar/asn1-schema": ["@peculiar/asn1-schema@2.7.0", "", { "dependencies": { "@peculiar/utils": "^2.0.2", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-W8ZfWzLmQnrcky+eh3tni4IozMdqBDiHWU0N+vve/UGjMaUs8c0L7A2oEdkBXS8rTpWDpK/aoI3DG/L/hxmxPg=="], - "@peculiar/asn1-x509": ["@peculiar/asn1-x509@2.6.1", "", { "dependencies": { "@peculiar/asn1-schema": "^2.6.0", "asn1js": "^3.0.6", "pvtsutils": "^1.3.6", "tslib": "^2.8.1" } }, "sha512-O9jT5F1A2+t3r7C4VT7LYGXqkGLK7Kj1xFpz7U0isPrubwU5PbDoyYtx6MiGst29yq7pXN5vZbQFKRCP+lLZlA=="], + "@peculiar/asn1-x509": ["@peculiar/asn1-x509@2.7.0", "", { "dependencies": { "@peculiar/asn1-schema": "^2.7.0", "@peculiar/utils": "^2.0.2", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-mUn9RRrkGDnG4ALfunDmzyRW5dg+sWCj/pfnCCqEHYbkGxEpvUt6iVJv8Yw1cyp6SWZ26ZE5oSmI5SqEaen15g=="], - "@peculiar/asn1-x509-attr": ["@peculiar/asn1-x509-attr@2.6.1", "", { "dependencies": { 
"@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.1", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-tlW6cxoHwgcQghnJwv3YS+9OO1737zgPogZ+CgWRUK4roEwIPzRH4JEiG770xe5HX2ATfCpmX60gurfWIF9dcQ=="], + "@peculiar/asn1-x509-attr": ["@peculiar/asn1-x509-attr@2.7.0", "", { "dependencies": { "@peculiar/asn1-schema": "^2.7.0", "@peculiar/asn1-x509": "^2.7.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-NS8e7SOgXipkzUPLF/sce7ukpMpWjhxYsH0n6Y+bHYo4TTxOb95Zv7hqwSuL212mj5YxovjdOKQOgH1As3E94w=="], + + "@peculiar/utils": ["@peculiar/utils@2.0.3", "", { "dependencies": { "tslib": "^2.8.1" } }, "sha512-+oL3HPFRIZ1St2K50lWCXiioIgSoxzz7R1J3uF6neO2yl1sgmpgY6XXJH4BdpoDkMWznQTeYF6oWNDZLCdQ4eQ=="], "@peculiar/x509": ["@peculiar/x509@1.14.3", "", { "dependencies": { "@peculiar/asn1-cms": "^2.6.0", "@peculiar/asn1-csr": "^2.6.0", "@peculiar/asn1-ecc": "^2.6.0", "@peculiar/asn1-pkcs9": "^2.6.0", "@peculiar/asn1-rsa": "^2.6.0", "@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.0", "pvtsutils": "^1.3.6", "reflect-metadata": "^0.2.2", "tslib": "^2.8.1", "tsyringe": "^4.10.0" } }, "sha512-C2Xj8FZ0uHWeCXXqX5B4/gVFQmtSkiuOolzAgutjTfseNOHT3pUjljDZsTSxXFGgio54bCzVFqmEOUrIVk8RDA=="], @@ -1388,55 +1387,55 @@ "@rolldown/pluginutils": ["@rolldown/pluginutils@1.0.0-beta.27", "", {}, "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA=="], - "@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.60.2", "", { "os": "android", "cpu": "arm" }, "sha512-dnlp69efPPg6Uaw2dVqzWRfAWRnYVb1XJ8CyyhIbZeaq4CA5/mLeZ1IEt9QqQxmbdvagjLIm2ZL8BxXv5lH4Yw=="], + "@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.60.3", "", { "os": "android", "cpu": "arm" }, "sha512-x35CNW/ANXG3hE/EZpRU8MXX1JDN86hBb2wMGAtltkz7pc6cxgjpy1OMMfDosOQ+2hWqIkag/fGok1Yady9nGw=="], - "@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.60.2", "", { "os": "android", "cpu": "arm64" }, 
"sha512-OqZTwDRDchGRHHm/hwLOL7uVPB9aUvI0am/eQuWMNyFHf5PSEQmyEeYYheA0EPPKUO/l0uigCp+iaTjoLjVoHg=="], + "@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.60.3", "", { "os": "android", "cpu": "arm64" }, "sha512-xw3xtkDApIOGayehp2+Rz4zimfkaX65r4t47iy+ymQB2G4iJCBBfj0ogVg5jpvjpn8UWn/+q9tprxleYeNp3Hw=="], - "@rollup/rollup-darwin-arm64": ["@rollup/rollup-darwin-arm64@4.60.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-UwRE7CGpvSVEQS8gUMBe1uADWjNnVgP3Iusyda1nSRwNDCsRjnGc7w6El6WLQsXmZTbLZx9cecegumcitNfpmA=="], + "@rollup/rollup-darwin-arm64": ["@rollup/rollup-darwin-arm64@4.60.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-vo6Y5Qfpx7/5EaamIwi0WqW2+zfiusVihKatLvtN1VFVy3D13uERk/6gZLU1UiHRL6fDXqj/ELIeVRGnvcTE1g=="], - "@rollup/rollup-darwin-x64": ["@rollup/rollup-darwin-x64@4.60.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-gjEtURKLCC5VXm1I+2i1u9OhxFsKAQJKTVB8WvDAHF+oZlq0GTVFOlTlO1q3AlCTE/DF32c16ESvfgqR7343/g=="], + "@rollup/rollup-darwin-x64": ["@rollup/rollup-darwin-x64@4.60.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-D+0QGcZhBzTN82weOnsSlY7V7+RMmPuF1CkbxyMAGE8+ZHeUjyb76ZiWmBlCu//AQQONvxcqRbwZTajZKqjuOw=="], - "@rollup/rollup-freebsd-arm64": ["@rollup/rollup-freebsd-arm64@4.60.2", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-Bcl6CYDeAgE70cqZaMojOi/eK63h5Me97ZqAQoh77VPjMysA/4ORQBRGo3rRy45x4MzVlU9uZxs8Uwy7ZaKnBw=="], + "@rollup/rollup-freebsd-arm64": ["@rollup/rollup-freebsd-arm64@4.60.3", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-6HnvHCT7fDyj6R0Ph7A6x8dQS/S38MClRWeDLqc0MdfWkxjiu1HSDYrdPhqSILzjTIC/pnXbbJbo+ft+gy/9hQ=="], - "@rollup/rollup-freebsd-x64": ["@rollup/rollup-freebsd-x64@4.60.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-LU+TPda3mAE2QB0/Hp5VyeKJivpC6+tlOXd1VMoXV/YFMvk/MNk5iXeBfB4MQGRWyOYVJ01625vjkr0Az98OJQ=="], + "@rollup/rollup-freebsd-x64": ["@rollup/rollup-freebsd-x64@4.60.3", "", { "os": "freebsd", "cpu": "x64" }, "sha512-KHLgC3WKlUYW3ShFKnnosZDOJ0xjg9zp7au3sIm2bs/tGBeC2ipmvRh/N7JKi0t9Ue20C0dpEshi8WUubg+cnA=="], 
- "@rollup/rollup-linux-arm-gnueabihf": ["@rollup/rollup-linux-arm-gnueabihf@4.60.2", "", { "os": "linux", "cpu": "arm" }, "sha512-2QxQrM+KQ7DAW4o22j+XZ6RKdxjLD7BOWTP0Bv0tmjdyhXSsr2Ul1oJDQqh9Zf5qOwTuTc7Ek83mOFaKnodPjg=="], + "@rollup/rollup-linux-arm-gnueabihf": ["@rollup/rollup-linux-arm-gnueabihf@4.60.3", "", { "os": "linux", "cpu": "arm" }, "sha512-DV6fJoxEYWJOvaZIsok7KrYl0tPvga5OZ2yvKHNNYyk/2roMLqQAbGhr78EQ5YhHpnhLKJD3S1WFusAkmUuV5g=="], - "@rollup/rollup-linux-arm-musleabihf": ["@rollup/rollup-linux-arm-musleabihf@4.60.2", "", { "os": "linux", "cpu": "arm" }, "sha512-TbziEu2DVsTEOPif2mKWkMeDMLoYjx95oESa9fkQQK7r/Orta0gnkcDpzwufEcAO2BLBsD7mZkXGFqEdMRRwfw=="], + "@rollup/rollup-linux-arm-musleabihf": ["@rollup/rollup-linux-arm-musleabihf@4.60.3", "", { "os": "linux", "cpu": "arm" }, "sha512-mQKoJAzvuOs6F+TZybQO4GOTSMUu7v0WdxEk24krQ/uUxXoPTtHjuaUuPmFhtBcM4K0ons8nrE3JyhTuCFtT/w=="], - "@rollup/rollup-linux-arm64-gnu": ["@rollup/rollup-linux-arm64-gnu@4.60.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-bO/rVDiDUuM2YfuCUwZ1t1cP+/yqjqz+Xf2VtkdppefuOFS2OSeAfgafaHNkFn0t02hEyXngZkxtGqXcXwO8Rg=="], + "@rollup/rollup-linux-arm64-gnu": ["@rollup/rollup-linux-arm64-gnu@4.60.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-Whjj2qoiJ6+OOJMGptTYazaJvjOJm+iKHpXQM1P3LzGjt7Ff++Tp7nH4N8J/BUA7R9IHfDyx4DJIflifwnbmIA=="], - "@rollup/rollup-linux-arm64-musl": ["@rollup/rollup-linux-arm64-musl@4.60.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-hr26p7e93Rl0Za+JwW7EAnwAvKkehh12BU1Llm9Ykiibg4uIr2rbpxG9WCf56GuvidlTG9KiiQT/TXT1yAWxTA=="], + "@rollup/rollup-linux-arm64-musl": ["@rollup/rollup-linux-arm64-musl@4.60.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-4YTNHKqGng5+yiZt3mg77nmyuCfmNfX4fPmyUapBcIk+BdwSwmCWGXOUxhXbBEkFHtoN5boLj/5NON+u5QC9tg=="], - "@rollup/rollup-linux-loong64-gnu": ["@rollup/rollup-linux-loong64-gnu@4.60.2", "", { "os": "linux", "cpu": "none" }, "sha512-pOjB/uSIyDt+ow3k/RcLvUAOGpysT2phDn7TTUB3n75SlIgZzM6NKAqlErPhoFU+npgY3/n+2HYIQVbF70P9/A=="], + 
"@rollup/rollup-linux-loong64-gnu": ["@rollup/rollup-linux-loong64-gnu@4.60.3", "", { "os": "linux", "cpu": "none" }, "sha512-SU3kNlhkpI4UqlUc2VXPGK9o886ZsSeGfMAX2ba2b8DKmMXq4AL7KUrkSWVbb7koVqx41Yczx6dx5PNargIrEA=="], - "@rollup/rollup-linux-loong64-musl": ["@rollup/rollup-linux-loong64-musl@4.60.2", "", { "os": "linux", "cpu": "none" }, "sha512-2/w+q8jszv9Ww1c+6uJT3OwqhdmGP2/4T17cu8WuwyUuuaCDDJ2ojdyYwZzCxx0GcsZBhzi3HmH+J5pZNXnd+Q=="], + "@rollup/rollup-linux-loong64-musl": ["@rollup/rollup-linux-loong64-musl@4.60.3", "", { "os": "linux", "cpu": "none" }, "sha512-6lDLl5h4TXpB1mTf2rQWnAk/LcXrx9vBfu/DT5TIPhvMhRWaZ5MxkIc8u4lJAmBo6klTe1ywXIUHFjylW505sg=="], - "@rollup/rollup-linux-ppc64-gnu": ["@rollup/rollup-linux-ppc64-gnu@4.60.2", "", { "os": "linux", "cpu": "ppc64" }, "sha512-11+aL5vKheYgczxtPVVRhdptAM2H7fcDR5Gw4/bTcteuZBlH4oP9f5s9zYO9aGZvoGeBpqXI/9TZZihZ609wKw=="], + "@rollup/rollup-linux-ppc64-gnu": ["@rollup/rollup-linux-ppc64-gnu@4.60.3", "", { "os": "linux", "cpu": "ppc64" }, "sha512-BMo8bOw8evlup/8G+cj5xWtPyp93xPdyoSN16Zy90Q2QZ0ZYRhCt6ZJSwbrRzG9HApFabjwj2p25TUPDWrhzqQ=="], - "@rollup/rollup-linux-ppc64-musl": ["@rollup/rollup-linux-ppc64-musl@4.60.2", "", { "os": "linux", "cpu": "ppc64" }, "sha512-i16fokAGK46IVZuV8LIIwMdtqhin9hfYkCh8pf8iC3QU3LpwL+1FSFGej+O7l3E/AoknL6Dclh2oTdnRMpTzFQ=="], + "@rollup/rollup-linux-ppc64-musl": ["@rollup/rollup-linux-ppc64-musl@4.60.3", "", { "os": "linux", "cpu": "ppc64" }, "sha512-E0L8X1dZN1/Rph+5VPF6Xj2G7JJvMACVXtamTJIDrVI44Y3K+G8gQaMEAavbqCGTa16InptiVrX6eM6pmJ+7qA=="], - "@rollup/rollup-linux-riscv64-gnu": ["@rollup/rollup-linux-riscv64-gnu@4.60.2", "", { "os": "linux", "cpu": "none" }, "sha512-49FkKS6RGQoriDSK/6E2GkAsAuU5kETFCh7pG4yD/ylj9rKhTmO3elsnmBvRD4PgJPds5W2PkhC82aVwmUcJ7A=="], + "@rollup/rollup-linux-riscv64-gnu": ["@rollup/rollup-linux-riscv64-gnu@4.60.3", "", { "os": "linux", "cpu": "none" }, "sha512-oZJ/WHaVfHUiRAtmTAeo3DcevNsVvH8mbvodjZy7D5QKvCefO371SiKRpxoDcCxB3PTRTLayWBkvmDQKTcX/sw=="], - 
"@rollup/rollup-linux-riscv64-musl": ["@rollup/rollup-linux-riscv64-musl@4.60.2", "", { "os": "linux", "cpu": "none" }, "sha512-mjYNkHPfGpUR00DuM1ZZIgs64Hpf4bWcz9Z41+4Q+pgDx73UwWdAYyf6EG/lRFldmdHHzgrYyge5akFUW0D3mQ=="], + "@rollup/rollup-linux-riscv64-musl": ["@rollup/rollup-linux-riscv64-musl@4.60.3", "", { "os": "linux", "cpu": "none" }, "sha512-Dhbyh7j9FybM3YaTgaHmVALwA8AkUwTPccyCQ79TG9AJUsMQqgN1DDEZNr4+QUfwiWvLDumW5vdwzoeUF+TNxQ=="], - "@rollup/rollup-linux-s390x-gnu": ["@rollup/rollup-linux-s390x-gnu@4.60.2", "", { "os": "linux", "cpu": "s390x" }, "sha512-ALyvJz965BQk8E9Al/JDKKDLH2kfKFLTGMlgkAbbYtZuJt9LU8DW3ZoDMCtQpXAltZxwBHevXz5u+gf0yA0YoA=="], + "@rollup/rollup-linux-s390x-gnu": ["@rollup/rollup-linux-s390x-gnu@4.60.3", "", { "os": "linux", "cpu": "s390x" }, "sha512-cJd1X5XhHHlltkaypz1UcWLA8AcoIi1aWhsvaWDskD1oz2eKCypnqvTQ8ykMNI0RSmm7NkTdSqSSD7zM0xa6Ig=="], - "@rollup/rollup-linux-x64-gnu": ["@rollup/rollup-linux-x64-gnu@4.60.2", "", { "os": "linux", "cpu": "x64" }, "sha512-UQjrkIdWrKI626Du8lCQ6MJp/6V1LAo2bOK9OTu4mSn8GGXIkPXk/Vsp4bLHCd9Z9Iz2OTEaokUE90VweJgIYQ=="], + "@rollup/rollup-linux-x64-gnu": ["@rollup/rollup-linux-x64-gnu@4.60.3", "", { "os": "linux", "cpu": "x64" }, "sha512-DAZDBHQfG2oQuhY7mc6I3/qB4LU2fQCjRvxbDwd/Jdvb9fypP4IJ4qmtu6lNjes6B531AI8cg1aKC2di97bUxA=="], - "@rollup/rollup-linux-x64-musl": ["@rollup/rollup-linux-x64-musl@4.60.2", "", { "os": "linux", "cpu": "x64" }, "sha512-bTsRGj6VlSdn/XD4CGyzMnzaBs9bsRxy79eTqTCBsA8TMIEky7qg48aPkvJvFe1HyzQ5oMZdg7AnVlWQSKLTnw=="], + "@rollup/rollup-linux-x64-musl": ["@rollup/rollup-linux-x64-musl@4.60.3", "", { "os": "linux", "cpu": "x64" }, "sha512-cRxsE8c13mZOh3vP+wLDxpQBRrOHDIGOWyDL93Sy0Ga8y515fBcC2pjUfFwUe5T7tqvTvWbCpg1URM/AXdWIXA=="], - "@rollup/rollup-openbsd-x64": ["@rollup/rollup-openbsd-x64@4.60.2", "", { "os": "openbsd", "cpu": "x64" }, "sha512-6d4Z3534xitaA1FcMWP7mQPq5zGwBmGbhphh2DwaA1aNIXUu3KTOfwrWpbwI4/Gr0uANo7NTtaykFyO2hPuFLg=="], + "@rollup/rollup-openbsd-x64": 
["@rollup/rollup-openbsd-x64@4.60.3", "", { "os": "openbsd", "cpu": "x64" }, "sha512-QaWcIgRxqEdQdhJqW4DJctsH6HCmo5vHxY0krHSX4jMtOqfzC+dqDGuHM87bu4H8JBeibWx7jFz+h6/4C8wA5Q=="], - "@rollup/rollup-openharmony-arm64": ["@rollup/rollup-openharmony-arm64@4.60.2", "", { "os": "none", "cpu": "arm64" }, "sha512-NetAg5iO2uN7eB8zE5qrZ3CSil+7IJt4WDFLcC75Ymywq1VZVD6qJ6EvNLjZ3rEm6gB7XW5JdT60c6MN35Z85Q=="], + "@rollup/rollup-openharmony-arm64": ["@rollup/rollup-openharmony-arm64@4.60.3", "", { "os": "none", "cpu": "arm64" }, "sha512-AaXwSvUi3QIPtroAUw1t5yHGIyqKEXwH54WUocFolZhpGDruJcs8c+xPNDRn4XiQsS7MEwnYsHW2l0MBLDMkWg=="], - "@rollup/rollup-win32-arm64-msvc": ["@rollup/rollup-win32-arm64-msvc@4.60.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-NCYhOotpgWZ5kdxCZsv6Iudx0wX8980Q/oW4pNFNihpBKsDbEA1zpkfxJGC0yugsUuyDZ7gL37dbzwhR0VI7pQ=="], + "@rollup/rollup-win32-arm64-msvc": ["@rollup/rollup-win32-arm64-msvc@4.60.3", "", { "os": "win32", "cpu": "arm64" }, "sha512-65LAKM/bAWDqKNEelHlcHvm2V+Vfb8C6INFxQXRHCvaVN1rJfwr4NvdP4FyzUaLqWfaCGaadf6UbTm8xJeYfEg=="], - "@rollup/rollup-win32-ia32-msvc": ["@rollup/rollup-win32-ia32-msvc@4.60.2", "", { "os": "win32", "cpu": "ia32" }, "sha512-RXsaOqXxfoUBQoOgvmmijVxJnW2IGB0eoMO7F8FAjaj0UTywUO/luSqimWBJn04WNgUkeNhh7fs7pESXajWmkg=="], + "@rollup/rollup-win32-ia32-msvc": ["@rollup/rollup-win32-ia32-msvc@4.60.3", "", { "os": "win32", "cpu": "ia32" }, "sha512-EEM2gyhBF5MFnI6vMKdX1LAosE627RGBzIoGMdLloPZkXrUN0Ckqgr2Qi8+J3zip/8NVVro3/FjB+tjhZUgUHA=="], - "@rollup/rollup-win32-x64-gnu": ["@rollup/rollup-win32-x64-gnu@4.60.2", "", { "os": "win32", "cpu": "x64" }, "sha512-qdAzEULD+/hzObedtmV6iBpdL5TIbKVztGiK7O3/KYSf+HIzU257+MX1EXJcyIiDbMAqmbwaufcYPvyRryeZtA=="], + "@rollup/rollup-win32-x64-gnu": ["@rollup/rollup-win32-x64-gnu@4.60.3", "", { "os": "win32", "cpu": "x64" }, "sha512-E5Eb5H/DpxaoXH++Qkv28RcUJboMopmdDUALBczvHMf7hNIxaDZqwY5lK12UK1BHacSmvupoEWGu+n993Z0y1A=="], - "@rollup/rollup-win32-x64-msvc": ["@rollup/rollup-win32-x64-msvc@4.60.2", "", { "os": 
"win32", "cpu": "x64" }, "sha512-Nd/SgG27WoA9e+/TdK74KnHz852TLa94ovOYySo/yMPuTmpckK/jIF2jSwS3g7ELSKXK13/cVdmg1Z/DaCWKxA=="], + "@rollup/rollup-win32-x64-msvc": ["@rollup/rollup-win32-x64-msvc@4.60.3", "", { "os": "win32", "cpu": "x64" }, "sha512-hPt/bgL5cE+Qp+/TPHBqptcAgPzgj46mPcg/16zNUmbQk0j+mOEQV/+Lqu8QRtDV3Ek95Q6FeFITpuhl6OTsAA=="], "@s2-dev/streamstore": ["@s2-dev/streamstore@0.22.5", "", { "dependencies": { "@protobuf-ts/runtime": "^2.11.1", "debug": "^4.4.3" } }, "sha512-GqdOKIbIoIxT+40fnKzHbrsHB6gBqKdECmFe7D3Ojk4FoN1Hu0LhFzZv6ZmVMjoHHU+55debS1xSWjZwQmbIyQ=="], @@ -1580,7 +1579,7 @@ "@smithy/util-middleware": ["@smithy/util-middleware@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-1Su2vj9RYNDEv/V+2E+jXkkwGsgR7dc4sfHn9Z7ruzQHJIEni9zzw5CauvRXlFJfmgcqYP8fWa0dkh2Q2YaQyw=="], - "@smithy/util-retry": ["@smithy/util-retry@4.3.6", "", { "dependencies": { "@smithy/service-error-classification": "^4.3.1", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-p6/FO1n2KxMeQyna067i0uJ6TSbb165ZhnRtCpWh4Foxqbfc6oW+XITaL8QkFJj3KFnDe2URt4gOhgU06EP9ew=="], + "@smithy/util-retry": ["@smithy/util-retry@4.3.8", "", { "dependencies": { "@smithy/service-error-classification": "^4.3.1", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-LUIxbTBi+OpvXpg91poGA6BdyoleMDLnfXjVDqyi2RvZmTveY5loE/FgYUBCR5LU2BThW2SoZRh8dTIIy38IPw=="], "@smithy/util-stream": ["@smithy/util-stream@4.5.25", "", { "dependencies": { "@smithy/fetch-http-handler": "^5.3.17", "@smithy/node-http-handler": "^4.6.1", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-buffer-from": "^4.2.2", "@smithy/util-hex-encoding": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-/PFpG4k8Ze8Ei+mMKj3oiPICYekthuzePZMgZbCqMiXIHHf4n2aZ4Ps0aSRShycFTGuj/J6XldmC0x0DwednIA=="], @@ -1608,9 +1607,9 @@ "@t3-oss/env-nextjs": ["@t3-oss/env-nextjs@0.13.4", "", { "dependencies": { "@t3-oss/env-core": "0.13.4" }, "peerDependencies": { 
"typescript": ">=5.0.0", "valibot": "^1.0.0-beta.7 || ^1.0.0", "zod": "^3.24.0 || ^4.0.0-beta.0" }, "optionalPeers": ["typescript", "valibot", "zod"] }, "sha512-6ecXR7SH7zJKVcBODIkB7wV9QLMU23uV8D9ec6P+ULHJ5Ea/YXEHo+Z/2hSYip5i9ptD/qZh8VuOXyldspvTTg=="], - "@tabler/icons": ["@tabler/icons@3.41.1", "", {}, "sha512-OaRnVbRmH2nHtFeg+RmMJ/7m2oBIF9XCJAUD5gQnMrpK9f05ydj8MZrAf3NZQqOXyxGN1UBL0D5IKLLEUfr74Q=="], + "@tabler/icons": ["@tabler/icons@3.42.0", "", {}, "sha512-h0nFIRgwrE/9iVgN+GuLijbiLIBWJ3chNvIWhqUZhy4D9fv3tkoQ3EYFAvxvfdvQUNNVAhJhj+ar54y6t016Vg=="], - "@tabler/icons-react": ["@tabler/icons-react@3.41.1", "", { "dependencies": { "@tabler/icons": "3.41.1" }, "peerDependencies": { "react": ">= 16" } }, "sha512-kUgweE+DJtAlMZVIns1FTDdcbpRVnkK7ZpUOXmoxy3JAF0rSHj0TcP4VHF14+gMJGnF+psH2Zt26BLT6owetBA=="], + "@tabler/icons-react": ["@tabler/icons-react@3.42.0", "", { "dependencies": { "@tabler/icons": "3.42.0" }, "peerDependencies": { "react": ">= 16" } }, "sha512-WvKhHYLdJaZbiY4Jm31fmTbzIwxokXcE1HM/m9rmXvh7UoHG4mM8n+9NOB6xEwB5SZQ+G/Z102eMj1F3NqDMVg=="], "@tailwindcss/node": ["@tailwindcss/node@4.2.4", "", { "dependencies": { "@jridgewell/remapping": "^2.3.5", "enhanced-resolve": "^5.19.0", "jiti": "^2.6.1", "lightningcss": "1.32.0", "magic-string": "^0.30.21", "source-map-js": "^1.2.1", "tailwindcss": "4.2.4" } }, "sha512-Ai7+yQPxz3ddrDQzFfBKdHEVBg0w3Zl83jnjuwxnZOsnH9pGn93QHQtpU0p/8rYWxvbFZHneni6p1BSLK4DkGA=="], @@ -1838,7 +1837,7 @@ "@typespec/ts-http-runtime": ["@typespec/ts-http-runtime@0.3.5", "", { "dependencies": { "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.0", "tslib": "^2.6.2" } }, "sha512-yURCknZhvywvQItHMMmFSo+fq5arCUIyz/CVk7jD89MSai7dkaX8ufjCWp3NttLojoTVbcE72ri+be/TnEbMHw=="], - "@ungap/structured-clone": ["@ungap/structured-clone@1.3.0", "", {}, "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g=="], + "@ungap/structured-clone": ["@ungap/structured-clone@1.3.1", "", {}, 
"sha512-mUFwbeTqrVgDQxFveS+df2yfap6iuP20NAKAsBt5jDEoOTDew+zwLAOilHCeQJOVSvmgCX4ogqIrA0mnyr08yQ=="], "@upsetjs/venn.js": ["@upsetjs/venn.js@2.0.0", "", { "optionalDependencies": { "d3-selection": "^3.0.0", "d3-transition": "^3.0.1" } }, "sha512-WbBhLrooyePuQ1VZxrJjtLvTc4NVfpOyKx0sKqioq9bX1C1m7Jgykkn8gLrtwumBioXIqam8DLxp88Adbue6Hw=="], @@ -1886,7 +1885,7 @@ "agentkeepalive": ["agentkeepalive@4.6.0", "", { "dependencies": { "humanize-ms": "^1.2.1" } }, "sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ=="], - "ai": ["ai@5.0.180", "", { "dependencies": { "@ai-sdk/gateway": "2.0.83", "@ai-sdk/provider": "2.0.2", "@ai-sdk/provider-utils": "3.0.24", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-tJctEJpgyoJtD8lVDY67r2uuqiWPMNv9BRce5bKeOj7Rf8tkBPAr8MnzwhQD2ZHqdblvA8GcZ/855DsTbGdl0A=="], + "ai": ["ai@5.0.185", "", { "dependencies": { "@ai-sdk/gateway": "2.0.87", "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-TQrpK5+R1xsQQH1YwY2Qnt1usZTVSDLiDg0Lda6vspC/G4a40aBs4b741Lr1ZNl8g1fu6gANyeK9C8Hz9p3O5A=="], "ajv": ["ajv@8.18.0", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A=="], @@ -1940,9 +1939,9 @@ "aws4fetch": ["aws4fetch@1.0.20", "", {}, "sha512-/djoAN709iY65ETD6LKCtyyEI04XIBP5xVvfmNxsEP0uJB5tyaGBztSryRr4HqMStr9R06PisQE7m9zDTXKu6g=="], - "axios": ["axios@1.15.2", "", { "dependencies": { "follow-redirects": "^1.15.11", "form-data": "^4.0.5", "proxy-from-env": "^2.1.0" } }, "sha512-wLrXxPtcrPTsNlJmKjkPnNPK2Ihe0hn0wGSaTEiHRPxwjvJwT3hKmXF4dpqxmPO9SoNb2FsYXj/xEo0gHN+D5A=="], + "axios": ["axios@1.16.0", "", { "dependencies": { "follow-redirects": "^1.16.0", "form-data": "^4.0.5", 
"proxy-from-env": "^2.1.0" } }, "sha512-6hp5CwvTPlN2A31g5dxnwAX0orzM7pmCRDLnZSX772mv8WDqICwFjowHuPs04Mc8deIld1+ejhtaMn5vp6b+1w=="], - "b4a": ["b4a@1.8.0", "", { "peerDependencies": { "react-native-b4a": "*" }, "optionalPeers": ["react-native-b4a"] }, "sha512-qRuSmNSkGQaHwNbM7J78Wwy+ghLEYF1zNrSeMxj4Kgw6y33O3mXcQ6Ie9fRvfU/YnxWkOchPXbaLb73TkIsfdg=="], + "b4a": ["b4a@1.8.1", "", { "peerDependencies": { "react-native-b4a": "*" }, "optionalPeers": ["react-native-b4a"] }, "sha512-aiqre1Nr0B/6DgE2N5vwTc+2/oQZ4Wh1t4NznYY4E00y8LCt6NqdRv81so00oo27D8MVKTpUa/MwUUtBLXCoDw=="], "bail": ["bail@2.0.2", "", {}, "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw=="], @@ -1952,19 +1951,19 @@ "bare-fs": ["bare-fs@4.7.1", "", { "dependencies": { "bare-events": "^2.5.4", "bare-path": "^3.0.0", "bare-stream": "^2.6.4", "bare-url": "^2.2.2", "fast-fifo": "^1.3.2" }, "peerDependencies": { "bare-buffer": "*" }, "optionalPeers": ["bare-buffer"] }, "sha512-WDRsyVN52eAx/lBamKD6uyw8H4228h/x0sGGGegOamM2cd7Pag88GfMQalobXI+HaEUxpCkbKQUDOQqt9wawRw=="], - "bare-os": ["bare-os@3.9.0", "", {}, "sha512-JTjuZyNIDpw+GytMO4a6TK1VXdVKKJr6DRxEHasyuYyShV2deuiHJK/ahGZlebc+SG0/wJCB9XK8gprBGDFi/Q=="], + "bare-os": ["bare-os@3.9.1", "", {}, "sha512-6M5XjcnsygQNPMCMPXSK379xrJFiZ/AEMNBmFEmQW8d/789VQATvriyi5r0HYTL9TkQ26rn3kgdTG3aisbrXkQ=="], "bare-path": ["bare-path@3.0.0", "", { "dependencies": { "bare-os": "^3.0.1" } }, "sha512-tyfW2cQcB5NN8Saijrhqn0Zh7AnFNsnczRcuWODH0eYAXBsJ5gVxAUuNr7tsHSC6IZ77cA0SitzT+s47kot8Mw=="], "bare-stream": ["bare-stream@2.13.1", "", { "dependencies": { "streamx": "^2.25.0", "teex": "^1.0.1" }, "peerDependencies": { "bare-abort-controller": "*", "bare-buffer": "*", "bare-events": "*" }, "optionalPeers": ["bare-abort-controller", "bare-buffer", "bare-events"] }, "sha512-Vp0cnjYyrEC4whYTymQ+YZi6pBpfiICZO3cfRG8sy67ZNWe951urv1x4eW1BKNngw3U+3fPYb5JQvHbCtxH7Ow=="], - "bare-url": ["bare-url@2.4.2", "", { "dependencies": { "bare-path": "^3.0.0" } }, 
"sha512-/9a2j4ac6ckpmAHvod/ob7x439OAHst/drc2Clnq+reRYd/ovddwcF4LfoxHyNk5AuGBnPg+HqFjmE/Zpq6v0A=="], + "bare-url": ["bare-url@2.4.3", "", { "dependencies": { "bare-path": "^3.0.0" } }, "sha512-Kccpc7ACfXaxfeInfqKcZtW4pT5YBn1mesc4sCsun6sRwtbJ4h+sNOaksUpYEJUKfN65YWC6Bw2OJEFiKxq8nQ=="], "base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="], "base64id": ["base64id@2.0.0", "", {}, "sha512-lGe34o6EHj9y3Kts9R4ZYs/Gr+6N7MCaMlIFA3F1R2O5/m7K06AxfSeO5530PEERE6/WyEg3lsuyw4GHlPZHog=="], - "baseline-browser-mapping": ["baseline-browser-mapping@2.10.24", "", { "bin": { "baseline-browser-mapping": "dist/cli.cjs" } }, "sha512-I2NkZOOrj2XuguvWCK6OVh9GavsNjZjK908Rq3mIBK25+GD8vPX5w2WdxVqnQ7xx3SrZJiCiZFu+/Oz50oSYSA=="], + "baseline-browser-mapping": ["baseline-browser-mapping@2.10.27", "", { "bin": { "baseline-browser-mapping": "dist/cli.cjs" } }, "sha512-zEs/ufmZoUd7WftKpKyXaT6RFxpQ5Qm9xytKRHvJfxFV9DFJkZph9RvJ1LcOUi0Z1ZVijMte65JbILeV+8QQEA=="], "basic-auth": ["basic-auth@2.0.1", "", { "dependencies": { "safe-buffer": "5.1.2" } }, "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg=="], @@ -2034,7 +2033,7 @@ "camelize": ["camelize@1.0.1", "", {}, "sha512-dU+Tx2fsypxTgtLoE36npi3UqcjSSMNYfkqgmoEhtZrraP5VWq0K7FkWVTYa8eMPtnU/G2txVsfdCJTn9uzpuQ=="], - "caniuse-lite": ["caniuse-lite@1.0.30001791", "", {}, "sha512-yk0l/YSrOnFZk3UROpDLQD9+kC1l4meK/wed583AXrzoarMGJcbRi2Q4RaUYbKxYAsZ8sWmaSa/DsLmdBeI1vQ=="], + "caniuse-lite": ["caniuse-lite@1.0.30001792", "", {}, "sha512-hVLMUZFgR4JJ6ACt1uEESvQN1/dBVqPAKY0hgrV70eN3391K6juAfTjKZLKvOMsx8PxA7gsY1/tLMMTcfFLLpw=="], "caseless": ["caseless@0.12.0", "", {}, "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw=="], @@ -2054,8 +2053,6 @@ "chardet": ["chardet@2.1.1", "", {}, "sha512-PsezH1rqdV9VvyNhxxOW32/d75r01NY7TQCmOqomRo15ZSOKbpTFVsfjghxo6JloQUCGnH4k1LGu0R4yCLlWQQ=="], - "chart.js": 
["chart.js@4.5.1", "", { "dependencies": { "@kurkle/color": "^0.3.0" } }, "sha512-GIjfiT9dbmHRiYi6Nl2yFCq7kkwdkp1W/lp2J99rX0yo9tgJGn3lKQATztIjb5tVtevcBtIdICNWqlq5+E8/Pw=="], - "check-error": ["check-error@2.1.3", "", {}, "sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA=="], "cheerio": ["cheerio@1.1.2", "", { "dependencies": { "cheerio-select": "^2.1.0", "dom-serializer": "^2.0.0", "domhandler": "^5.0.3", "domutils": "^3.2.2", "encoding-sniffer": "^0.2.1", "htmlparser2": "^10.0.0", "parse5": "^7.3.0", "parse5-htmlparser2-tree-adapter": "^7.1.0", "parse5-parser-stream": "^7.1.2", "undici": "^7.12.0", "whatwg-mimetype": "^4.0.0" } }, "sha512-IkxPpb5rS/d1IiLbHMgfPuS0FgiWTtFIm/Nj+2woXDLTZ7fOT2eqzgYbdMlLweqlHbsZjxEChoVK+7iph7jyQg=="], @@ -2064,7 +2061,7 @@ "chevrotain": ["chevrotain@12.0.0", "", { "dependencies": { "@chevrotain/cst-dts-gen": "12.0.0", "@chevrotain/gast": "12.0.0", "@chevrotain/regexp-to-ast": "12.0.0", "@chevrotain/types": "12.0.0", "@chevrotain/utils": "12.0.0" } }, "sha512-csJvb+6kEiQaqo1woTdSAuOWdN0WTLIydkKrBnS+V5gZz0oqBrp4kQ35519QgK6TpBThiG3V1vNSHlIkv4AglQ=="], - "chevrotain-allstar": ["chevrotain-allstar@0.4.1", "", { "dependencies": { "lodash-es": "^4.17.21" }, "peerDependencies": { "chevrotain": "^12.0.0" } }, "sha512-PvVJm3oGqrveUVW2Vt/eZGeiAIsJszYweUcYwcskg9e+IubNYKKD+rHHem7A6XVO22eDAL+inxNIGAzZ/VIWlA=="], + "chevrotain-allstar": ["chevrotain-allstar@0.4.3", "", { "dependencies": { "lodash-es": "^4.18.1" }, "peerDependencies": { "chevrotain": "^12.0.0" } }, "sha512-2X4mkroolSMKqW+H22pyPMUVDqYZzPhephTmg/NODKb1IGYPHfxfhcW0EjS7wcPJNbze2i4vBWT7zT5FKF2lrQ=="], "chokidar": ["chokidar@5.0.0", "", { "dependencies": { "readdirp": "^5.0.0" } }, "sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw=="], @@ -2182,7 +2179,7 @@ "csv-parse": ["csv-parse@6.1.0", "", {}, "sha512-CEE+jwpgLn+MmtCpVcPtiCZpVtB6Z2OKPTr34pycYYoL7sxdOkXDdQ4lRiw6ioC0q6BLqhc6cKweCVvral8yhw=="], - 
"cytoscape": ["cytoscape@3.33.2", "", {}, "sha512-sj4HXd3DokGhzZAdjDejGvTPLqlt84vNFN8m7bGsOzDY5DyVcxIb2ejIXat2Iy7HxWhdT/N1oKyheJ5YdpsGuw=="], + "cytoscape": ["cytoscape@3.33.3", "", {}, "sha512-Gej7U+OKR+LZ8kvX7rb2HhCYJ0IhvEFsnkud4SB1PR+BUY/TsSO0dmOW59WEVLu51b1Rm+gQRKoz4bLYxGSZ2g=="], "cytoscape-cose-bilkent": ["cytoscape-cose-bilkent@4.1.0", "", { "dependencies": { "cose-base": "^1.0.0" }, "peerDependencies": { "cytoscape": "^3.2.0" } }, "sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ=="], @@ -2336,7 +2333,7 @@ "domhandler": ["domhandler@5.0.3", "", { "dependencies": { "domelementtype": "^2.3.0" } }, "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w=="], - "dompurify": ["dompurify@3.4.1", "", { "optionalDependencies": { "@types/trusted-types": "^2.0.7" } }, "sha512-JahakDAIg1gyOm7dlgWSDjV4n7Ip2PKR55NIT6jrMfIgLFgWo81vdr1/QGqWtFNRqXP9UV71oVePtjqS2ebnPw=="], + "dompurify": ["dompurify@3.4.2", "", { "optionalDependencies": { "@types/trusted-types": "^2.0.7" } }, "sha512-lHeS9SA/IKeIFFyYciHBr2n0v1VMPlSj843HdLOwjb2OxNwdq9Xykxqhk+FE42MzAdHvInbAolSE4mhahPpjXA=="], "domutils": ["domutils@3.2.2", "", { "dependencies": { "dom-serializer": "^2.0.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.3" } }, "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw=="], @@ -2354,7 +2351,7 @@ "dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="], - "e2b": ["e2b@2.19.2", "", { "dependencies": { "@bufbuild/protobuf": "^2.6.2", "@connectrpc/connect": "2.0.0-rc.3", "@connectrpc/connect-web": "2.0.0-rc.3", "chalk": "^5.3.0", "compare-versions": "^6.1.0", "dockerfile-ast": "^0.7.1", "glob": "^11.1.0", "openapi-fetch": "^0.14.1", "platform": "^1.3.6", "tar": "^7.5.11" } }, 
"sha512-AJtaQ72XIjdOBGnsvzVuYveYmy4ZDALLzZddN7sFIgd49eCY7u7Nwx7TXp97vZLPTEgfCwEqn1U9mehDrQMp3g=="], + "e2b": ["e2b@2.19.5", "", { "dependencies": { "@bufbuild/protobuf": "^2.6.2", "@connectrpc/connect": "2.0.0-rc.3", "@connectrpc/connect-web": "2.0.0-rc.3", "chalk": "^5.3.0", "compare-versions": "^6.1.0", "dockerfile-ast": "^0.7.1", "glob": "^11.1.0", "openapi-fetch": "^0.14.1", "platform": "^1.3.6", "tar": "^7.5.11", "undici": "^7.25.0" } }, "sha512-pd1LvDrf5CWn1kHRK0CzKvnHJGeoaH/4//QOhkV+oyFPhBNtvPnUvSv60Zd1vdcuNRFM5b3j0Krg+44hVkLuRg=="], "eastasianwidth": ["eastasianwidth@0.2.0", "", {}, "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA=="], @@ -2364,7 +2361,7 @@ "effect": ["effect@3.21.0", "", { "dependencies": { "@standard-schema/spec": "^1.0.0", "fast-check": "^3.23.1" } }, "sha512-PPN80qRokCd1f015IANNhrwOnLO7GrrMQfk4/lnZRE/8j7UPWrNNjPV0uBrZutI/nHzernbW+J0hdqQysHiSnQ=="], - "electron-to-chromium": ["electron-to-chromium@1.5.344", "", {}, "sha512-4MxfbmNDm+KPh066EZy+eUnkcDPcZ35wNmOWzFuh/ijvHsve6kbLTLURy88uCNK5FbpN+yk2nQY6BYh1GEt+wg=="], + "electron-to-chromium": ["electron-to-chromium@1.5.349", "", {}, "sha512-QsWVGyRuY07Aqb234QytTfwd5d9AJlfNIQ5wIOl1L+PZDzI9d9+Fn0FRale/QYlFxt/bUnB0/nLd1jFPGxGK1A=="], "emoji-regex": ["emoji-regex@10.6.0", "", {}, "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A=="], @@ -2372,8 +2369,6 @@ "encodeurl": ["encodeurl@2.0.0", "", {}, "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg=="], - "encoding": ["encoding@0.1.13", "", { "dependencies": { "iconv-lite": "^0.6.2" } }, "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A=="], - "encoding-japanese": ["encoding-japanese@2.2.0", "", {}, "sha512-EuJWwlHPZ1LbADuKTClvHtwbaFn4rOD+dRAbWysqEOXRc2Uui0hJInNJrsdH0c+OhJA4nrCBdSkW4DD5YxAo6A=="], "encoding-sniffer": ["encoding-sniffer@0.2.1", "", { "dependencies": { 
"iconv-lite": "^0.6.3", "whatwg-encoding": "^3.1.1" } }, "sha512-5gvq20T6vfpekVtqrYQsSCFZ1wEg5+wW0/QaZMWkFr6BqD3NfKs0rLCx4rrVlSWJeZb5NBJgVLswK/w2MWU+Gw=="], @@ -2422,7 +2417,7 @@ "esprima": ["esprima@4.0.1", "", { "bin": { "esparse": "./bin/esparse.js", "esvalidate": "./bin/esvalidate.js" } }, "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A=="], - "esrap": ["esrap@2.2.5", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" }, "peerDependencies": { "@typescript-eslint/types": "^8.2.0" }, "optionalPeers": ["@typescript-eslint/types"] }, "sha512-/yLB1538mag+dn0wsePTe8C0rDIjUOaJpMs2McodSzmM2msWcZsBSdRtg6HOBt0A/r82BN+Md3pgwSc/uWt2Ig=="], + "esrap": ["esrap@2.2.6", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" }, "peerDependencies": { "@typescript-eslint/types": "^8.2.0" }, "optionalPeers": ["@typescript-eslint/types"] }, "sha512-WN0clHt0a4mzC780UBVVBpsj4vSSjOFNRd2WjYtduB9HeKxm1sjHMNUwLEHVjI3FdCQD/Hurgz9ftbKEzP79Ow=="], "estraverse": ["estraverse@5.3.0", "", {}, "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA=="], @@ -2468,7 +2463,7 @@ "express": ["express@5.2.1", "", { "dependencies": { "accepts": "^2.0.0", "body-parser": "^2.2.1", "content-disposition": "^1.0.0", "content-type": "^1.0.5", "cookie": "^0.7.1", "cookie-signature": "^1.2.1", "debug": "^4.4.0", "depd": "^2.0.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "finalhandler": "^2.1.0", "fresh": "^2.0.0", "http-errors": "^2.0.0", "merge-descriptors": "^2.0.0", "mime-types": "^3.0.0", "on-finished": "^2.4.1", "once": "^1.4.0", "parseurl": "^1.3.3", "proxy-addr": "^2.0.7", "qs": "^6.14.0", "range-parser": "^1.2.1", "router": "^2.2.0", "send": "^1.1.0", "serve-static": "^2.2.0", "statuses": "^2.0.1", "type-is": "^2.0.1", "vary": "^1.1.2" } }, "sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw=="], - "express-rate-limit": 
["express-rate-limit@8.4.1", "", { "dependencies": { "ip-address": "10.1.0" }, "peerDependencies": { "express": ">= 4.11" } }, "sha512-NGVYwQSAyEQgzxX1iCM978PP9AdO/hW93gMcF6ZwQCm+rFvLsBH6w4xcXWTcliS8La5EPRN3p9wzItqBwJrfNw=="], + "express-rate-limit": ["express-rate-limit@8.5.1", "", { "dependencies": { "ip-address": "^10.2.0" }, "peerDependencies": { "express": ">= 4.11" } }, "sha512-5O6KYmyJEpuPJV5hNTXKbAHWRqrzyu+OI3vUnSd2kXFubIVpG7ezpgxQy76Zo5GQZtrQBg86hF+CM/NX+cioiQ=="], "exsolve": ["exsolve@1.0.8", "", {}, "sha512-LmDxfWXwcTArk8fUEnOfSZpHOJ6zOMUJKOtFLFqJLoKJetuQG874Uc7/Kki7zFLzYybmZhp1M7+98pfMqeX8yA=="], @@ -2494,11 +2489,11 @@ "fast-sha256": ["fast-sha256@1.3.0", "", {}, "sha512-n11RGP/lrWEFI/bWdygLxhI+pVeo1ZYIVwvvPkW7azl/rOy+F3HYRZ2K5zeE9mmkhQppyv9sQFx0JM9UabnpPQ=="], - "fast-uri": ["fast-uri@3.1.0", "", {}, "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA=="], + "fast-uri": ["fast-uri@3.1.2", "", {}, "sha512-rVjf7ArG3LTk+FS6Yw81V1DLuZl1bRbNrev6Tmd/9RaroeeRRJhAt7jg/6YFxbvAQXUCavSoZhPPj6oOx+5KjQ=="], - "fast-xml-builder": ["fast-xml-builder@1.1.5", "", { "dependencies": { "path-expression-matcher": "^1.1.3" } }, "sha512-4TJn/8FKLeslLAH3dnohXqE3QSoxkhvaMzepOIZytwJXZO69Bfz0HBdDHzOTOon6G59Zrk6VQ2bEiv1t61rfkA=="], + "fast-xml-builder": ["fast-xml-builder@1.1.9", "", { "dependencies": { "path-expression-matcher": "^1.1.3" } }, "sha512-jcyKVSEX13iseJqg7n/KWw+xnu/7fdrZ333Fac54KjHDIELVCfDDJXYIm6DTJ0Su4gSzrhqiK0DzY/wZbF40mw=="], - "fast-xml-parser": ["fast-xml-parser@5.7.2", "", { "dependencies": { "@nodable/entities": "^2.1.0", "fast-xml-builder": "^1.1.5", "path-expression-matcher": "^1.5.0", "strnum": "^2.2.3" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-P7oW7tLbYnhOLQk/Gv7cZgzgMPP/XN03K02/Jy6Y/NHzyIAIpxuZIM/YqAkfiXFPxA2CTm7NtCijK9EDu09u2w=="], + "fast-xml-parser": ["fast-xml-parser@5.7.3", "", { "dependencies": { "@nodable/entities": "^2.1.0", "fast-xml-builder": "^1.1.7", "path-expression-matcher": "^1.5.0", 
"strnum": "^2.2.3" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-C0AaNuC+mscy6vrAQKAc/rMq+zAPHodfHGZu4sGVehvAQt/JLG1O5zEcYcXSY5zSqr4YVgxsB+pHXTq0i7eDlg=="], "fastq": ["fastq@1.20.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw=="], @@ -2654,7 +2649,7 @@ "hex-rgb": ["hex-rgb@4.3.0", "", {}, "sha512-Ox1pJVrDCyGHMG9CFg1tmrRUMRPRsAWYc/PinY0XzJU4K7y7vjNoLKIQ7BR5UJMCxNN8EM1MNDmHWA/B3aZUuw=="], - "hono": ["hono@4.12.15", "", {}, "sha512-qM0jDhFEaCBb4TxoW7f53Qrpv9RBiayUHo0S52JudprkhvpjIrGoU1mnnr29Fvd1U335ZFPZQY1wlkqgfGXyLg=="], + "hono": ["hono@4.12.18", "", {}, "sha512-RWzP96k/yv0PQfyXnWjs6zot20TqfpfsNXhOnev8d1InAxubW93L11/oNUc3tQqn2G0bSdAOBpX+2uDFHV7kdQ=="], "html-encoding-sniffer": ["html-encoding-sniffer@4.0.0", "", { "dependencies": { "whatwg-encoding": "^3.1.1" } }, "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ=="], @@ -2702,6 +2697,8 @@ "import-in-the-middle": ["import-in-the-middle@1.15.0", "", { "dependencies": { "acorn": "^8.14.0", "acorn-import-attributes": "^1.9.5", "cjs-module-lexer": "^1.2.2", "module-details-from-path": "^1.0.3" } }, "sha512-bpQy+CrsRmYmoPMAE/0G33iwRqwW4ouqdRg8jgbH3aKuCtOc8lxgmYXg2dMM92CRiGP660EtBcymH/eVUpCSaA=="], + "import-meta-resolve": ["import-meta-resolve@4.2.0", "", {}, "sha512-Iqv2fzaTQN28s/FwZAoFq0ZSs/7hMAHJVX+w8PZl3cY19Pxk6jFFalxQoIfW2826i/fDLXv8IiEZRIT0lDuWcg=="], + "indent-string": ["indent-string@4.0.0", "", {}, "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg=="], "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], @@ -2718,7 +2715,7 @@ "ioredis": ["ioredis@5.10.1", "", { "dependencies": { "@ioredis/commands": "1.5.1", "cluster-key-slot": "^1.1.0", "debug": "^4.3.4", "denque": "^2.1.0", "lodash.defaults": "^4.2.0", "lodash.isarguments": 
"^3.1.0", "redis-errors": "^1.2.0", "redis-parser": "^3.0.0", "standard-as-callback": "^2.1.0" } }, "sha512-HuEDBTI70aYdx1v6U97SbNx9F1+svQKBDo30o0b9fw055LMepzpOOd0Ccg9Q6tbqmBSJaMuY0fB7yw9/vjBYCA=="], - "ip-address": ["ip-address@10.1.0", "", {}, "sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q=="], + "ip-address": ["ip-address@10.2.0", "", {}, "sha512-/+S6j4E9AHvW9SWMSEY9Xfy66O5PWvVEJ08O0y5JGyEKQpojb0K0GKpz/v5HJ/G0vi3D2sjGK78119oXZeE0qA=="], "ipaddr.js": ["ipaddr.js@2.3.0", "", {}, "sha512-Zv/pA+ciVFbCSBBjGfaKUya/CcGmUHzTydLMaTwrUUEM2DIEO3iZvueGxmacvmN50fGpGVKeTXpb2LcYQxeVdg=="], @@ -2730,7 +2727,7 @@ "is-binary-path": ["is-binary-path@2.1.0", "", { "dependencies": { "binary-extensions": "^2.0.0" } }, "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw=="], - "is-core-module": ["is-core-module@2.16.1", "", { "dependencies": { "hasown": "^2.0.2" } }, "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w=="], + "is-core-module": ["is-core-module@2.16.2", "", { "dependencies": { "hasown": "^2.0.3" } }, "sha512-evOr8xfXKxE6qSR0hSXL2r3sd7ALj8+7jQEUvPYcm5sgZFdJ+AYzT6yNmJenvIYQBgIGwfwz08sL8zoL7yq2BA=="], "is-decimal": ["is-decimal@2.0.1", "", {}, "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A=="], @@ -2834,9 +2831,9 @@ "kleur": ["kleur@3.0.3", "", {}, "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w=="], - "kysely": ["kysely@0.28.16", "", {}, "sha512-3i5pmOiZvMDj00qhrIVbH0AnioVTx22DMP7Vn5At4yJO46iy+FM8Y/g61ltenLVSo3fiO8h8Q3QOFgf/gQ72ww=="], + "kysely": ["kysely@0.28.17", "", {}, "sha512-nbD8lB9EB3wNdMhOCdx5Li8DxnLbvKByylRLcJ1h+4SkrowVeECAyZlyiKMThF7xFdRz0jSQ2MoJr+wXux2y0Q=="], - "langium": ["langium@4.2.2", "", { "dependencies": { "@chevrotain/regexp-to-ast": "~12.0.0", "chevrotain": "~12.0.0", "chevrotain-allstar": "~0.4.1", "vscode-languageserver": 
"~9.0.1", "vscode-languageserver-textdocument": "~1.0.11", "vscode-uri": "~3.1.0" } }, "sha512-JUshTRAfHI4/MF9dH2WupvjSXyn8JBuUEWazB8ZVJUtXutT0doDlAv1XKbZ1Pb5sMexa8FF4CFBc0iiul7gbUQ=="], + "langium": ["langium@4.2.3", "", { "dependencies": { "@chevrotain/regexp-to-ast": "~12.0.0", "chevrotain": "~12.0.0", "chevrotain-allstar": "~0.4.3", "vscode-languageserver": "~9.0.1", "vscode-languageserver-textdocument": "~1.0.11", "vscode-uri": "~3.1.0" } }, "sha512-sOPIi4hISFnY7twwV97ca1TsxpBtXq0URu/LL1AvxwccPG/RIBBlKS7a/f/EL6w8lTNaS0EFs/F+IdSOaqYpng=="], "langsmith": ["langsmith@0.3.87", "", { "dependencies": { "@types/uuid": "^10.0.0", "chalk": "^4.1.2", "console-table-printer": "^2.12.1", "p-queue": "^6.6.2", "semver": "^7.6.3", "uuid": "^10.0.0" }, "peerDependencies": { "@opentelemetry/api": "*", "@opentelemetry/exporter-trace-otlp-proto": "*", "@opentelemetry/sdk-trace-base": "*", "openai": "*" }, "optionalPeers": ["@opentelemetry/api", "@opentelemetry/exporter-trace-otlp-proto", "@opentelemetry/sdk-trace-base", "openai"] }, "sha512-XXR1+9INH8YX96FKWc5tie0QixWz6tOqAsAKfcJyPkE0xPep+NDz0IQLR32q4bn10QK3LqD2HN6T3n6z1YLW7Q=="], @@ -2928,7 +2925,7 @@ "loupe": ["loupe@3.2.1", "", {}, "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ=="], - "lru-cache": ["lru-cache@11.3.5", "", {}, "sha512-NxVFwLAnrd9i7KUBxC4DrUhmgjzOs+1Qm50D3oF1/oL+r1NpZ4gA7xvG0/zJ8evR7zIKn4vLf7qTNduWFtCrRw=="], + "lru-cache": ["lru-cache@11.3.6", "", {}, "sha512-Gf/KoL3C/MlI7Bt0PGI9I+TeTC/I6r/csU58N4BSNc4lppLBeKsOdFYkK+dX0ABDUMJNfCHTyPpzwwO21Awd3A=="], "lru.min": ["lru.min@1.1.4", "", {}, "sha512-DqC6n3QQ77zdFpCMASA1a3Jlb64Hv2N2DciFGkO/4L9+q/IpIAuRlKOvCXabtRW6cQf8usbmM6BE/TOPysCdIA=="], @@ -3146,7 +3143,7 @@ "nano-spawn": ["nano-spawn@1.0.3", "", {}, "sha512-jtpsQDetTnvS2Ts1fiRdci5rx0VYws5jGyC+4IYOTnIQ/wwdf6JdomlHBwqC3bJYOvaKu0C2GSZ1A60anrYpaA=="], - "nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, 
"sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="], + "nanoid": ["nanoid@3.3.12", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-ZB9RH/39qpq5Vu6Y+NmUaFhQR6pp+M2Xt76XBnEwDaGcVAqhlvxrl3B2bKS5D3NH3QR76v3aSrKaF/Kiy7lEtQ=="], "nanostores": ["nanostores@1.3.0", "", {}, "sha512-XPUa/jz+P1oJvN9VBxw4L9MtdFfaH3DAryqPssqhb2kXjmb9npz0dly6rCsgFWOPr4Yg9mTfM3MDZgZZ+7A3lA=="], @@ -3170,7 +3167,7 @@ "next-themes": ["next-themes@0.4.6", "", { "peerDependencies": { "react": "^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc", "react-dom": "^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc" } }, "sha512-pZvgD5L0IEvX5/9GWyHMf3m8BKiVQwsCMHfoFosXtXBMnaS0ZnIJ9ST4b4NqLVKDEm8QBxoNNGNaBv2JNF6XNA=="], - "node-abi": ["node-abi@3.89.0", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-6u9UwL0HlAl21+agMN3YAMXcKByMqwGx+pq+P76vii5f7hTPtKDp08/H9py6DY+cfDw7kQNTGEj/rly3IgbNQA=="], + "node-abi": ["node-abi@3.92.0", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-KdHvFWZjEKDf0cakgFjebl371GPsISX2oZHcuyKqM7DtogIsHrqKeLTo8wBHxaXRAQlY2PsPlZmfo+9ZCxEREQ=="], "node-domexception": ["node-domexception@1.0.0", "", {}, "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ=="], @@ -3290,7 +3287,7 @@ "path-scurry": ["path-scurry@2.0.2", "", { "dependencies": { "lru-cache": "^11.0.0", "minipass": "^7.1.2" } }, "sha512-3O/iVVsJAPsOnpwWIeD+d6z/7PmqApyQePUtCndjatj/9I5LylHvt5qluFaBT3I5h3r1ejfR056c+FCv+NnNXg=="], - "path-to-regexp": ["path-to-regexp@8.4.2", "", {}, "sha512-qRcuIdP69NPm4qbACK+aDogI5CBDMi1jKe0ry5rSQJz8JVLsC7jV8XpiJjGRLLol3N+R5ihGYcrPLTno6pAdBA=="], + "path-to-regexp": ["path-to-regexp@0.1.13", "", {}, "sha512-A/AGNMFN3c8bOlvV9RreMdrv7jsmF9XIfDeCd87+I8RNg6s78BhJxMu69NEMHBSJFxKidViTEdruRwEk/WIKqA=="], "pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="], @@ -3342,7 +3339,7 @@ "popper.js": ["popper.js@1.16.1", "", {}, 
"sha512-Wb4p1J4zyFTbM+u6WuO4XstYx4Ky9Cewe4DWrel7B0w6VVICvPwdOpotjzcf6eD8TsckVnIMNONQyPIUFOUbCQ=="], - "postcss": ["postcss@8.5.12", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-W62t/Se6rA0Az3DfCL0AqJwXuKwBeYg6nOaIgzP+xZ7N5BFCI7DYi1qs6ygUYT6rvfi6t9k65UMLJC+PHZpDAA=="], + "postcss": ["postcss@8.5.14", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-SoSL4+OSEtR99LHFZQiJLkT59C5B1amGO1NzTwj7TT1qCUgUO6hxOvzkOYxD+vMrXBM3XJIKzokoERdqQq/Zmg=="], "postcss-import": ["postcss-import@15.1.0", "", { "dependencies": { "postcss-value-parser": "^4.0.0", "read-cache": "^1.0.0", "resolve": "^1.1.7" }, "peerDependencies": { "postcss": "^8.0.0" } }, "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew=="], @@ -3436,7 +3433,7 @@ "react-floater": ["react-floater@0.7.9", "", { "dependencies": { "deepmerge": "^4.3.1", "is-lite": "^0.8.2", "popper.js": "^1.16.0", "prop-types": "^15.8.1", "tree-changes": "^0.9.1" }, "peerDependencies": { "react": "15 - 18", "react-dom": "15 - 18" } }, "sha512-NXqyp9o8FAXOATOEo0ZpyaQ2KPb4cmPMXGWkx377QtJkIXHlHRAGer7ai0r0C1kG5gf+KJ6Gy+gdNIiosvSicg=="], - "react-hook-form": ["react-hook-form@7.74.0", "", { "peerDependencies": { "react": "^16.8.0 || ^17 || ^18 || ^19" } }, "sha512-yR6wHr99p9wFv686jhRWVSFhUvDvNbdUf2dKlbno8/VKOCuoNobDGC6S+M2dua9A9Yo8vpcrp8assIYbsZCQ9g=="], + "react-hook-form": ["react-hook-form@7.75.0", "", { "peerDependencies": { "react": "^16.8.0 || ^17 || ^18 || ^19" } }, "sha512-Ovv94H+0p3sJ7B9B5QxPuCP1u8V/cHuVGyH55cSwodYDtoJwK+fqk3vjfIgSX59I2U/bU4z0nRJ9HMLpNiWEmw=="], "react-innertext": ["react-innertext@1.1.5", "", { "peerDependencies": { "@types/react": ">=0.0.0 <=99", "react": ">=0.0.0 <=99" } }, "sha512-PWAqdqhxhHIv80dT9znP2KvS+hfkbRovFp4zFYHFFlOoQLRiawIic81gKb3U1wEyJZgMwgs3JoLtwryASRWP3Q=="], @@ -3548,7 +3545,7 @@ "robust-predicates": ["robust-predicates@3.0.3", "", {}, 
"sha512-NS3levdsRIUOmiJ8FZWCP7LG3QpJyrs/TE0Zpf1yvZu8cAJJ6QMW92H1c7kWpdIHo8RvmLxN/o2JXTKHp74lUA=="], - "rollup": ["rollup@4.60.2", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.60.2", "@rollup/rollup-android-arm64": "4.60.2", "@rollup/rollup-darwin-arm64": "4.60.2", "@rollup/rollup-darwin-x64": "4.60.2", "@rollup/rollup-freebsd-arm64": "4.60.2", "@rollup/rollup-freebsd-x64": "4.60.2", "@rollup/rollup-linux-arm-gnueabihf": "4.60.2", "@rollup/rollup-linux-arm-musleabihf": "4.60.2", "@rollup/rollup-linux-arm64-gnu": "4.60.2", "@rollup/rollup-linux-arm64-musl": "4.60.2", "@rollup/rollup-linux-loong64-gnu": "4.60.2", "@rollup/rollup-linux-loong64-musl": "4.60.2", "@rollup/rollup-linux-ppc64-gnu": "4.60.2", "@rollup/rollup-linux-ppc64-musl": "4.60.2", "@rollup/rollup-linux-riscv64-gnu": "4.60.2", "@rollup/rollup-linux-riscv64-musl": "4.60.2", "@rollup/rollup-linux-s390x-gnu": "4.60.2", "@rollup/rollup-linux-x64-gnu": "4.60.2", "@rollup/rollup-linux-x64-musl": "4.60.2", "@rollup/rollup-openbsd-x64": "4.60.2", "@rollup/rollup-openharmony-arm64": "4.60.2", "@rollup/rollup-win32-arm64-msvc": "4.60.2", "@rollup/rollup-win32-ia32-msvc": "4.60.2", "@rollup/rollup-win32-x64-gnu": "4.60.2", "@rollup/rollup-win32-x64-msvc": "4.60.2", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-J9qZyW++QK/09NyN/zeO0dG/1GdGfyp9lV8ajHnRVLfo/uFsbji5mHnDgn/qYdUHyCkM2N+8VyspgZclfAh0eQ=="], + "rollup": ["rollup@4.60.3", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.60.3", "@rollup/rollup-android-arm64": "4.60.3", "@rollup/rollup-darwin-arm64": "4.60.3", "@rollup/rollup-darwin-x64": "4.60.3", "@rollup/rollup-freebsd-arm64": "4.60.3", "@rollup/rollup-freebsd-x64": "4.60.3", "@rollup/rollup-linux-arm-gnueabihf": "4.60.3", "@rollup/rollup-linux-arm-musleabihf": "4.60.3", "@rollup/rollup-linux-arm64-gnu": "4.60.3", 
"@rollup/rollup-linux-arm64-musl": "4.60.3", "@rollup/rollup-linux-loong64-gnu": "4.60.3", "@rollup/rollup-linux-loong64-musl": "4.60.3", "@rollup/rollup-linux-ppc64-gnu": "4.60.3", "@rollup/rollup-linux-ppc64-musl": "4.60.3", "@rollup/rollup-linux-riscv64-gnu": "4.60.3", "@rollup/rollup-linux-riscv64-musl": "4.60.3", "@rollup/rollup-linux-s390x-gnu": "4.60.3", "@rollup/rollup-linux-x64-gnu": "4.60.3", "@rollup/rollup-linux-x64-musl": "4.60.3", "@rollup/rollup-openbsd-x64": "4.60.3", "@rollup/rollup-openharmony-arm64": "4.60.3", "@rollup/rollup-win32-arm64-msvc": "4.60.3", "@rollup/rollup-win32-ia32-msvc": "4.60.3", "@rollup/rollup-win32-x64-gnu": "4.60.3", "@rollup/rollup-win32-x64-msvc": "4.60.3", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-pAQK9HalE84QSm4Po3EmWIZPd3FnjkShVkiMlz1iligWYkWQ7wHYd1PF/T7QZ5TVSD6uSTon5gBVMSM4JfBV+A=="], "rou3": ["rou3@0.5.1", "", {}, "sha512-OXMmJ3zRk2xeXFGfA3K+EOPHC5u7RDFG7lIOx0X1pdnhUkI8MdVrbV+sNsD80ElpUZ+MRHdyxPnFthq9VHs8uQ=="], @@ -3772,7 +3769,7 @@ "tapable": ["tapable@2.3.3", "", {}, "sha512-uxc/zpqFg6x7C8vOE7lh6Lbda8eEL9zmVm/PLeTPBRhh1xCgdWaQ+J1CUieGpIfm2HdtsUpRv+HshiasBMcc6A=="], - "tar": ["tar@7.5.13", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.1.0", "yallist": "^5.0.0" } }, "sha512-tOG/7GyXpFevhXVh8jOPJrmtRpOTsYqUIkVdVooZYJS/z8WhfQUX8RJILmeuJNinGAMSu1veBr4asSHFt5/hng=="], + "tar": ["tar@7.5.14", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.1.0", "yallist": "^5.0.0" } }, "sha512-/7sHKgQO3JLP9ESlwTYUUftHUadOURUqq23xs1vjcnp8Vss6k0wCfzulyEtk5g91pjvnuriimGlyG7k6msrzRw=="], "tar-fs": ["tar-fs@2.1.4", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ=="], @@ -3812,9 +3809,9 @@ "tinyspy": 
["tinyspy@4.0.4", "", {}, "sha512-azl+t0z7pw/z958Gy9svOTuzqIk6xq+NSheJzn5MMWtWTFywIacg2wUlzKFGtt3cthx0r2SxMK0yzJOR0IES7Q=="], - "tldts": ["tldts@6.1.86", "", { "dependencies": { "tldts-core": "^6.1.86" }, "bin": { "tldts": "bin/cli.js" } }, "sha512-WMi/OQ2axVTf/ykqCQgXiIct+mSQDFdH2fkwhPwgEwvJ1kSzZRiinb0zF2Xb8u4+OqPChmyI6MEu4EezNJz+FQ=="], + "tldts": ["tldts@7.0.30", "", { "dependencies": { "tldts-core": "^7.0.30" }, "bin": { "tldts": "bin/cli.js" } }, "sha512-ELrFxuqsDdHUwoh0XxDbxuLD3Wnz49Z57IFvTtvWy1hJdcMZjXLIuonjilCiWHlT2GbE4Wlv1wKVTzDFnXH1aw=="], - "tldts-core": ["tldts-core@6.1.86", "", {}, "sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA=="], + "tldts-core": ["tldts-core@7.0.30", "", {}, "sha512-uiHN8PIB1VmWyS98eZYja4xzlYqeFZVjb4OuYlJQnZAuJhMw4PbKQOKgHKhBdJR3FE/t5mUQ1Kd80++B+qhD1Q=="], "to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="], @@ -3866,7 +3863,7 @@ "typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="], - "ufo": ["ufo@1.6.3", "", {}, "sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q=="], + "ufo": ["ufo@1.6.4", "", {}, "sha512-JFNbkD1Svwe0KvGi8GOeLcP4kAWQ609twvCdcHxq1oSL8svv39ZuSvajcD8B+5D0eL4+s1Is2D/O6KN3qcTeRA=="], "uid2": ["uid2@1.0.0", "", {}, "sha512-+I6aJUv63YAcY9n4mQreLUt0d4lvwkkopDNmpomkAUz0fAkEMV9pRWxN0EjhW1YfRhcuyHg2v3mwddCDW1+LFQ=="], @@ -3924,7 +3921,7 @@ "utils-merge": ["utils-merge@1.0.1", "", {}, "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA=="], - "uuid": ["uuid@11.1.0", "", { "bin": { "uuid": "dist/esm/bin/uuid" } }, "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A=="], + 
"uuid": ["uuid@11.1.1", "", { "bin": { "uuid": "dist/esm/bin/uuid" } }, "sha512-vIYxrBCC/N/K+Js3qSN88go7kIfNPssr/hHCesKCQNAjmgvYS2oqr69kIufEG+O4+PfezOH4EbIeHCfFov8ZgQ=="], "uzip": ["uzip@0.20201231.0", "", {}, "sha512-OZeJfZP+R0z9D6TmBgLq2LHzSSptGMGDGigGiEe0pr8UBe/7fdflgHlHBNDASTXB5jnFuxHpNaJywSg8YFeGng=="], @@ -3992,7 +3989,7 @@ "ws": ["ws@8.20.0", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-sAt8BhgNbzCtgGbt2OxmpuryO63ZoDk/sqaB/znQm94T4fCEsy/yV+7CdC1kJhOU9lboAEU7R3kquuycDoibVA=="], - "xlsx": ["xlsx@https://cdn.sheetjs.com/xlsx-0.20.3/xlsx-0.20.3.tgz", { "bin": { "xlsx": "./bin/xlsx.njs" } }], + "xlsx": ["xlsx@https://cdn.sheetjs.com/xlsx-0.20.3/xlsx-0.20.3.tgz", { "bin": { "xlsx": "./bin/xlsx.njs" } }, "sha512-oLDq3jw7AcLqKWH2AhCpVTZl8mf6X2YReP+Neh0SJUzV/BdZYjth94tG5toiMB1PPrYtxOCfaoUCkvtuH+3AJA=="], "xml": ["xml@1.0.1", "", {}, "sha512-huCv9IH9Tcf95zuYCsQraZtWnJvBtLVE0QHMOs8bWyZAFZNDcYjsPq1nEx8jKA9y+Beo9v+7OBPRisQTjinQMw=="], @@ -4018,7 +4015,7 @@ "yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], - "yaml": ["yaml@2.8.3", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-AvbaCLOO2Otw/lW5bmh9d/WEdcDFdQp2Z2ZUH3pX9U2ihyUY0nvLv7J6TrWowklRGPYbB/IuIMfYgxaCPg5Bpg=="], + "yaml": ["yaml@2.8.4", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-ml/JPOj9fOQK8RNnWojA67GbZ0ApXAUlN2UQclwv2eVgTgn7O9gg9o7paZWKMp4g0H3nTLtS9LVzhkpOFIKzog=="], "yargs": ["yargs@17.7.2", "", { "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", "yargs-parser": "^21.1.1" } }, "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="], @@ -4042,7 +4039,7 @@ "zwitch": ["zwitch@2.0.4", "", {}, 
"sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="], - "@antfu/install-pkg/tinyexec": ["tinyexec@1.1.1", "", {}, "sha512-VKS/ZaQhhkKFMANmAOhhXVoIfBXblQxGX1myCQ2faQrfmobMftXeJPcZGp0gS07ocvGJWDLZGyOZDadDBqYIJg=="], + "@antfu/install-pkg/tinyexec": ["tinyexec@1.1.2", "", {}, "sha512-dAqSqE/RabpBKI8+h26GfLq6Vb3JVXs30XYQjdMjaj/c2tS8IYYMbIzP599KtRj7c57/wYApb3QjgRgXmrCukA=="], "@asamuzakjp/css-color/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], @@ -4054,29 +4051,9 @@ "@aws-crypto/util/@smithy/util-utf8": ["@smithy/util-utf8@2.3.0", "", { "dependencies": { "@smithy/util-buffer-from": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A=="], - "@aws-sdk/client-sso/@aws-sdk/core": ["@aws-sdk/core@3.940.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/xml-builder": "3.930.0", "@smithy/core": "^3.18.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-KsGD2FLaX5ngJao1mHxodIVU9VYd1E8810fcYiGwO1PFHDzf5BEkp6D9IdMeQwT8Q6JLYtiiT1Y/o3UCScnGoA=="], - - "@aws-sdk/client-sso/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw=="], - - "@aws-sdk/client-sso/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "tslib": 
"^2.6.2" } }, "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw=="], - - "@aws-sdk/client-sso/@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws/lambda-invoke-store": "^0.2.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-l4aGbHpXM45YNgXggIux1HgsCVAvvBoqHPkqLnqMl9QVapfuSTjJHfDYDsx1Xxct6/m7qSMUzanBALhiaGO2fA=="], - - "@aws-sdk/client-sso/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@smithy/core": "^3.18.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-nJbLrUj6fY+l2W2rIB9P4Qvpiy0tnTdg/dmixRxrU1z3e8wBdspJlyE+AZN4fuVbeL6rrRrO/zxQC1bB3cw5IA=="], - - "@aws-sdk/client-sso/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/config-resolver": "^4.4.3", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw=="], - - "@aws-sdk/client-sso/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@aws-sdk/client-sso/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-endpoints": "^3.2.5", "tslib": "^2.6.2" } }, "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w=="], - - "@aws-sdk/client-sso/@aws-sdk/util-user-agent-browser": 
["@aws-sdk/util-user-agent-browser@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw=="], - - "@aws-sdk/client-sso/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.940.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-dlD/F+L/jN26I8Zg5x0oDGJiA+/WEQmnSE27fi5ydvYnpfQLwThtQo9SsNS47XSR/SOULaaoC9qx929rZuo74A=="], - - "@aws-sdk/credential-provider-sso/@aws-sdk/token-providers": ["@aws-sdk/token-providers@3.1038.0", "", { "dependencies": { "@aws-sdk/core": "^3.974.6", "@aws-sdk/nested-clients": "^3.997.4", "@aws-sdk/types": "^3.973.8", "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-Qniru+9oGGb/HNK/gGZWbV3jsD0k71ngE7qMQ/x6gYNYLd2EOwHCS6E2E6jfkaqO4i0d+nNKmfRy8bNcshKdGQ=="], + "@aws-sdk/credential-provider-sso/@aws-sdk/token-providers": ["@aws-sdk/token-providers@3.1041.0", "", { "dependencies": { "@aws-sdk/core": "^3.974.8", "@aws-sdk/nested-clients": "^3.997.6", "@aws-sdk/types": "^3.973.8", "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-Th7kPI6YPtvJUcdznooXJMy+9rQWjmEF81LxaJssngBzuysK4a/x+l8kjm1zb7nYsUPbndnBdUnwng/3PLvtGw=="], - "@aws-sdk/dynamodb-codec/@aws-sdk/core": ["@aws-sdk/core@3.974.8", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@aws-sdk/xml-builder": "^3.972.22", "@smithy/core": "^3.23.17", "@smithy/node-config-provider": "^4.3.14", "@smithy/property-provider": "^4.2.14", "@smithy/protocol-http": "^5.3.14", "@smithy/signature-v4": "^5.3.14", 
"@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.6", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-njR2qoG6ZuB0kvAS2FyICsFZJ6gmCcf2X/7JcD14sUvGDm26wiZ5BrA6LOiUxKFEF+IVe7kdroxyE00YlkiYsw=="], + "@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.7.2", "", { "dependencies": { "@nodable/entities": "^2.1.0", "fast-xml-builder": "^1.1.5", "path-expression-matcher": "^1.5.0", "strnum": "^2.2.3" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-P7oW7tLbYnhOLQk/Gv7cZgzgMPP/XN03K02/Jy6Y/NHzyIAIpxuZIM/YqAkfiXFPxA2CTm7NtCijK9EDu09u2w=="], "@azure/communication-email/tslib": ["tslib@1.14.1", "", {}, "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="], @@ -4098,8 +4075,6 @@ "@browserbasehq/stagehand/@anthropic-ai/sdk": ["@anthropic-ai/sdk@0.39.0", "", { "dependencies": { "@types/node": "^18.11.18", "@types/node-fetch": "^2.6.4", "abort-controller": "^3.0.0", "agentkeepalive": "^4.2.1", "form-data-encoder": "1.7.2", "formdata-node": "^4.3.2", "node-fetch": "^2.6.7" } }, "sha512-eMyDIPRZbt1CCLErRCi3exlAvNkBtRe+kW5vvJyef93PmNr/clstYgHhtvmkxN82nlKgzyGPCyGxrm0JQ1ZIdg=="], - "@browserbasehq/stagehand/@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.25.3", "", { "dependencies": { "@hono/node-server": "^1.19.9", "ajv": "^8.17.1", "ajv-formats": "^3.0.1", "content-type": "^1.0.5", "cors": "^2.8.5", "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", "eventsource-parser": "^3.0.0", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "jose": "^6.1.1", "json-schema-typed": "^8.0.2", "pkce-challenge": "^5.0.0", "raw-body": "^3.0.0", "zod": "^3.25 || ^4.0", "zod-to-json-schema": "^3.25.0" }, "peerDependencies": { "@cfworker/json-schema": "^4.1.1" }, "optionalPeers": ["@cfworker/json-schema"] }, "sha512-vsAMBMERybvYgKbg/l4L1rhS7VXV1c0CtyJg72vwxONVX0l4ZfKVAnZEWTQixJGTzKnELjQ59e4NbdFDALRiAQ=="], - 
"@cerebras/cerebras_cloud_sdk/@types/node": ["@types/node@18.19.130", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg=="], "@cerebras/cerebras_cloud_sdk/node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="], @@ -4142,7 +4117,7 @@ "@opentelemetry/otlp-transformer/@opentelemetry/resources": ["@opentelemetry/resources@2.0.0", "", { "dependencies": { "@opentelemetry/core": "2.0.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg=="], - "@opentelemetry/resources/@opentelemetry/core": ["@opentelemetry/core@2.7.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-DT12SXVwV2eoJrGf4nnsvZojxxeQo+LlNAsoYGRRObPWTeN6APiqZ2+nqDCQDvQX40eLi1AePONS0onoASp3yQ=="], + "@opentelemetry/resources/@opentelemetry/core": ["@opentelemetry/core@2.7.1", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-QAqIj32AtK6+pEVNG7EOVxHdE06RP+FM5qpiEJ4RtDcFIqKUZHYhl7/7UY5efhwmwNAg7j8QbJVBLxMerc0+gw=="], "@opentelemetry/sdk-logs/@opentelemetry/resources": ["@opentelemetry/resources@2.0.0", "", { "dependencies": { "@opentelemetry/core": "2.0.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg=="], @@ -4212,7 +4187,7 @@ "@socket.io/redis-adapter/debug": ["debug@4.3.7", "", { 
"dependencies": { "ms": "^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="], - "@tailwindcss/node/jiti": ["jiti@2.6.1", "", { "bin": { "jiti": "lib/jiti-cli.mjs" } }, "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ=="], + "@tailwindcss/node/jiti": ["jiti@2.7.0", "", { "bin": { "jiti": "lib/jiti-cli.mjs" } }, "sha512-AC/7JofJvZGrrneWNaEnJeOLUx+JlGt7tNa0wZiRPT4MY1wmfKjt2+6O2p2uz2+skll8OZZmJMNqeke7kKbNgQ=="], "@tailwindcss/oxide-wasm32-wasi/@emnapi/core": ["@emnapi/core@1.10.0", "", { "dependencies": { "@emnapi/wasi-threads": "1.2.1", "tslib": "^2.4.0" }, "bundled": true }, "sha512-yq6OkJ4p82CAfPl0u9mQebQHKPJkY7WrIuk205cTYnYe+k2Z8YBh11FrbRG/H6ihirqcacOgl2BIO8oyMQLeXw=="], @@ -4222,7 +4197,7 @@ "@tailwindcss/oxide-wasm32-wasi/@napi-rs/wasm-runtime": ["@napi-rs/wasm-runtime@1.1.4", "", { "dependencies": { "@tybys/wasm-util": "^0.10.1" }, "peerDependencies": { "@emnapi/core": "^1.7.1", "@emnapi/runtime": "^1.7.1" }, "bundled": true }, "sha512-3NQNNgA1YSlJb/kMH1ildASP9HW7/7kYnRI2szWJaofaS1hWmbGI4H+d3+22aGzXXN9IJ+n+GiFVcGipJP18ow=="], - "@tailwindcss/oxide-wasm32-wasi/@tybys/wasm-util": ["@tybys/wasm-util@0.10.1", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg=="], + "@tailwindcss/oxide-wasm32-wasi/@tybys/wasm-util": ["@tybys/wasm-util@0.10.2", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-RoBvJ2X0wuKlWFIjrwffGw1IqZHKQqzIchKaadZZfnNpsAYp2mM0h36JtPCjNDAHGgYez/15uMBpfGwchhiMgg=="], "@tailwindcss/oxide-wasm32-wasi/tslib": ["tslib@2.8.1", "", { "bundled": true }, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], @@ -4268,6 +4243,8 @@ "@trigger.dev/sdk/cronstrue": ["cronstrue@2.61.0", "", { "bin": { "cronstrue": "bin/cli.js" } }, 
"sha512-ootN5bvXbIQI9rW94+QsXN5eROtXWwew6NkdGxIRpS/UFWRggL0G5Al7a9GTBFEsuvVhJ2K3CntIIVt7L2ILhA=="], + "@trigger.dev/sdk/debug": ["debug@4.3.7", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="], + "@trigger.dev/sdk/uuid": ["uuid@9.0.1", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA=="], "@types/cors/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], @@ -4278,8 +4255,6 @@ "@types/node-fetch/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], - "@types/nodemailer/@aws-sdk/client-sesv2": ["@aws-sdk/client-sesv2@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-node": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/signature-v4-multi-region": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", 
"@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-jDQ4x2HwB2/UXBS7CTeSDiIb+sVsYGDyxTeXdrRAtqNdGv8kC54fbwokDiJ/mnMyB2gyXWw57BqeDJNkZuLmsw=="], - "@types/nodemailer/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="], "@types/papaparse/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="], @@ -4296,6 +4271,8 @@ "ai/@opentelemetry/api": ["@opentelemetry/api@1.9.0", "", {}, "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg=="], + "ajv-formats/ajv": ["ajv@8.20.0", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-Thbli+OlOj+iMPYFBVBfJ3OmCAnaSyNn4M1vz9T6Gka5Jt9ba/HIR56joy65tY6kx/FCF5VXNB819Y7/GUrBGA=="], + "ansi-escapes/type-fest": ["type-fest@0.21.3", "", {}, "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w=="], "anymatch/picomatch": ["picomatch@2.3.2", "", {}, "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA=="], @@ -4312,6 +4289,8 @@ "c12/confbox": ["confbox@0.2.4", "", {}, 
"sha512-ysOGlgTFbN2/Y6Cg3Iye8YKulHw+R2fNXHrgSmXISQdMnomY6eNDprVdW9R5xBguEqI954+S6709UyiO7B+6OQ=="], + "c12/jiti": ["jiti@2.7.0", "", { "bin": { "jiti": "lib/jiti-cli.mjs" } }, "sha512-AC/7JofJvZGrrneWNaEnJeOLUx+JlGt7tNa0wZiRPT4MY1wmfKjt2+6O2p2uz2+skll8OZZmJMNqeke7kKbNgQ=="], + "c12/pkg-types": ["pkg-types@2.3.1", "", { "dependencies": { "confbox": "^0.2.4", "exsolve": "^1.0.8", "pathe": "^2.0.3" } }, "sha512-y+ichcgc2LrADuhLNAx8DFjVfgz91pRxfZdI3UDhxHvcVEZsenLO+7XaU5vOp0u/7V/wZ+plyuQxtrDlZJ+yeg=="], "chrome-launcher/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], @@ -4340,14 +4319,12 @@ "docx/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], - "docx/nanoid": ["nanoid@5.1.9", "", { "bin": { "nanoid": "bin/nanoid.js" } }, "sha512-ZUvP7KeBLe3OZ1ypw6dI/TzYJuvHP77IM4Ry73waSQTLn8/g8rpdjfyVAh7t1/+FjBtG4lCP42MEbDxOsRpBMw=="], + "docx/nanoid": ["nanoid@5.1.11", "", { "bin": { "nanoid": "bin/nanoid.js" } }, "sha512-v+KEsUv2ps74PaSKv0gHTxTCgMXOIfBEbaqa6w6ISIGC7ZsvHN4N9oJ8d4cmf0n5oTzQz2SLmThbQWhjd/8eKg=="], "dom-serializer/entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="], "e2b/glob": ["glob@11.1.0", "", { "dependencies": { "foreground-child": "^3.3.1", "jackspeak": "^4.1.1", "minimatch": "^10.1.1", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^2.0.0" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-vuNwKSaKiqm7g0THUBu2x7ckSs3XJLXE+2ssL7/MfTGPLLcrJQ/4Uq1CjPTtO5cCIiRxqvN6Twy1qOwhL0Xjcw=="], - "encoding/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, 
"sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], - "encoding-sniffer/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], "engine.io/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], @@ -4380,7 +4357,7 @@ "fumadocs-mdx/esbuild": ["esbuild@0.28.0", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.28.0", "@esbuild/android-arm": "0.28.0", "@esbuild/android-arm64": "0.28.0", "@esbuild/android-x64": "0.28.0", "@esbuild/darwin-arm64": "0.28.0", "@esbuild/darwin-x64": "0.28.0", "@esbuild/freebsd-arm64": "0.28.0", "@esbuild/freebsd-x64": "0.28.0", "@esbuild/linux-arm": "0.28.0", "@esbuild/linux-arm64": "0.28.0", "@esbuild/linux-ia32": "0.28.0", "@esbuild/linux-loong64": "0.28.0", "@esbuild/linux-mips64el": "0.28.0", "@esbuild/linux-ppc64": "0.28.0", "@esbuild/linux-riscv64": "0.28.0", "@esbuild/linux-s390x": "0.28.0", "@esbuild/linux-x64": "0.28.0", "@esbuild/netbsd-arm64": "0.28.0", "@esbuild/netbsd-x64": "0.28.0", "@esbuild/openbsd-arm64": "0.28.0", "@esbuild/openbsd-x64": "0.28.0", "@esbuild/openharmony-arm64": "0.28.0", "@esbuild/sunos-x64": "0.28.0", "@esbuild/win32-arm64": "0.28.0", "@esbuild/win32-ia32": "0.28.0", "@esbuild/win32-x64": "0.28.0" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-sNR9MHpXSUV/XB4zmsFKN+QgVG82Cc7+/aaxJ8Adi8hyOac+EXptIp45QBPaVyX3N70664wRbTcLTOemCAnyqw=="], - "fumadocs-mdx/tinyexec": ["tinyexec@1.1.1", "", {}, "sha512-VKS/ZaQhhkKFMANmAOhhXVoIfBXblQxGX1myCQ2faQrfmobMftXeJPcZGp0gS07ocvGJWDLZGyOZDadDBqYIJg=="], + "fumadocs-mdx/tinyexec": ["tinyexec@1.1.2", "", {}, "sha512-dAqSqE/RabpBKI8+h26GfLq6Vb3JVXs30XYQjdMjaj/c2tS8IYYMbIzP599KtRj7c57/wYApb3QjgRgXmrCukA=="], "fumadocs-openapi/@radix-ui/react-slot": 
["@radix-ui/react-slot@1.2.4", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA=="], @@ -4420,8 +4397,6 @@ "isomorphic-unfetch/node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="], - "json-schema-to-typescript/js-yaml": ["js-yaml@4.1.0", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="], - "katex/commander": ["commander@8.3.0", "", {}, "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww=="], "langsmith/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], @@ -4542,6 +4517,8 @@ "restore-cursor/signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="], + "router/path-to-regexp": ["path-to-regexp@8.4.2", "", {}, "sha512-qRcuIdP69NPm4qbACK+aDogI5CBDMi1jKe0ry5rSQJz8JVLsC7jV8XpiJjGRLLol3N+R5ihGYcrPLTno6pAdBA=="], + "sim/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="], "sim/lucide-react": ["lucide-react@0.479.0", "", { "peerDependencies": { "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, 
"sha512-aBhNnveRhorBOK7uA4gDjgaf+YlHMdMhQ/3cupk6exM10hWlEU+2QtWYOfhXhjAsmdb6LeKR+NZnow4UxRRiTQ=="], @@ -4564,11 +4541,9 @@ "socket.io-client/debug": ["debug@4.3.7", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="], - "socks/ip-address": ["ip-address@10.1.1", "", {}, "sha512-1FMu8/N15Ck1BL551Jf42NYIoin2unWjLQ2Fze/DXryJRl5twqtwNHlO39qERGbIOcKYWHdgRryhOC+NG4eaLw=="], - "source-map-support/source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="], - "streamdown/marked": ["marked@17.0.4", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-NOmVMM+KAokHMvjWmC5N/ZOvgmSWuqJB8FoYI019j4ogb/PeRMKoKIjReZ2w3376kkA8dSJIP8uD993Kxc0iRQ=="], + "streamdown/marked": ["marked@17.0.6", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-gB0gkNafnonOw0obSTEGZTT86IuhILt2Wfx0mWH/1Au83kybTayroZ/V6nS25mN7u8ASy+5fMhgB3XPNrOZdmA=="], "string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], @@ -4588,6 +4563,8 @@ "test-exclude/glob": ["glob@10.5.0", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg=="], + "tough-cookie/tldts": ["tldts@6.1.86", "", { "dependencies": { "tldts-core": "^6.1.86" }, "bin": { "tldts": "bin/cli.js" } }, "sha512-WMi/OQ2axVTf/ykqCQgXiIct+mSQDFdH2fkwhPwgEwvJ1kSzZRiinb0zF2Xb8u4+OqPChmyI6MEu4EezNJz+FQ=="], + "tsx/esbuild": ["esbuild@0.27.7", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.27.7", "@esbuild/android-arm": "0.27.7", "@esbuild/android-arm64": "0.27.7", "@esbuild/android-x64": "0.27.7", "@esbuild/darwin-arm64": 
"0.27.7", "@esbuild/darwin-x64": "0.27.7", "@esbuild/freebsd-arm64": "0.27.7", "@esbuild/freebsd-x64": "0.27.7", "@esbuild/linux-arm": "0.27.7", "@esbuild/linux-arm64": "0.27.7", "@esbuild/linux-ia32": "0.27.7", "@esbuild/linux-loong64": "0.27.7", "@esbuild/linux-mips64el": "0.27.7", "@esbuild/linux-ppc64": "0.27.7", "@esbuild/linux-riscv64": "0.27.7", "@esbuild/linux-s390x": "0.27.7", "@esbuild/linux-x64": "0.27.7", "@esbuild/netbsd-arm64": "0.27.7", "@esbuild/netbsd-x64": "0.27.7", "@esbuild/openbsd-arm64": "0.27.7", "@esbuild/openbsd-x64": "0.27.7", "@esbuild/openharmony-arm64": "0.27.7", "@esbuild/sunos-x64": "0.27.7", "@esbuild/win32-arm64": "0.27.7", "@esbuild/win32-ia32": "0.27.7", "@esbuild/win32-x64": "0.27.7" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w=="], "tsyringe/tslib": ["tslib@1.14.1", "", {}, "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="], @@ -4624,10 +4601,6 @@ "@aws-crypto/util/@smithy/util-utf8/@smithy/util-buffer-from": ["@smithy/util-buffer-from@2.2.0", "", { "dependencies": { "@smithy/is-array-buffer": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA=="], - "@aws-sdk/client-sso/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.930.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA=="], - - "@aws-sdk/dynamodb-codec/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.22", "", { "dependencies": { "@nodable/entities": "2.1.0", "@smithy/types": "^4.14.1", "fast-xml-parser": "5.7.2", "tslib": "^2.6.2" } }, "sha512-PMYKKtJd70IsSG0yHrdAbxBr+ZWBKLvzFZfD3/urxgf6hXVMzuU5M+3MJ5G67RpOmLBu1fAUN65SbWuKUCOlAA=="], - 
"@babel/helper-compilation-targets/lru-cache/yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="], "@browserbasehq/sdk/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="], @@ -4638,10 +4611,6 @@ "@browserbasehq/stagehand/@anthropic-ai/sdk/node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="], - "@browserbasehq/stagehand/@modelcontextprotocol/sdk/express-rate-limit": ["express-rate-limit@7.5.1", "", { "peerDependencies": { "express": ">= 4.11" } }, "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw=="], - - "@browserbasehq/stagehand/@modelcontextprotocol/sdk/jose": ["jose@6.2.3", "", {}, "sha512-YYVDInQKFJfR/xa3ojUTl8c2KoTwiL1R5Wg9YCydwH0x0B9grbzlg5HC7mMjCtUJjbQ/YnGEZIhI5tCgfTb4Hw=="], - "@cerebras/cerebras_cloud_sdk/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="], "@cerebras/cerebras_cloud_sdk/node-fetch/whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="], @@ -4694,7 +4663,7 @@ "@octokit/plugin-rest-endpoint-methods/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="], - "@puppeteer/browsers/tar-fs/tar-stream": ["tar-stream@3.1.8", "", { "dependencies": { "b4a": "^1.6.4", "bare-fs": "^4.5.5", "fast-fifo": "^1.2.0", "streamx": "^2.15.0" } }, 
"sha512-U6QpVRyCGHva435KoNWy9PRoi2IFYCgtEhq9nmrPPpbRacPs9IH4aJ3gbrFC8dPcXvdSZ4XXfXT5Fshbp2MtlQ=="], + "@puppeteer/browsers/tar-fs/tar-stream": ["tar-stream@3.2.0", "", { "dependencies": { "b4a": "^1.6.4", "bare-fs": "^4.5.5", "fast-fifo": "^1.2.0", "streamx": "^2.15.0" } }, "sha512-ojzvCvVaNp6aOTFmG7jaRD0meowIAuPc3cMMhSgKiVWws1GyHbGd/xvnyuRKcKlMpt3qvxx6r0hreCNITP9hIg=="], "@radix-ui/react-label/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.4", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA=="], @@ -4746,30 +4715,6 @@ "@types/node-fetch/@types/node/undici-types": ["undici-types@7.19.2", "", {}, "sha512-qYVnV5OEm2AW8cJMCpdV20CDyaN3g0AjDlOGf1OW4iaDEx8MwdtChUp4zu4H0VP3nDRF/8RKWH+IPp9uW0YGZg=="], - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/core": ["@aws-sdk/core@3.940.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/xml-builder": "3.930.0", "@smithy/core": "^3.18.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-KsGD2FLaX5ngJao1mHxodIVU9VYd1E8810fcYiGwO1PFHDzf5BEkp6D9IdMeQwT8Q6JLYtiiT1Y/o3UCScnGoA=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.940.0", "", { "dependencies": { "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-ini": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", 
"@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-M8NFAvgvO6xZjiti5kztFiAYmSmSlG3eUfr4ZHSfXYZUA/KUdZU/D6xJyaLnU8cYRWBludb6K9XPKKVwKfqm4g=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws/lambda-invoke-store": "^0.2.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-l4aGbHpXM45YNgXggIux1HgsCVAvvBoqHPkqLnqMl9QVapfuSTjJHfDYDsx1Xxct6/m7qSMUzanBALhiaGO2fA=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@smithy/core": "^3.18.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-nJbLrUj6fY+l2W2rIB9P4Qvpiy0tnTdg/dmixRxrU1z3e8wBdspJlyE+AZN4fuVbeL6rrRrO/zxQC1bB3cw5IA=="], - - 
"@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/config-resolver": "^4.4.3", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/signature-v4-multi-region": ["@aws-sdk/signature-v4-multi-region@3.940.0", "", { "dependencies": { "@aws-sdk/middleware-sdk-s3": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-ugHZEoktD/bG6mdgmhzLDjMP2VrYRAUPRPF1DpCyiZexkH7DCU7XrSJyXMvkcf0DHV+URk0q2sLf/oqn1D2uYw=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-endpoints": "^3.2.5", "tslib": "^2.6.2" } }, "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.940.0", "", { "dependencies": { 
"@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-dlD/F+L/jN26I8Zg5x0oDGJiA+/WEQmnSE27fi5ydvYnpfQLwThtQo9SsNS47XSR/SOULaaoC9qx929rZuo74A=="], - "@types/nodemailer/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="], "@types/papaparse/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="], @@ -4808,8 +4753,6 @@ "express/accepts/negotiator": ["negotiator@1.0.0", "", {}, "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg=="], - "fetch-cookie/tough-cookie/tldts": ["tldts@7.0.29", "", { "dependencies": { "tldts-core": "^7.0.29" }, "bin": { "tldts": "bin/cli.js" } }, "sha512-JIXCerhudr/N6OWLwLF1HVsTTUo7ry6qHa5eWZEkiMuxsIiAACL55tGLfqfHfoH7QaMQUW8fngD7u7TxWexYQg=="], - "ffmpeg-static/https-proxy-agent/agent-base": ["agent-base@6.0.2", "", { "dependencies": { "debug": "4" } }, "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ=="], "form-data/mime-types/mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="], @@ -5000,8 +4943,6 @@ "oauth2-mock-server/express/merge-descriptors": ["merge-descriptors@1.0.3", "", {}, "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ=="], - "oauth2-mock-server/express/path-to-regexp": ["path-to-regexp@0.1.13", "", {}, "sha512-A/AGNMFN3c8bOlvV9RreMdrv7jsmF9XIfDeCd87+I8RNg6s78BhJxMu69NEMHBSJFxKidViTEdruRwEk/WIKqA=="], - "oauth2-mock-server/express/qs": ["qs@6.14.2", "", { "dependencies": { "side-channel": "^1.1.0" } }, 
"sha512-V/yCWTTF7VJ9hIh18Ugr2zhJMP01MY7c5kh4J870L7imm6/DIzBsNLTXzMwUA3yZ5b/KBqLx8Kp3uRvd7xSe3Q=="], "oauth2-mock-server/express/send": ["send@0.19.2", "", { "dependencies": { "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "~0.5.2", "http-errors": "~2.0.1", "mime": "1.6.0", "ms": "2.1.3", "on-finished": "~2.4.1", "range-parser": "~1.2.1", "statuses": "~2.0.2" } }, "sha512-VMbMxbDeehAxpOtWJXlcUS5E8iXh6QmN+BkRX1GARS3wRaXEEgzCcB10gTQazO42tpNIya8xIyNx8fll1OFPrg=="], @@ -5054,6 +4995,8 @@ "test-exclude/glob/path-scurry": ["path-scurry@1.11.1", "", { "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" } }, "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA=="], + "tough-cookie/tldts/tldts-core": ["tldts-core@6.1.86", "", {}, "sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA=="], + "tsx/esbuild/@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.27.7", "", { "os": "aix", "cpu": "ppc64" }, "sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg=="], "tsx/esbuild/@esbuild/android-arm": ["@esbuild/android-arm@0.27.7", "", { "os": "android", "cpu": "arm" }, "sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ=="], @@ -5166,8 +5109,6 @@ "@aws-crypto/util/@smithy/util-utf8/@smithy/util-buffer-from/@smithy/is-array-buffer": ["@smithy/is-array-buffer@2.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA=="], - "@aws-sdk/client-sso/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="], - 
"@browserbasehq/sdk/node-fetch/whatwg-url/tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="], "@browserbasehq/sdk/node-fetch/whatwg-url/webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="], @@ -5190,26 +5131,8 @@ "@trigger.dev/core/socket.io/engine.io/ws": ["ws@8.17.1", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ=="], - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.930.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-/G3l5/wbZYP2XEQiOoIkRJmlv15f1P3MSd1a0gz27lHEMrOJOGq66rF1Ca4OJLzapWt3Fy9BPrZAepoAX11kMw=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/node-http-handler": "^4.4.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-stream": "^4.5.6", "tslib": "^2.6.2" } }, 
"sha512-dOrc03DHElNBD6N9Okt4U0zhrG4Wix5QUBSZPr5VN8SvmjD9dkrrxOkkJaMCl/bzrW7kbQEp7LuBdbxArMmOZQ=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-login": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", "@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-gn7PJQEzb/cnInNFTOaDoCN/hOKqMejNmLof1W5VW95Qk0TPO52lH8R4RmJPnRrwFMswOWswTOpR1roKNLIrcw=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-pILBzt5/TYCqRsJb7vZlxmRIe0/T+FZPeml417EK75060ajDGnVJjHcuVdLVIeKoTKm9gmJc9l45gon6PbHyUQ=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.940.0", "", { "dependencies": { "@aws-sdk/client-sso": "3.940.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/token-providers": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-q6JMHIkBlDCOMnA3RAzf8cGfup+8ukhhb50fNpghMs1SNBGhanmaMbZSgLigBRsPQW7fOk2l8jnzdVLS+BB9Uw=="], - - 
"@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-9QLTIkDJHHaYL0nyymO41H8g3ui1yz6Y3GmAN1gYQa6plXisuFBnGAbmKVj7zNvjWaOKdF0dV3dd3AFKEDoJ/w=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/signature-v4-multi-region/@aws-sdk/middleware-sdk-s3": ["@aws-sdk/middleware-sdk-s3@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-arn-parser": "3.893.0", "@smithy/core": "^3.18.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-config-provider": "^4.2.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-stream": "^4.5.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-JYkLjgS1wLoKHJ40G63+afM1ehmsPsjcmrHirKh8+kSCx4ip7+nL1e/twV4Zicxr8RJi9Y0Ahq5mDvneilDDKQ=="], - "cli-truncate/string-width/strip-ansi/ansi-regex": ["ansi-regex@6.2.2", "", {}, "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg=="], - "fetch-cookie/tough-cookie/tldts/tldts-core": ["tldts-core@7.0.29", "", {}, "sha512-W99NuU7b1DcG3uJ3v9k9VztCH3WialNbBkBft5wCs8V8mexu0XQqaZEYb9l9RNNzK8+3EJ9PKWB0/RUtTQ/o+Q=="], - "fumadocs-core/shiki/@shikijs/core/@shikijs/primitive": ["@shikijs/primitive@4.0.2", "", { "dependencies": { "@shikijs/types": "4.0.2", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-M6UMPrSa3fN5ayeJwFVl9qWofl273wtK1VG8ySDZ1mQBfhCpdd8nEx7nPZ/tk7k+TYcpqBZzj/AnwxT9lO+HJw=="], "fumadocs-openapi/shiki/@shikijs/core/@shikijs/primitive": 
["@shikijs/primitive@4.0.2", "", { "dependencies": { "@shikijs/types": "4.0.2", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-M6UMPrSa3fN5ayeJwFVl9qWofl273wtK1VG8ySDZ1mQBfhCpdd8nEx7nPZ/tk7k+TYcpqBZzj/AnwxT9lO+HJw=="], @@ -5288,18 +5211,6 @@ "@trigger.dev/core/socket.io/engine.io/@types/node/undici-types": ["undici-types@7.19.2", "", {}, "sha512-qYVnV5OEm2AW8cJMCpdV20CDyaN3g0AjDlOGf1OW4iaDEx8MwdtChUp4zu4H0VP3nDRF/8RKWH+IPp9uW0YGZg=="], - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/credential-provider-login": ["@aws-sdk/credential-provider-login@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-fOKC3VZkwa9T2l2VFKWRtfHQPQuISqqNl35ZhcXjWKVwRwl/o7THPMkqI4XwgT2noGa7LLYVbWMwnsgSsBqglg=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": 
"3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-x0mdv6DkjXqXEcQj3URbCltEzW6hoy/1uIL+i8gExP6YKrnhiZ7SzuB4gPls2UOpK5UqLiqXjhRLfBb1C9i4Dw=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso/@aws-sdk/token-providers": ["@aws-sdk/token-providers@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-k5qbRe/ZFjW9oWEdzLIa2twRVIEx7p/9rutofyrRysrtEnYh3HAWCngAnwbgKMoiwa806UzcTRx0TjyEpnKcCg=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/middleware-host-header": 
"3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-x0mdv6DkjXqXEcQj3URbCltEzW6hoy/1uIL+i8gExP6YKrnhiZ7SzuB4gPls2UOpK5UqLiqXjhRLfBb1C9i4Dw=="], - - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/signature-v4-multi-region/@aws-sdk/middleware-sdk-s3/@aws-sdk/util-arn-parser": ["@aws-sdk/util-arn-parser@3.893.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-u8H4f2Zsi19DGnwj5FSZzDMhytYF/bCh37vAtBsn3cNDL3YG578X5oc+wSX54pM3tOxS+NY7tvOAo52SW7koUA=="], - "lint-staged/listr2/cli-truncate/string-width/strip-ansi": ["strip-ansi@7.2.0", "", { "dependencies": { "ansi-regex": "^6.2.2" } }, "sha512-yDPMNjp4WyfYBkHnjIRLfca1i6KMyGCtsVgoKe/z1+6vukgaENdgGBZt+ZmKPc4gavvEZ5OgHfHdrazhgNyG7w=="], 
"lint-staged/listr2/log-update/cli-cursor/restore-cursor": ["restore-cursor@5.1.0", "", { "dependencies": { "onetime": "^7.0.0", "signal-exit": "^4.1.0" } }, "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA=="], @@ -5324,8 +5235,6 @@ "test-exclude/glob/jackspeak/@isaacs/cliui/wrap-ansi": ["wrap-ansi@8.1.0", "", { "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } }, "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="], - "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso/@aws-sdk/token-providers/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", 
"@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-x0mdv6DkjXqXEcQj3URbCltEzW6hoy/1uIL+i8gExP6YKrnhiZ7SzuB4gPls2UOpK5UqLiqXjhRLfBb1C9i4Dw=="], - "lint-staged/listr2/cli-truncate/string-width/strip-ansi/ansi-regex": ["ansi-regex@6.2.2", "", {}, "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg=="], "lint-staged/listr2/log-update/cli-cursor/restore-cursor/onetime": ["onetime@7.0.0", "", { "dependencies": { "mimic-function": "^5.0.0" } }, "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ=="], From 02f381d08c13bf96e193d866379aa651f8a6f8bb Mon Sep 17 00:00:00 2001 From: Vikhyath Mondreti Date: Sat, 9 May 2026 14:31:31 -0700 Subject: [PATCH 31/33] feat(pptx): replace viewer with custom preview (#4536) * feat(pptx): replace viewer with custom preview * address comments * address comments * address bugbot * address comment * rm bun lock * resolve conflict * update credit comment * address comments * rm bun lock * resolve conflict * make previews consistent for zoom and scroll * zoom fix --- apps/sim/app/(auth)/signup/signup-form.tsx | 2 +- apps/sim/app/api/files/parse/route.test.ts | 44 + apps/sim/app/api/files/parse/route.ts | 16 +- .../components/file-viewer/docx-preview.tsx | 214 +- .../components/file-viewer/pdf-viewer.tsx | 137 +- .../components/file-viewer/pptx-preview.tsx | 211 +- .../file-viewer/pptx-sandbox-host.tsx | 218 + .../file-viewer/preview-toolbar.tsx | 130 + .../file-viewer/preview-wheel-zoom.ts | 24 + .../file-viewer/zoomable-preview.tsx | 125 +- apps/sim/lib/mothership/inbox/response.ts | 309 +- apps/sim/lib/pptx-renderer/core/viewer.ts | 968 +++ .../export/serialize-presentation.ts | 201 + apps/sim/lib/pptx-renderer/index.ts | 18 + 
apps/sim/lib/pptx-renderer/model/layout.ts | 186 + apps/sim/lib/pptx-renderer/model/master.ts | 80 + .../pptx-renderer/model/nodes/base-node.ts | 169 + .../pptx-renderer/model/nodes/chart-node.ts | 55 + .../pptx-renderer/model/nodes/group-node.ts | 62 + .../lib/pptx-renderer/model/nodes/pic-node.ts | 102 + .../pptx-renderer/model/nodes/shape-node.ts | 267 + .../pptx-renderer/model/nodes/table-node.ts | 135 + .../pptx-renderer/model/presentation.test.ts | 79 + .../lib/pptx-renderer/model/presentation.ts | 486 ++ apps/sim/lib/pptx-renderer/model/slide.ts | 329 + apps/sim/lib/pptx-renderer/model/theme.ts | 95 + .../lib/pptx-renderer/model/xml-helpers.ts | 33 + .../lib/pptx-renderer/parser/rel-parser.ts | 81 + apps/sim/lib/pptx-renderer/parser/units.ts | 59 + .../lib/pptx-renderer/parser/xml-parser.ts | 105 + .../pptx-renderer/parser/zip-parser.test.ts | 51 + .../lib/pptx-renderer/parser/zip-parser.ts | 269 + .../renderer/background-renderer.test.ts | 98 + .../renderer/background-renderer.ts | 208 + .../pptx-renderer/renderer/chart-renderer.ts | 3413 +++++++++ .../pptx-renderer/renderer/group-renderer.ts | 218 + .../pptx-renderer/renderer/image-renderer.ts | 656 ++ .../renderer/predefined-table-styles.ts | 805 ++ .../pptx-renderer/renderer/render-context.ts | 80 + .../pptx-renderer/renderer/shape-renderer.ts | 1522 ++++ .../pptx-renderer/renderer/slide-renderer.ts | 315 + .../pptx-renderer/renderer/style-resolver.ts | 815 ++ .../pptx-renderer/renderer/table-renderer.ts | 608 ++ .../pptx-renderer/renderer/text-renderer.ts | 988 +++ .../pptx-renderer/shapes/custom-geometry.ts | 178 + apps/sim/lib/pptx-renderer/shapes/presets.ts | 6591 +++++++++++++++++ .../sim/lib/pptx-renderer/shapes/shape-arc.ts | 44 + .../lib/pptx-renderer/sim-pptx-viewer.test.ts | 65 + apps/sim/lib/pptx-renderer/sim-pptx-viewer.ts | 95 + apps/sim/lib/pptx-renderer/utils/color.ts | 488 ++ .../sim/lib/pptx-renderer/utils/emf-parser.ts | 289 + apps/sim/lib/pptx-renderer/utils/media.ts | 73 + 
.../lib/pptx-renderer/utils/pdf-renderer.ts | 198 + .../lib/pptx-renderer/utils/preview-scale.ts | 40 + .../pptx-renderer/utils/url-safety.test.ts | 16 + .../sim/lib/pptx-renderer/utils/url-safety.ts | 17 + apps/sim/package.json | 5 +- bun.lock | 134 +- 58 files changed, 22737 insertions(+), 482 deletions(-) create mode 100644 apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/pptx-sandbox-host.tsx create mode 100644 apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/preview-toolbar.tsx create mode 100644 apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/preview-wheel-zoom.ts create mode 100644 apps/sim/lib/pptx-renderer/core/viewer.ts create mode 100644 apps/sim/lib/pptx-renderer/export/serialize-presentation.ts create mode 100644 apps/sim/lib/pptx-renderer/index.ts create mode 100644 apps/sim/lib/pptx-renderer/model/layout.ts create mode 100644 apps/sim/lib/pptx-renderer/model/master.ts create mode 100644 apps/sim/lib/pptx-renderer/model/nodes/base-node.ts create mode 100644 apps/sim/lib/pptx-renderer/model/nodes/chart-node.ts create mode 100644 apps/sim/lib/pptx-renderer/model/nodes/group-node.ts create mode 100644 apps/sim/lib/pptx-renderer/model/nodes/pic-node.ts create mode 100644 apps/sim/lib/pptx-renderer/model/nodes/shape-node.ts create mode 100644 apps/sim/lib/pptx-renderer/model/nodes/table-node.ts create mode 100644 apps/sim/lib/pptx-renderer/model/presentation.test.ts create mode 100644 apps/sim/lib/pptx-renderer/model/presentation.ts create mode 100644 apps/sim/lib/pptx-renderer/model/slide.ts create mode 100644 apps/sim/lib/pptx-renderer/model/theme.ts create mode 100644 apps/sim/lib/pptx-renderer/model/xml-helpers.ts create mode 100644 apps/sim/lib/pptx-renderer/parser/rel-parser.ts create mode 100644 apps/sim/lib/pptx-renderer/parser/units.ts create mode 100644 apps/sim/lib/pptx-renderer/parser/xml-parser.ts create mode 100644 apps/sim/lib/pptx-renderer/parser/zip-parser.test.ts create mode 100644 
apps/sim/lib/pptx-renderer/parser/zip-parser.ts create mode 100644 apps/sim/lib/pptx-renderer/renderer/background-renderer.test.ts create mode 100644 apps/sim/lib/pptx-renderer/renderer/background-renderer.ts create mode 100644 apps/sim/lib/pptx-renderer/renderer/chart-renderer.ts create mode 100644 apps/sim/lib/pptx-renderer/renderer/group-renderer.ts create mode 100644 apps/sim/lib/pptx-renderer/renderer/image-renderer.ts create mode 100644 apps/sim/lib/pptx-renderer/renderer/predefined-table-styles.ts create mode 100644 apps/sim/lib/pptx-renderer/renderer/render-context.ts create mode 100644 apps/sim/lib/pptx-renderer/renderer/shape-renderer.ts create mode 100644 apps/sim/lib/pptx-renderer/renderer/slide-renderer.ts create mode 100644 apps/sim/lib/pptx-renderer/renderer/style-resolver.ts create mode 100644 apps/sim/lib/pptx-renderer/renderer/table-renderer.ts create mode 100644 apps/sim/lib/pptx-renderer/renderer/text-renderer.ts create mode 100644 apps/sim/lib/pptx-renderer/shapes/custom-geometry.ts create mode 100644 apps/sim/lib/pptx-renderer/shapes/presets.ts create mode 100644 apps/sim/lib/pptx-renderer/shapes/shape-arc.ts create mode 100644 apps/sim/lib/pptx-renderer/sim-pptx-viewer.test.ts create mode 100644 apps/sim/lib/pptx-renderer/sim-pptx-viewer.ts create mode 100644 apps/sim/lib/pptx-renderer/utils/color.ts create mode 100644 apps/sim/lib/pptx-renderer/utils/emf-parser.ts create mode 100644 apps/sim/lib/pptx-renderer/utils/media.ts create mode 100644 apps/sim/lib/pptx-renderer/utils/pdf-renderer.ts create mode 100644 apps/sim/lib/pptx-renderer/utils/preview-scale.ts create mode 100644 apps/sim/lib/pptx-renderer/utils/url-safety.test.ts create mode 100644 apps/sim/lib/pptx-renderer/utils/url-safety.ts diff --git a/apps/sim/app/(auth)/signup/signup-form.tsx b/apps/sim/app/(auth)/signup/signup-form.tsx index ae39c7c7bcf..b2b6391a99e 100644 --- a/apps/sim/app/(auth)/signup/signup-form.tsx +++ b/apps/sim/app/(auth)/signup/signup-form.tsx @@ -271,7 +271,7 
@@ function SignupFormContent({ githubAvailable, googleAvailable, isProduction }: S ...(token ? { 'x-captcha-response': token } : {}), }, onError: (ctx) => { - logger.error('Signup error:', ctx.error) + logger.warn('Signup error:', ctx.error) const errorMessage: string[] = ['Failed to create account'] let errorCode = 'unknown' diff --git a/apps/sim/app/api/files/parse/route.test.ts b/apps/sim/app/api/files/parse/route.test.ts index e2c032b4718..4283b6723b8 100644 --- a/apps/sim/app/api/files/parse/route.test.ts +++ b/apps/sim/app/api/files/parse/route.test.ts @@ -69,6 +69,7 @@ vi.mock('@/app/api/files/authorization', () => ({ vi.mock('@/lib/uploads', () => ({ getStorageProvider: mockGetStorageProvider, isUsingCloudStorage: mockIsUsingCloudStorage, + StorageService: storageServiceMock, })) vi.mock('@/lib/file-parsers', () => ({ @@ -172,6 +173,7 @@ describe('File Parse API Route', () => { permissionsMockFns.mockGetUserEntityPermissions.mockResolvedValue({ canView: true }) storageServiceMockFns.mockHasCloudStorage.mockReturnValue(true) + storageServiceMockFns.mockDownloadFile.mockResolvedValue(Buffer.from('test file content')) mockIsSupportedFileType.mockReturnValue(true) mockParseFile.mockResolvedValue({ content: 'parsed content', @@ -245,6 +247,48 @@ describe('File Parse API Route', () => { } }) + it('should keep known binary extensions as binary even when the bytes are valid UTF-8', async () => { + setupFileApiMocks({ + cloudEnabled: true, + storageProvider: 's3', + authenticated: true, + }) + mockIsSupportedFileType.mockReturnValue(false) + storageServiceMockFns.mockDownloadFile.mockResolvedValue(Buffer.from('valid utf8 bytes')) + + const req = createMockRequest('POST', { + filePath: '/api/files/serve/execution/workspace-1/workflow-1/execution-1/image.png', + }) + + const response = await POST(req) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.success).toBe(true) + expect(data.output.content).toBe('[Binary PNG file - 16 
bytes]') + }) + + it('should parse unknown extensions as text when the bytes look like UTF-8 text', async () => { + setupFileApiMocks({ + cloudEnabled: true, + storageProvider: 's3', + authenticated: true, + }) + mockIsSupportedFileType.mockReturnValue(false) + storageServiceMockFns.mockDownloadFile.mockResolvedValue(Buffer.from('plain text content')) + + const req = createMockRequest('POST', { + filePath: '/api/files/serve/execution/workspace-1/workflow-1/execution-1/readme.customtext', + }) + + const response = await POST(req) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.success).toBe(true) + expect(data.output.content).toBe('plain text content') + }) + it('should handle multiple files', async () => { setupFileApiMocks({ cloudEnabled: false, diff --git a/apps/sim/app/api/files/parse/route.ts b/apps/sim/app/api/files/parse/route.ts index 09f26086eb1..05709621ce4 100644 --- a/apps/sim/app/api/files/parse/route.ts +++ b/apps/sim/app/api/files/parse/route.ts @@ -1,4 +1,4 @@ -import { Buffer } from 'buffer' +import { Buffer, isUtf8 } from 'buffer' import { createHash } from 'crypto' import fsPromises, { readFile } from 'fs/promises' import path from 'path' @@ -39,6 +39,11 @@ const logger = createLogger('FilesParseAPI') const MAX_DOWNLOAD_SIZE_BYTES = 100 * 1024 * 1024 // 100 MB const DOWNLOAD_TIMEOUT_MS = 30000 // 30 seconds +const BINARY_EXTENSIONS = new Set(binaryExtensionsList) + +function isLikelyTextBuffer(fileBuffer: Buffer): boolean { + return isUtf8(fileBuffer) && !fileBuffer.includes(0) +} interface ExecutionContext { workspaceId: string @@ -863,10 +868,11 @@ function handleGenericBuffer( extension: string, fileType?: string ): ParseResult { - const isBinary = binaryExtensionsList.includes(extension) - const content = isBinary - ? 
`[Binary ${extension.toUpperCase()} file - ${fileBuffer.length} bytes]` - : fileBuffer.toString('utf-8') + const normalizedExtension = extension.toLowerCase() + const content = + !BINARY_EXTENSIONS.has(normalizedExtension) && isLikelyTextBuffer(fileBuffer) + ? fileBuffer.toString('utf-8') + : `[Binary ${normalizedExtension.toUpperCase()} file - ${fileBuffer.length} bytes]` return { success: true, diff --git a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/docx-preview.tsx b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/docx-preview.tsx index a9c54372f04..35de183ddfc 100644 --- a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/docx-preview.tsx +++ b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/docx-preview.tsx @@ -7,23 +7,36 @@ import { cn } from '@/lib/core/utils/cn' import type { WorkspaceFileRecord } from '@/lib/uploads/contexts/workspace' import { useWorkspaceFileBinary } from '@/hooks/queries/workspace-files' import { PDF_PAGE_SKELETON, PreviewError, resolvePreviewError } from './preview-shared' +import { PreviewToolbar } from './preview-toolbar' +import { bindPreviewWheelZoom } from './preview-wheel-zoom' const logger = createLogger('DocxPreview') +const DOCX_ZOOM_MIN = 25 +const DOCX_ZOOM_MAX = 400 +const DOCX_ZOOM_STEP = 20 +const DOCX_ZOOM_WHEEL_SENSITIVITY = 0.005 + /** * Fit the rendered docx pages to the host container width using a CSS scale. * The library renders `
    ` at the document's natural page * width (in cm), which overflows narrow panels. */ -function fitDocxToContainer(host: HTMLElement) { +function fitDocxToContainer(host: HTMLElement, viewport: HTMLElement, zoomPercent: number) { const wrapper = host.querySelector('.docx-wrapper') if (!wrapper) return const section = wrapper.querySelector('section.docx') if (!section) return - wrapper.style.transform = '' - wrapper.style.transformOrigin = 'top left' + host.style.minWidth = '' + host.style.minHeight = '' + host.style.width = '' + host.style.display = 'flex' + host.style.flexDirection = 'column' + host.style.alignItems = 'center' + wrapper.style.zoom = '' wrapper.style.width = '' + wrapper.style.flex = '0 0 auto' wrapper.style.marginRight = '' wrapper.style.marginBottom = '' @@ -34,16 +47,16 @@ function fitDocxToContainer(host: HTMLElement) { const horizontalPadding = Number.parseFloat(wrapperStyle.paddingLeft) + Number.parseFloat(wrapperStyle.paddingRight) const naturalWrapperWidth = naturalPageWidth + horizontalPadding - const available = host.clientWidth - const scale = Math.min(1, available / naturalWrapperWidth) - - if (scale >= 1) return + const available = viewport.clientWidth + const fitScale = Math.min(1, available / naturalWrapperWidth) + const scale = fitScale * (zoomPercent / 100) + const scaledWrapperWidth = naturalWrapperWidth * scale wrapper.style.width = `${naturalWrapperWidth}px` - wrapper.style.transform = `scale(${scale})` - const naturalHeight = wrapper.offsetHeight - wrapper.style.marginRight = `${(scale - 1) * naturalWrapperWidth}px` - wrapper.style.marginBottom = `${(scale - 1) * naturalHeight}px` + wrapper.style.zoom = String(scale) + host.style.width = `${Math.max(available, scaledWrapperWidth)}px` + host.style.minWidth = `${scaledWrapperWidth}px` + host.style.minHeight = `${wrapper.offsetHeight * scale}px` } export const DocxPreview = memo(function DocxPreview({ @@ -56,7 +69,9 @@ export const DocxPreview = memo(function DocxPreview({ 
streamingContent?: string }) { const containerRef = useRef(null) + const scrollContainerRef = useRef(null) const lastSuccessfulHtmlRef = useRef('') + const zoomPercentRef = useRef(100) const { data: fileData, isLoading, @@ -65,25 +80,106 @@ export const DocxPreview = memo(function DocxPreview({ const [renderError, setRenderError] = useState(null) const [rendering, setRendering] = useState(false) const [hasRenderedPreview, setHasRenderedPreview] = useState(false) + const [zoomPercent, setZoomPercent] = useState(100) + const [pageCount, setPageCount] = useState(0) + const [currentPage, setCurrentPage] = useState(1) + const [documentRenderVersion, setDocumentRenderVersion] = useState(0) const applyPostRenderStyling = useCallback(() => { const container = containerRef.current - if (!container) return + const scrollContainer = scrollContainerRef.current + if (!container || !scrollContainer) return const wrapper = container.querySelector('.docx-wrapper') if (wrapper) wrapper.style.background = 'transparent' - container.querySelectorAll('section.docx').forEach((page) => { + const pages = Array.from(container.querySelectorAll('section.docx')) + pages.forEach((page, index) => { page.style.boxShadow = 'var(--shadow-medium)' + page.dataset.page = String(index + 1) }) - fitDocxToContainer(container) + setPageCount((previous) => (previous === pages.length ? previous : pages.length)) + setCurrentPage((current) => (pages.length > 0 ? 
Math.min(current, pages.length) : 1)) + fitDocxToContainer(container, scrollContainer, zoomPercentRef.current) }, []) useEffect(() => { + const scrollContainer = scrollContainerRef.current + if (!scrollContainer) return + const observer = new ResizeObserver(() => applyPostRenderStyling()) + observer.observe(scrollContainer) + return () => observer.disconnect() + }, [applyPostRenderStyling]) + + const applyZoomAt = useCallback( + (nextZoom: number, anchorX: number, anchorY: number) => { + const scrollContainer = scrollContainerRef.current + if (!scrollContainer) return + + const clampedZoom = Math.round(Math.min(Math.max(nextZoom, DOCX_ZOOM_MIN), DOCX_ZOOM_MAX)) + const wrapper = containerRef.current?.querySelector('.docx-wrapper') + const containerRect = scrollContainer.getBoundingClientRect() + const anchorClientX = containerRect.left + anchorX + const anchorClientY = containerRect.top + anchorY + const beforeRect = wrapper?.getBoundingClientRect() + const anchorRatioX = + beforeRect && beforeRect.width > 0 + ? (anchorClientX - beforeRect.left) / beforeRect.width + : 0 + const anchorRatioY = + beforeRect && beforeRect.height > 0 + ? 
(anchorClientY - beforeRect.top) / beforeRect.height + : 0 + + zoomPercentRef.current = clampedZoom + setZoomPercent(clampedZoom) + applyPostRenderStyling() + + const afterRect = wrapper?.getBoundingClientRect() + if (!beforeRect || !afterRect) return + + scrollContainer.scrollLeft += afterRect.left + anchorRatioX * afterRect.width - anchorClientX + scrollContainer.scrollTop += afterRect.top + anchorRatioY * afterRect.height - anchorClientY + }, + [applyPostRenderStyling] + ) + + useEffect(() => { + const scrollContainer = scrollContainerRef.current + if (!scrollContainer) return + + return bindPreviewWheelZoom(scrollContainer, (event) => { + const rect = scrollContainer.getBoundingClientRect() + applyZoomAt( + zoomPercentRef.current * (1 - event.deltaY * DOCX_ZOOM_WHEEL_SENSITIVITY), + event.clientX - rect.left, + event.clientY - rect.top + ) + }) + }, [applyZoomAt]) + + useEffect(() => { + const scrollContainer = scrollContainerRef.current const container = containerRef.current - if (!container) return - const observer = new ResizeObserver(() => fitDocxToContainer(container)) - observer.observe(container) + if (!scrollContainer || !container || pageCount === 0) return + + const pages = Array.from(container.querySelectorAll('section.docx')) + const observer = new IntersectionObserver( + (entries) => { + for (const entry of entries) { + if (entry.isIntersecting) { + const page = Number((entry.target as HTMLElement).dataset.page) + if (page) setCurrentPage(page) + } + } + }, + { root: scrollContainer, threshold: 0.5 } + ) + + for (const page of pages) { + observer.observe(page) + } + return () => observer.disconnect() - }, []) + }, [pageCount, documentRenderVersion]) useEffect(() => { if (!containerRef.current || !fileData || streamingContent !== undefined) return @@ -106,6 +202,7 @@ export const DocxPreview = memo(function DocxPreview({ applyPostRenderStyling() lastSuccessfulHtmlRef.current = containerRef.current.innerHTML setHasRenderedPreview(true) + 
setDocumentRenderVersion((version) => version + 1) } } catch (err) { if (!cancelled) { @@ -128,6 +225,7 @@ export const DocxPreview = memo(function DocxPreview({ useEffect(() => { if (streamingContent === undefined || !containerRef.current) return + if (streamingContent.trim().length === 0) return let cancelled = false const controller = new AbortController() @@ -155,6 +253,7 @@ export const DocxPreview = memo(function DocxPreview({ const arrayBuffer = await response.arrayBuffer() if (cancelled || !containerRef.current) return + if (arrayBuffer.byteLength === 0) return const { renderAsync } = await import('docx-preview') if (cancelled || !containerRef.current) return @@ -170,6 +269,7 @@ export const DocxPreview = memo(function DocxPreview({ applyPostRenderStyling() lastSuccessfulHtmlRef.current = containerRef.current.innerHTML setHasRenderedPreview(true) + setDocumentRenderVersion((version) => version + 1) } } catch (err) { if (!cancelled && !(err instanceof DOMException && err.name === 'AbortError')) { @@ -177,6 +277,7 @@ export const DocxPreview = memo(function DocxPreview({ containerRef.current.innerHTML = previousHtml applyPostRenderStyling() setHasRenderedPreview(true) + setDocumentRenderVersion((version) => version + 1) } const msg = toError(err).message || 'Failed to render document' logger.info('Transient DOCX streaming preview error (suppressed)', { error: msg }) @@ -201,15 +302,78 @@ export const DocxPreview = memo(function DocxPreview({ const showSkeleton = !hasRenderedPreview && (streamingContent !== undefined || isLoading || rendering) + const scrollToPage = (page: number) => { + const scrollContainer = scrollContainerRef.current + const target = containerRef.current?.querySelector( + `section.docx[data-page="${page}"]` + ) + if (!scrollContainer || !target) return + + if (zoomPercentRef.current !== 100) { + applyZoomAt(100, scrollContainer.clientWidth / 2, scrollContainer.clientHeight / 2) + } + + scrollContainer.scrollTo({ + top: target.offsetTop - 
scrollContainer.offsetTop - 16, + behavior: 'smooth', + }) + } + return ( -
    - {showSkeleton && ( -
    {PDF_PAGE_SKELETON}
    - )} -
    + 0 && currentPage > 1, + canNext: pageCount > 0 && currentPage < pageCount, + onPrevious: () => { + const previous = Math.max(1, currentPage - 1) + setCurrentPage(previous) + scrollToPage(previous) + }, + onNext: () => { + const next = Math.min(pageCount, currentPage + 1) + setCurrentPage(next) + scrollToPage(next) + }, + }} + zoom={{ + label: `${zoomPercent}%`, + canZoomOut: zoomPercent > DOCX_ZOOM_MIN, + canZoomIn: zoomPercent < DOCX_ZOOM_MAX, + onReset: () => { + const c = scrollContainerRef.current + applyZoomAt(100, c ? c.clientWidth / 2 : 0, c ? c.clientHeight / 2 : 0) + }, + onZoomOut: () => { + const c = scrollContainerRef.current + applyZoomAt( + zoomPercent - DOCX_ZOOM_STEP, + c ? c.clientWidth / 2 : 0, + c ? c.clientHeight / 2 : 0 + ) + }, + onZoomIn: () => { + const c = scrollContainerRef.current + applyZoomAt( + zoomPercent + DOCX_ZOOM_STEP, + c ? c.clientWidth / 2 : 0, + c ? c.clientHeight / 2 : 0 + ) + }, + }} /> +
    + {showSkeleton && ( +
    {PDF_PAGE_SKELETON}
    + )} +
    +
    ) }) diff --git a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/pdf-viewer.tsx b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/pdf-viewer.tsx index fa4a744ef45..53de47f1057 100644 --- a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/pdf-viewer.tsx +++ b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/pdf-viewer.tsx @@ -2,10 +2,11 @@ import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react' import { createLogger } from '@sim/logger' -import { ChevronLeft, ChevronRight, ZoomIn, ZoomOut } from 'lucide-react' import { pdfjs, Document as ReactPdfDocument, Page as ReactPdfPage } from 'react-pdf' import 'react-pdf/dist/Page/TextLayer.css' -import { Button, Skeleton } from '@/components/emcn' +import { Skeleton } from '@/components/emcn' +import { PreviewToolbar } from '@/app/workspace/[workspaceId]/files/components/file-viewer/preview-toolbar' +import { bindPreviewWheelZoom } from '@/app/workspace/[workspaceId]/files/components/file-viewer/preview-wheel-zoom' pdfjs.GlobalWorkerOptions.workerSrc = new URL( 'pdfjs-dist/build/pdf.worker.min.mjs', @@ -120,9 +121,14 @@ export const PdfViewerCore = memo(function PdfViewerCore({ source, filename }: P }, []) const scrollToPage = (page: number) => { + const container = containerRef.current + if (container && zoomRef.current !== PDF_ZOOM_DEFAULT) { + applyZoomAt(PDF_ZOOM_DEFAULT, container.clientWidth / 2, container.clientHeight / 2) + } + const wrapper = pageRefs.current[page - 1] - if (wrapper && containerRef.current) { - containerRef.current.scrollTo({ top: wrapper.offsetTop - 16, behavior: 'smooth' }) + if (wrapper && container) { + container.scrollTo({ top: wrapper.offsetTop - 16, behavior: 'smooth' }) } } @@ -153,100 +159,57 @@ export const PdfViewerCore = memo(function PdfViewerCore({ source, filename }: P const container = containerRef.current if (!container) return - const onWheel = (e: WheelEvent) => { - if (!e.ctrlKey) 
return - e.preventDefault() - + return bindPreviewWheelZoom(container, (e) => { const next = Math.min( PDF_ZOOM_MAX, Math.max(PDF_ZOOM_MIN, zoomRef.current * (1 - e.deltaY * 0.005)) ) const rect = container.getBoundingClientRect() applyZoomAt(next, e.clientX - rect.left, e.clientY - rect.top) - } - - container.addEventListener('wheel', onWheel, { passive: false }) - return () => container.removeEventListener('wheel', onWheel) + }) }, [applyZoomAt]) return (
    {pageCount > 0 && !loadError && ( -
    -
    - - - {currentPage} / {pageCount} - - -
    - -
    - - - {Math.round(displayZoom * 100)}% - - -
    -
    + { + const prev = Math.max(1, currentPage - 1) + setCurrentPage(prev) + scrollToPage(prev) + }, + onNext: () => { + const next = Math.min(pageCount, currentPage + 1) + setCurrentPage(next) + scrollToPage(next) + }, + }} + zoom={{ + label: `${Math.round(displayZoom * 100)}%`, + canZoomOut: displayZoom > PDF_ZOOM_MIN, + canZoomIn: displayZoom < PDF_ZOOM_MAX, + onZoomOut: () => { + const c = containerRef.current + applyZoomAt( + Math.max(PDF_ZOOM_MIN, zoomRef.current / PDF_ZOOM_STEP), + c ? c.clientWidth / 2 : 0, + c ? c.clientHeight / 2 : 0 + ) + }, + onZoomIn: () => { + const c = containerRef.current + applyZoomAt( + Math.min(PDF_ZOOM_MAX, zoomRef.current * PDF_ZOOM_STEP), + c ? c.clientWidth / 2 : 0, + c ? c.clientHeight / 2 : 0 + ) + }, + }} + /> )}
    ) -const pptxSlideCache = new Map() - function pptxCacheKey(fileId: string, dataUpdatedAt: number, byteLength: number): string { return `${fileId}:${dataUpdatedAt}:${byteLength}` } -function pptxCacheSet(key: string, slides: string[]): void { - pptxSlideCache.set(key, slides) - if (pptxSlideCache.size > 5) { - const oldest = pptxSlideCache.keys().next().value - if (oldest !== undefined) pptxSlideCache.delete(oldest) - } -} - -async function renderPptxSlides( - data: Uint8Array, - onSlide: (src: string, index: number) => void, - cancelled: () => boolean -): Promise { - const { PPTXViewer } = await import('pptxviewjs') - if (cancelled()) return - - const dpr = Math.min(window.devicePixelRatio || 1, 2) - const { width, height } = await getPptxRenderSize(data, dpr) - const W = width - const H = height - - const canvas = document.createElement('canvas') - canvas.width = W - canvas.height = H - const viewer = new PPTXViewer({ canvas }) - await viewer.loadFile(data) - const count = viewer.getSlideCount() - if (cancelled() || count === 0) return - - for (let i = 0; i < count; i++) { - if (cancelled()) break - if (i === 0) await viewer.render() - else await viewer.goToSlide(i) - onSlide(canvas.toDataURL('image/jpeg', 0.85), i) - } -} - -async function getPptxRenderSize( - data: Uint8Array, - dpr: number -): Promise<{ width: number; height: number }> { - const fallback = { - width: Math.round(1920 * dpr), - height: Math.round(1080 * dpr), - } - - try { - const JSZip = (await import('jszip')).default - const zip = await JSZip.loadAsync(data) - const presentationXml = await zip.file('ppt/presentation.xml')?.async('text') - if (!presentationXml) return fallback - - const tagMatch = presentationXml.match(/]+>/) - if (!tagMatch) return fallback - const tag = tagMatch[0] - const cxMatch = tag.match(/\bcx="(\d+)"/) - const cyMatch = tag.match(/\bcy="(\d+)"/) - if (!cxMatch || !cyMatch) return fallback - - const cx = Number(cxMatch[1]) - const cy = Number(cyMatch[1]) - if 
(!Number.isFinite(cx) || !Number.isFinite(cy) || cx <= 0 || cy <= 0) return fallback - - const aspectRatio = cx / cy - if (!Number.isFinite(aspectRatio) || aspectRatio <= 0) return fallback - - const baseLongEdge = 1920 * dpr - if (aspectRatio >= 1) { - return { - width: Math.round(baseLongEdge), - height: Math.round(baseLongEdge / aspectRatio), - } - } - - return { - width: Math.round(baseLongEdge * aspectRatio), - height: Math.round(baseLongEdge), - } - } catch { - return fallback - } -} - export const PptxPreview = memo(function PptxPreview({ file, workspaceId, @@ -134,20 +53,20 @@ export const PptxPreview = memo(function PptxPreview({ }) { const { data: fileData, - isLoading: isFetching, error: fetchError, dataUpdatedAt, } = useWorkspaceFileBinary(workspaceId, file.id, file.key) const cacheKey = pptxCacheKey(file.id, dataUpdatedAt, fileData?.byteLength ?? 0) - const cached = pptxSlideCache.get(cacheKey) - const [slides, setSlides] = useState(cached ?? []) - const [rendering, setRendering] = useState(false) + const [streamBuffer, setStreamBuffer] = useState(null) + const [streamVersion, setStreamVersion] = useState(0) + const [hasRendered, setHasRendered] = useState(false) const [renderError, setRenderError] = useState(null) + const isStreaming = streamingContent !== undefined useEffect(() => { - if (streamingContent === undefined) return + if (!isStreaming) return let cancelled = false const controller = new AbortController() @@ -155,8 +74,6 @@ export const PptxPreview = memo(function PptxPreview({ const debounceTimer = setTimeout(async () => { if (cancelled) return try { - setRendering(true) - // boundary-raw-fetch: route returns binary PPTX (read via response.arrayBuffer()), not JSON const response = await fetch(`/api/workspaces/${workspaceId}/pptx/preview`, { method: 'POST', @@ -171,23 +88,14 @@ export const PptxPreview = memo(function PptxPreview({ if (cancelled) return const arrayBuffer = await response.arrayBuffer() if (cancelled) return - const data = 
new Uint8Array(arrayBuffer) - const images: string[] = [] - await renderPptxSlides( - data, - (src) => { - images.push(src) - if (!cancelled) setSlides([...images]) - }, - () => cancelled - ) + setRenderError(null) + setStreamBuffer(arrayBuffer) + setStreamVersion((version) => version + 1) } catch (err) { if (!cancelled && !(err instanceof DOMException && err.name === 'AbortError')) { const msg = toError(err).message || 'Failed to render presentation' logger.info('Transient PPTX streaming preview error (suppressed)', { error: msg }) } - } finally { - if (!cancelled) setRendering(false) } }, 500) @@ -196,77 +104,54 @@ export const PptxPreview = memo(function PptxPreview({ clearTimeout(debounceTimer) controller.abort() } - }, [streamingContent, workspaceId]) + }, [isStreaming, streamingContent, workspaceId]) useEffect(() => { - if (streamingContent !== undefined) return - - let cancelled = false - - async function render() { - if (cancelled) return - try { - if (cached) { - setSlides(cached) - return - } - - if (!fileData) return - setRendering(true) - setRenderError(null) - setSlides([]) - const data = new Uint8Array(fileData) - const images: string[] = [] - await renderPptxSlides( - data, - (src) => { - images.push(src) - if (!cancelled) setSlides([...images]) - }, - () => cancelled - ) - if (!cancelled && images.length > 0) { - pptxCacheSet(cacheKey, images) - } - } catch (err) { - if (!cancelled) { - const msg = toError(err).message || 'Failed to render presentation' - logger.error('PPTX render failed', { error: msg }) - setRenderError(msg) - } - } finally { - if (!cancelled) setRendering(false) - } - } + setRenderError(null) + setHasRendered(false) + if (!isStreaming) setStreamBuffer(null) + }, [cacheKey, isStreaming]) + + const activeBuffer = isStreaming ? streamBuffer : fileData + const activeRenderKey = isStreaming + ? `${file.id}:stream:${streamVersion}:${streamBuffer?.byteLength ?? 
0}` + : cacheKey + + function handleRenderStart() { + if (!isStreaming) setRenderError(null) + } - render() + function handleRenderComplete() { + setHasRendered(true) + } - return () => { - cancelled = true + function handleRenderError(message: string) { + if (isStreaming) { + logger.info('Transient PPTX streaming render error (suppressed)', { error: message }) + return } - }, [fileData, streamingContent, cacheKey]) + logger.error('PPTX render failed', { error: message }) + setRenderError(message || 'Failed to render presentation') + } - const error = streamingContent !== undefined ? null : resolvePreviewError(fetchError, renderError) - const loading = isFetching || rendering + const error = isStreaming ? null : resolvePreviewError(fetchError, renderError) if (error) return - if ((loading || streamingContent !== undefined) && slides.length === 0) { + if (!activeBuffer) { return PPTX_SLIDE_SKELETON } return ( -
    -
    - {slides.map((src, i) => ( - {`Slide - ))} -
    +
    + + {!hasRendered &&
    {PPTX_SLIDE_SKELETON}
    }
    ) }) diff --git a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/pptx-sandbox-host.tsx b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/pptx-sandbox-host.tsx new file mode 100644 index 00000000000..5cdc245aaa3 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/pptx-sandbox-host.tsx @@ -0,0 +1,218 @@ +'use client' + +import { memo, useCallback, useEffect, useRef, useState } from 'react' +import { createLogger } from '@sim/logger' +import { toError } from '@sim/utils/errors' +import { openSimPptxViewer, type SimPptxViewerHandle } from '@/lib/pptx-renderer/sim-pptx-viewer' +import { PreviewToolbar } from '@/app/workspace/[workspaceId]/files/components/file-viewer/preview-toolbar' +import { bindPreviewWheelZoom } from '@/app/workspace/[workspaceId]/files/components/file-viewer/preview-wheel-zoom' + +const logger = createLogger('PptxSandboxHost') + +const ZOOM_MIN = 25 +const ZOOM_MAX = 400 +const ZOOM_STEP = 20 +const ZOOM_WHEEL_SENSITIVITY = 0.005 + +interface PptxSandboxHostProps { + buffer: ArrayBuffer + requestId: string + onRenderStart?: () => void + onRenderComplete?: () => void + onRenderError?: (error: string) => void +} + +export const PptxSandboxHost = memo(function PptxSandboxHost({ + buffer, + requestId, + onRenderStart, + onRenderComplete, + onRenderError, +}: PptxSandboxHostProps) { + const stageRef = useRef(null) + const scrollContainerRef = useRef(null) + const activeHandleRef = useRef(null) + const activeContainerRef = useRef(null) + const renderSequenceRef = useRef(0) + const onRenderStartRef = useRef(onRenderStart) + const onRenderCompleteRef = useRef(onRenderComplete) + const onRenderErrorRef = useRef(onRenderError) + const zoomPercentRef = useRef(100) + + onRenderStartRef.current = onRenderStart + onRenderCompleteRef.current = onRenderComplete + onRenderErrorRef.current = onRenderError + const [zoomPercent, setZoomPercent] = useState(100) + const [slideCount, 
setSlideCount] = useState(0) + const [currentSlide, setCurrentSlide] = useState(1) + + useEffect(() => { + const stage = stageRef.current + if (!stage) return + + const controller = new AbortController() + const sequence = ++renderSequenceRef.current + const nextContainer = document.createElement('div') + nextContainer.dataset.requestId = requestId + nextContainer.style.width = '100%' + nextContainer.style.visibility = 'hidden' + stage.appendChild(nextContainer) + + onRenderStartRef.current?.() + + async function render() { + try { + const handle = await openSimPptxViewer({ + buffer, + container: nextContainer, + scrollContainer: scrollContainerRef.current ?? undefined, + signal: controller.signal, + onSlideChange: (index) => setCurrentSlide(index + 1), + onSlideError: (slideIndex, error) => { + logger.warn('PPTX slide render failed', { + slideIndex, + error: toError(error).message, + }) + }, + }) + + if (controller.signal.aborted || sequence !== renderSequenceRef.current) { + handle.destroy() + nextContainer.remove() + return + } + + const previousHandle = activeHandleRef.current + const previousContainer = activeContainerRef.current + activeHandleRef.current = handle + activeContainerRef.current = nextContainer + setSlideCount(handle.viewer.slideCount) + setCurrentSlide(handle.viewer.currentSlideIndex + 1) + if (zoomPercentRef.current !== 100) { + await handle.viewer.setZoom(zoomPercentRef.current) + } + nextContainer.style.visibility = 'visible' + previousHandle?.destroy() + previousContainer?.remove() + onRenderCompleteRef.current?.() + } catch (error) { + nextContainer.remove() + if (controller.signal.aborted) return + + const message = toError(error).message || 'Failed to render presentation' + logger.warn('PPTX render failed', { error: message }) + onRenderErrorRef.current?.(message) + } + } + + render() + + return () => { + controller.abort() + if (activeContainerRef.current !== nextContainer) { + nextContainer.remove() + } + } + }, [buffer, requestId]) + + 
useEffect(() => { + return () => { + renderSequenceRef.current += 1 + activeHandleRef.current?.destroy() + activeContainerRef.current?.remove() + } + }, []) + + const applyZoomAt = useCallback(async (nextZoom: number, anchorX: number, anchorY: number) => { + const container = scrollContainerRef.current + if (!container) return + + const clampedZoom = Math.round(Math.min(Math.max(nextZoom, ZOOM_MIN), ZOOM_MAX)) + const ratio = clampedZoom / zoomPercentRef.current + const style = window.getComputedStyle(container) + const paddingLeft = Number.parseFloat(style.paddingLeft) || 0 + const paddingTop = Number.parseFloat(style.paddingTop) || 0 + const previousScrollLeft = container.scrollLeft + const previousScrollTop = container.scrollTop + + zoomPercentRef.current = clampedZoom + setZoomPercent(clampedZoom) + await activeHandleRef.current?.viewer.setZoom(clampedZoom) + + container.scrollLeft = + (previousScrollLeft + anchorX - paddingLeft) * ratio + paddingLeft - anchorX + container.scrollTop = (previousScrollTop + anchorY - paddingTop) * ratio + paddingTop - anchorY + }, []) + + const applyZoomFromCenter = useCallback( + (nextZoom: number): Promise => { + const container = scrollContainerRef.current + return applyZoomAt( + nextZoom, + container ? container.clientWidth / 2 : 0, + container ? 
container.clientHeight / 2 : 0 + ) + }, + [applyZoomAt] + ) + + useEffect(() => { + const container = scrollContainerRef.current + if (!container) return + + return bindPreviewWheelZoom(container, (event) => { + const rect = container.getBoundingClientRect() + void applyZoomAt( + zoomPercentRef.current * (1 - event.deltaY * ZOOM_WHEEL_SENSITIVITY), + event.clientX - rect.left, + event.clientY - rect.top + ) + }) + }, [applyZoomAt]) + + async function goToSlide(slideNumber: number) { + if (!activeHandleRef.current || slideCount <= 0) return + const clampedSlide = Math.min(Math.max(slideNumber, 1), slideCount) + if (zoomPercentRef.current !== 100) { + await applyZoomFromCenter(100) + } + setCurrentSlide(clampedSlide) + await activeHandleRef.current.viewer.goToSlide(clampedSlide - 1) + } + + return ( +
    + 0 && currentSlide > 1, + canNext: slideCount > 0 && currentSlide < slideCount, + onPrevious: () => goToSlide(currentSlide - 1), + onNext: () => goToSlide(currentSlide + 1), + }} + zoom={{ + label: `${zoomPercent}%`, + canZoomOut: zoomPercent > ZOOM_MIN, + canZoomIn: zoomPercent < ZOOM_MAX, + onReset: () => { + void applyZoomFromCenter(100) + }, + onZoomOut: () => { + void applyZoomFromCenter(zoomPercent - ZOOM_STEP) + }, + onZoomIn: () => { + void applyZoomFromCenter(zoomPercent + ZOOM_STEP) + }, + }} + /> +
    +
    +
    +
    + ) +}) diff --git a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/preview-toolbar.tsx b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/preview-toolbar.tsx new file mode 100644 index 00000000000..a6cff06ad91 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/preview-toolbar.tsx @@ -0,0 +1,130 @@ +import { ChevronLeft, ChevronRight, ZoomIn, ZoomOut } from 'lucide-react' +import { Button } from '@/components/emcn' +import { cn } from '@/lib/core/utils/cn' + +interface PreviewNavigationControls { + current: number + total: number + label: string + onPrevious: () => void + onNext: () => void + canPrevious?: boolean + canNext?: boolean +} + +interface PreviewZoomControls { + label: string + onZoomOut: () => void + onZoomIn: () => void + canZoomOut?: boolean + canZoomIn?: boolean + onReset?: () => void +} + +interface PreviewToolbarProps { + navigation?: PreviewNavigationControls + zoom?: PreviewZoomControls + className?: string +} + +export function PreviewToolbar({ navigation, zoom, className }: PreviewToolbarProps) { + return ( +
    +
    + {navigation && } +
    +
    {zoom && }
    +
    + ) +} + +function PreviewNavigationControls({ + current, + total, + label, + onPrevious, + onNext, + canPrevious = current > 1, + canNext = current < total, +}: PreviewNavigationControls) { + return ( + <> + + + {total > 0 ? `${current} / ${total}` : '0 / 0'} + + + + ) +} + +function PreviewZoomControls({ + label, + onZoomOut, + onZoomIn, + canZoomOut = true, + canZoomIn = true, + onReset, +}: PreviewZoomControls) { + return ( + <> + {onReset && ( + + )} + + + {label} + + + + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/preview-wheel-zoom.ts b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/preview-wheel-zoom.ts new file mode 100644 index 00000000000..9d68f0b89bc --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/preview-wheel-zoom.ts @@ -0,0 +1,24 @@ +/** + * Bind browser pinch/ctrl-wheel zoom and horizontal wheel gestures for preview scroll containers. + */ +export function bindPreviewWheelZoom( + container: HTMLElement, + onZoom: (event: WheelEvent) => void +): () => void { + const onWheel = (event: WheelEvent) => { + if (event.ctrlKey) { + event.preventDefault() + onZoom(event) + return + } + + const horizontalDelta = event.deltaX !== 0 ? event.deltaX : event.shiftKey ? 
event.deltaY : 0 + if (horizontalDelta === 0 || container.scrollWidth <= container.clientWidth) return + + event.preventDefault() + container.scrollLeft += horizontalDelta + } + + container.addEventListener('wheel', onWheel, { capture: true, passive: false }) + return () => container.removeEventListener('wheel', onWheel, { capture: true }) +} diff --git a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/zoomable-preview.tsx b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/zoomable-preview.tsx index 151b4325a8d..21f779526d1 100644 --- a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/zoomable-preview.tsx +++ b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/zoomable-preview.tsx @@ -2,9 +2,8 @@ import type { MouseEvent, ReactNode } from 'react' import { useCallback, useEffect, useLayoutEffect, useRef, useState } from 'react' -import { ZoomIn, ZoomOut } from 'lucide-react' -import { Button } from '@/components/emcn' import { cn } from '@/lib/core/utils/cn' +import { PreviewToolbar } from './preview-toolbar' const ZOOM_MIN = 0.25 const ZOOM_MAX = 4 @@ -77,7 +76,7 @@ export function ZoomablePreview({ const [offset, setOffset] = useState({ x: 0, y: 0 }) const [containerSize, setContainerSize] = useState({ width: 0, height: 0 }) const [contentSize, setContentSize] = useState({ width: 0, height: 0 }) - const containerRef = useRef(null) + const viewportRef = useRef(null) const contentRef = useRef(null) const isDragging = useRef(false) const dragStart = useRef({ x: 0, y: 0 }) @@ -92,11 +91,27 @@ export function ZoomablePreview({ containerSizeRef.current = containerSize contentSizeRef.current = contentSize - const applyZoom = useCallback((nextZoom: number) => { + const applyZoom = useCallback((nextZoom: number, anchorX?: number, anchorY?: number) => { + const currentZoom = zoomRef.current + const ratio = nextZoom / currentZoom + const container = containerSizeRef.current + const anchorFromCenter = { + x: 
(anchorX ?? container.width / 2) - container.width / 2, + y: (anchorY ?? container.height / 2) - container.height / 2, + } + zoomRef.current = nextZoom setZoom(nextZoom) setOffset((currentOffset) => - clampOffset(containerSizeRef.current, contentSizeRef.current, currentOffset, nextZoom) + clampOffset( + container, + contentSizeRef.current, + { + x: currentOffset.x * ratio + anchorFromCenter.x * (1 - ratio), + y: currentOffset.y * ratio + anchorFromCenter.y * (1 - ratio), + }, + nextZoom + ) ) }, []) @@ -119,14 +134,19 @@ export function ZoomablePreview({ } useEffect(() => { - const el = containerRef.current + const el = viewportRef.current if (!el) return const onWheel = (e: WheelEvent) => { e.preventDefault() if (e.ctrlKey || e.metaKey) { hasInteractedRef.current = true - applyZoom(clampZoom(zoomRef.current * Math.exp(-e.deltaY * ZOOM_WHEEL_SENSITIVITY))) + const rect = el.getBoundingClientRect() + applyZoom( + clampZoom(zoomRef.current * Math.exp(-e.deltaY * ZOOM_WHEEL_SENSITIVITY)), + e.clientX - rect.left, + e.clientY - rect.top + ) } else { hasInteractedRef.current = true setOffset((currentOffset) => @@ -149,12 +169,12 @@ export function ZoomablePreview({ useLayoutEffect(() => { const updateSizes = () => { - setContainerSize(getElementSize(containerRef.current)) + setContainerSize(getElementSize(viewportRef.current)) setContentSize(getElementSize(contentRef.current)) } updateSizes() - const container = containerRef.current + const container = viewportRef.current const content = contentRef.current if (!container || !content) return @@ -200,7 +220,7 @@ export function ZoomablePreview({ isDragging.current = true dragStart.current = { x: e.clientX, y: e.clientY } offsetAtDragStart.current = offsetRef.current - if (containerRef.current) containerRef.current.style.cursor = 'grabbing' + if (viewportRef.current) viewportRef.current.style.cursor = 'grabbing' e.preventDefault() } @@ -221,66 +241,41 @@ export function ZoomablePreview({ const handleMouseUp = () => { 
isDragging.current = false - if (containerRef.current) containerRef.current.style.cursor = 'grab' + if (viewportRef.current) viewportRef.current.style.cursor = 'grab' } return ( -
    -
    -
    - {children} -
    -
    +
    + ZOOM_MIN, + canZoomIn: zoom < ZOOM_MAX, + onReset: fitToView, + onZoomOut: zoomOut, + onZoomIn: zoomIn, + }} + />
    e.stopPropagation()} + ref={viewportRef} + className='relative min-h-0 flex-1 cursor-grab overflow-hidden' + onMouseDown={handleMouseDown} + onMouseMove={handleMouseMove} + onMouseUp={handleMouseUp} + onMouseLeave={handleMouseUp} > - - - - {Math.round(zoom * 100)}% - - +
    +
    + {children} +
    +
    ) diff --git a/apps/sim/lib/mothership/inbox/response.ts b/apps/sim/lib/mothership/inbox/response.ts index cd41de83f13..aa241d05f1e 100644 --- a/apps/sim/lib/mothership/inbox/response.ts +++ b/apps/sim/lib/mothership/inbox/response.ts @@ -1,10 +1,7 @@ +import { type ComponentType, type CSSProperties, createElement, type ReactNode } from 'react' +import { Body, Head, Html, Link, Markdown, Section, Text } from '@react-email/components' +import { render } from '@react-email/render' import { createLogger } from '@sim/logger' -import { toHtml } from 'hast-util-to-html' -import remarkBreaks from 'remark-breaks' -import remarkGfm from 'remark-gfm' -import remarkParse from 'remark-parse' -import remarkRehype from 'remark-rehype' -import { unified } from 'unified' import { getBaseUrl } from '@/lib/core/utils/urls' import * as agentmail from '@/lib/mothership/inbox/agentmail-client' import { replaceUntilStable } from '@/lib/mothership/inbox/format' @@ -43,7 +40,7 @@ export async function sendInboxResponse( const html = result.success ? 
await renderEmailHtml(result.content, chatUrl) - : renderErrorHtml(result.error || 'Unknown error', chatUrl) + : await renderErrorHtml(result.error || 'Unknown error', chatUrl) try { const response = await agentmail.replyToMessage( @@ -63,27 +60,199 @@ export async function sendInboxResponse( } } -const EMAIL_STYLES = ` - body { font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Inter, Roboto, sans-serif; font-size: 15px; line-height: 25px; color: #1a1a1a; font-weight: 430; } - p { margin: 0 0 16px 0; } - h1, h2, h3, h4 { font-weight: 600; color: #1a1a1a; margin-top: 24px; margin-bottom: 12px; } - h1 { font-size: 24px; } h2 { font-size: 20px; } h3 { font-size: 16px; } h4 { font-size: 15px; } - strong { font-weight: 600; color: #1a1a1a; } - pre { background: #f3f3f3; padding: 16px; border-radius: 8px; border: 1px solid #ededed; overflow-x: auto; margin: 24px 0; } - code { background: #f3f3f3; padding: 2px 6px; border-radius: 4px; font-family: ui-monospace, SFMono-Regular, 'SF Mono', Menlo, monospace; font-size: 13px; color: #1a1a1a; } - pre code { background: none; padding: 0; font-size: 13px; line-height: 21px; } - table { border-collapse: collapse; margin: 16px 0; } - th, td { border: 1px solid #ededed; padding: 8px 12px; text-align: left; font-size: 14px; } - th { background: #f5f5f5; font-weight: 600; } - tr { border-bottom: 1px solid #ededed; } - blockquote { border-left: 4px solid #e0e0e0; margin: 16px 0; padding: 4px 16px; color: #525252; font-style: italic; } - a { color: #2563eb; text-decoration: underline; text-decoration-style: dashed; text-underline-offset: 2px; } - ul, ol { margin: 16px 0; padding-left: 24px; } - li { margin: 4px 0; } - hr { border: none; border-top: 1px solid #ededed; margin: 24px 0; } - .signature { color: #525252; margin-top: 32px; font-size: 14px; } - .signature a { color: #1a1a1a; text-decoration: underline; text-decoration-style: dashed; text-underline-offset: 2px; } -` +const FONT_FAMILY = "-apple-system, 
BlinkMacSystemFont, 'Segoe UI', Inter, Roboto, sans-serif" +const CODE_FONT_FAMILY = "ui-monospace, SFMono-Regular, 'SF Mono', Menlo, monospace" + +const emailStyles = { + body: { + fontFamily: FONT_FAMILY, + fontSize: '15px', + lineHeight: '25px', + color: '#1a1a1a', + fontWeight: 430, + }, + content: { + margin: 0, + }, + markdownContainer: { + margin: 0, + }, + signature: { + color: '#525252', + marginTop: '32px', + fontSize: '14px', + }, + signatureText: { + color: '#525252', + margin: '0 0 16px 0', + fontSize: '14px', + lineHeight: '25px', + fontFamily: FONT_FAMILY, + }, + signatureLink: { + color: '#1a1a1a', + textDecoration: 'underline', + textDecorationStyle: 'dashed', + textUnderlineOffset: '2px', + }, +} satisfies Record + +const markdownStyles = { + p: { + margin: '0 0 16px 0', + fontSize: '15px', + lineHeight: '25px', + color: '#1a1a1a', + fontFamily: FONT_FAMILY, + fontWeight: 430, + }, + h1: { + fontWeight: 600, + color: '#1a1a1a', + margin: '24px 0 12px 0', + fontSize: '24px', + lineHeight: '32px', + fontFamily: FONT_FAMILY, + }, + h2: { + fontWeight: 600, + color: '#1a1a1a', + margin: '24px 0 12px 0', + fontSize: '20px', + lineHeight: '28px', + fontFamily: FONT_FAMILY, + }, + h3: { + fontWeight: 600, + color: '#1a1a1a', + margin: '24px 0 12px 0', + fontSize: '16px', + lineHeight: '24px', + fontFamily: FONT_FAMILY, + }, + h4: { + fontWeight: 600, + color: '#1a1a1a', + margin: '24px 0 12px 0', + fontSize: '15px', + lineHeight: '25px', + fontFamily: FONT_FAMILY, + }, + strong: { + fontWeight: 600, + color: '#1a1a1a', + }, + codeInline: { + backgroundColor: '#f3f3f3', + padding: '2px 6px', + borderRadius: '4px', + fontFamily: CODE_FONT_FAMILY, + fontSize: '13px', + color: '#1a1a1a', + }, + codeBlock: { + backgroundColor: '#f3f3f3', + padding: '16px', + borderRadius: '8px', + border: '1px solid #ededed', + overflowX: 'auto', + margin: '24px 0', + fontFamily: CODE_FONT_FAMILY, + fontSize: '13px', + lineHeight: '21px', + color: '#1a1a1a', + }, + table: { + 
borderCollapse: 'collapse', + margin: '16px 0', + }, + th: { + border: '1px solid #ededed', + padding: '8px 12px', + textAlign: 'left', + fontSize: '14px', + backgroundColor: '#f5f5f5', + fontWeight: 600, + }, + td: { + border: '1px solid #ededed', + padding: '8px 12px', + textAlign: 'left', + fontSize: '14px', + }, + blockQuote: { + borderLeft: '4px solid #e0e0e0', + margin: '16px 0', + padding: '4px 16px', + color: '#525252', + fontStyle: 'italic', + }, + a: { + color: '#2563eb', + textDecoration: 'underline', + textDecorationStyle: 'dashed', + textUnderlineOffset: '2px', + }, + ul: { + margin: '16px 0', + paddingLeft: '24px', + }, + ol: { + margin: '16px 0', + paddingLeft: '24px', + }, + li: { + margin: '4px 0', + }, + hr: { + border: 'none', + borderTop: '1px solid #ededed', + margin: '24px 0', + }, +} satisfies Record + +interface InboxResponseEmailProps { + children?: ReactNode + chatUrl: string + linkLabel: string +} + +interface EmailMarkdownProps { + children?: string + markdownContainerStyles?: CSSProperties + markdownCustomStyles?: Record +} + +const EmailMarkdown = Markdown as ComponentType + +function InboxResponseEmail({ children, chatUrl, linkLabel }: InboxResponseEmailProps) { + return createElement( + Html, + { lang: 'en', dir: 'ltr' }, + createElement(Head), + createElement( + Body, + { style: emailStyles.body }, + createElement(Section, { style: emailStyles.content }, children), + createElement( + Section, + { style: emailStyles.signature }, + createElement( + Text, + { style: emailStyles.signatureText }, + createElement(Link, { href: chatUrl, style: emailStyles.signatureLink }, linkLabel) + ), + createElement( + Text, + { style: emailStyles.signatureText }, + 'Best,', + createElement('br'), + 'Mothership' + ) + ) + ) + ) +} function stripRawHtml(text: string): string { return text @@ -94,56 +263,54 @@ function stripRawHtml(text: string): string { .join('') } -function stripUnsafeUrls(html: string): string { - return 
html.replace(/href\s*=\s*"(javascript|vbscript|data):[^"]*"/gi, 'href="#"') +function preserveSoftBreaks(text: string): string { + return text + .split(/(```[\s\S]*?```)/g) + .map((segment, i) => (i % 2 === 0 ? segment.replace(/([^\n])\n(?=[^\n])/g, '$1 \n') : segment)) + .join('') } -const markdownProcessor = unified() - .use(remarkParse) - .use(remarkGfm) - .use(remarkBreaks) - .use(remarkRehype) - -async function markdownToHtml(markdown: string): Promise { - const mdast = markdownProcessor.parse(markdown) - const hast = await markdownProcessor.run(mdast) - return toHtml(hast) +function stripUnsafeUrls(html: string): string { + return html.replace(/href\s*=\s*(['"])(?:javascript|vbscript|data):.*?\1/gi, 'href="#"') } async function renderEmailHtml(markdown: string, chatUrl: string): Promise { - const bodyHtml = stripUnsafeUrls(await markdownToHtml(stripRawHtml(markdown))) - - return ` - -${bodyHtml} -
    -

    View full conversation

    -

    Best,
    Mothership

    -
    -` -} + const safeMarkdown = preserveSoftBreaks(stripRawHtml(markdown)) + const html = await render( + createElement( + InboxResponseEmail, + { chatUrl, linkLabel: 'View full conversation' }, + createElement( + EmailMarkdown, + { + markdownContainerStyles: emailStyles.markdownContainer, + markdownCustomStyles: markdownStyles, + }, + safeMarkdown + ) + ) + ) -function renderErrorHtml(error: string, chatUrl: string): string { - return ` - -

    I wasn't able to complete this task.

    -

    Error: ${escapeHtml(error)}

    -
    -

    View details

    -

    Best,
    Mothership

    -
    -` + return stripUnsafeUrls(html) } -function escapeHtml(str: string): string { - return str.replace(/&/g, '&').replace(//g, '>') -} +async function renderErrorHtml(error: string, chatUrl: string): Promise { + const html = await render( + createElement( + InboxResponseEmail, + { chatUrl, linkLabel: 'View details' }, + createElement( + Text, + { key: 'message', style: markdownStyles.p }, + "I wasn't able to complete this task." + ), + createElement( + Text, + { key: 'error', style: { ...markdownStyles.p, color: '#6b7280' } }, + `Error: ${error}` + ) + ) + ) -function escapeAttr(str: string): string { - return str - .replace(/&/g, '&') - .replace(/"/g, '"') - .replace(/'/g, ''') - .replace(//g, '>') + return stripUnsafeUrls(html) } diff --git a/apps/sim/lib/pptx-renderer/core/viewer.ts b/apps/sim/lib/pptx-renderer/core/viewer.ts new file mode 100644 index 00000000000..6dac3b530cb --- /dev/null +++ b/apps/sim/lib/pptx-renderer/core/viewer.ts @@ -0,0 +1,968 @@ +import type { ECharts } from 'echarts' +import { buildPresentation, type PresentationData } from '../model/presentation' +import type { ZipParseLimits } from '../parser/zip-parser' +import { parseZip } from '../parser/zip-parser' +import type { SlideHandle } from '../renderer/slide-renderer' +import { renderSlide as renderSlideInternal } from '../renderer/slide-renderer' +import { isAllowedExternalUrl } from '../utils/url-safety' + +export type { SlideHandle } from '../renderer/slide-renderer' + +export type FitMode = 'contain' | 'none' + +export type PreviewInput = ArrayBuffer | Uint8Array | Blob + +export interface ViewerOptions { + width?: number + /** Scaling mode. contain = fit container width, none = use intrinsic slide size. */ + fitMode?: FitMode + /** Initial zoom percentage. Effective scale = fitScale * zoomPercent/100. */ + zoomPercent?: number + /** + * Scroll container element used as IntersectionObserver root in list mode + * (both windowed mounting and scroll-based slide tracking). 
+ * When omitted, the viewport (null root) is used. + */ + scrollContainer?: HTMLElement + /** Optional ZIP parsing limits for controlling resource usage and DoS surface. */ + zipLimits?: ZipParseLimits + onSlideChange?: (index: number) => void + onSlideRendered?: (index: number, element: HTMLElement) => void + onSlideError?: (index: number, error: unknown) => void + onSlideUnmounted?: (index: number) => void + onNodeError?: (nodeId: string, error: unknown) => void + onRenderStart?: () => void + onRenderComplete?: () => void +} + +export interface ListRenderOptions { + windowed?: boolean + batchSize?: number + initialSlides?: number + overscanViewport?: number + /** Show "Slide N" labels below each slide. Default `false`. */ + showSlideLabels?: boolean +} + +export interface PptxViewerEventMap { + renderstart: Event + rendercomplete: Event + slidechange: CustomEvent<{ index: number }> + sliderendered: CustomEvent<{ index: number; element: HTMLElement }> + slideerror: CustomEvent<{ index: number; error: unknown }> + slideunmounted: CustomEvent<{ index: number }> + nodeerror: CustomEvent<{ nodeId: string; error: unknown }> +} + +export class PptxViewer extends EventTarget { + protected container: HTMLElement + private viewerOptions: ViewerOptions + private presentation: PresentationData | null = null + private mediaUrlCache = new Map() + private chartInstances = new Set() + private currentSlide = 0 + private _fitMode: FitMode + private _isRendering = false + private zoomFactor = 1 + private renderChain: Promise = Promise.resolve() + private cleanupListMount?: () => void + private cleanupScrollObserver?: () => void + private suppressScrollChange = false + private ensureListSlideMountedFn?: (index: number) => void + private resizeObserver?: ResizeObserver + private windowResizeHandler?: () => void + private resizeRafId: number | null = null + private lastMeasuredContainerWidth = 0 + private mountedSlides = new Set() + private slideHandles = new Map() + private 
activeRenderMode: 'list' | 'slide' | null = null + private listOptions: Required = { + windowed: false, + batchSize: 12, + initialSlides: 4, + overscanViewport: 1.5, + showSlideLabels: false, + } + + constructor(container: HTMLElement, options?: ViewerOptions) { + super() + this.container = container + this.viewerOptions = options ?? {} + const zoomPercent = this.normalizeZoomPercent(options?.zoomPercent ?? 100) + this._fitMode = options?.fitMode ?? 'contain' + this.zoomFactor = zoomPercent / 100 + + // Register shorthand callbacks as event listeners + if (options?.onSlideChange) { + const cb = options.onSlideChange + this.addEventListener('slidechange', ((e: CustomEvent) => + cb(e.detail.index)) as EventListener) + } + if (options?.onSlideRendered) { + const cb = options.onSlideRendered + this.addEventListener('sliderendered', ((e: CustomEvent) => + cb(e.detail.index, e.detail.element)) as EventListener) + } + if (options?.onSlideError) { + const cb = options.onSlideError + this.addEventListener('slideerror', ((e: CustomEvent) => + cb(e.detail.index, e.detail.error)) as EventListener) + } + if (options?.onSlideUnmounted) { + const cb = options.onSlideUnmounted + this.addEventListener('slideunmounted', ((e: CustomEvent) => + cb(e.detail.index)) as EventListener) + } + if (options?.onNodeError) { + const cb = options.onNodeError + this.addEventListener('nodeerror', ((e: CustomEvent) => + cb(e.detail.nodeId, e.detail.error)) as EventListener) + } + if (options?.onRenderStart) { + const cb = options.onRenderStart + this.addEventListener('renderstart', () => cb()) + } + if (options?.onRenderComplete) { + const cb = options.onRenderComplete + this.addEventListener('rendercomplete', () => cb()) + } + } + + // ----------------------------------------------------------------------- + // Event dispatch helpers + // ----------------------------------------------------------------------- + + private emitRenderStart(): void { + this._isRendering = true + this.dispatchEvent(new 
Event('renderstart')) + } + + private emitRenderComplete(): void { + this._isRendering = false + this.dispatchEvent(new Event('rendercomplete')) + } + + private emitSlideChange(index: number): void { + this.dispatchEvent(new CustomEvent('slidechange', { detail: { index } })) + } + + private emitSlideRendered(index: number, element: HTMLElement): void { + this.dispatchEvent(new CustomEvent('sliderendered', { detail: { index, element } })) + } + + private emitSlideError(index: number, error: unknown): void { + this.dispatchEvent(new CustomEvent('slideerror', { detail: { index, error } })) + } + + private emitSlideUnmounted(index: number): void { + this.dispatchEvent(new CustomEvent('slideunmounted', { detail: { index } })) + } + + private emitNodeError(nodeId: string, error: unknown): void { + this.dispatchEvent(new CustomEvent('nodeerror', { detail: { nodeId, error } })) + } + + // ----------------------------------------------------------------------- + // Public: load / render modes + // ----------------------------------------------------------------------- + + /** + * Load a parsed presentation model. Does NOT render — call `renderList()` or + * `renderSlide()` afterwards. + */ + load(presentation: PresentationData): void { + this.presentation = presentation + this.setupAdaptiveResize() + } + + /** + * Render all slides in a scrollable list. + */ + async renderList(options?: ListRenderOptions): Promise { + this.activeRenderMode = 'list' + this.listOptions = { + windowed: options?.windowed ?? false, + batchSize: this.normalizeBatchSize(options?.batchSize ?? 12), + initialSlides: this.normalizePositiveInt(options?.initialSlides ?? 4, 4), + overscanViewport: this.normalizePositiveFloat(options?.overscanViewport ?? 1.5, 1.5), + showSlideLabels: options?.showSlideLabels ?? false, + } + await this.queueRender() + } + + /** + * Render a single slide (no built-in nav UI). 
+ */ + async renderSlide(index?: number): Promise { + this.activeRenderMode = 'slide' + if (index !== undefined && this.presentation) { + this.currentSlide = Math.max(0, Math.min(index, this.presentation.slides.length - 1)) + } + await this.queueRender() + } + + // ----------------------------------------------------------------------- + // Instance open + // ----------------------------------------------------------------------- + + async open( + input: PreviewInput, + options?: { + renderMode?: 'list' | 'slide' + listOptions?: ListRenderOptions + signal?: AbortSignal + } + ): Promise { + const signal = options?.signal + const checkAborted = () => { + if (signal?.aborted) { + throw new DOMException('Preview aborted', 'AbortError') + } + } + + checkAborted() + + // Clean up previous state + this.destroy() + + const buffer = await normalizePreviewInput(input) + checkAborted() + + const files = await parseZip(buffer, this.viewerOptions.zipLimits) + checkAborted() + + const presentation = buildPresentation(files) + checkAborted() + + this.load(presentation) + + const renderMode = options?.renderMode ?? 
'list' + if (renderMode === 'slide') { + await this.renderSlide(0) + } else { + await this.renderList(options?.listOptions) + } + + checkAborted() + } + + // ----------------------------------------------------------------------- + // Static factory + // ----------------------------------------------------------------------- + + static async open( + input: PreviewInput, + container: HTMLElement, + options?: ViewerOptions & { + renderMode?: 'list' | 'slide' + listOptions?: ListRenderOptions + signal?: AbortSignal + } + ): Promise { + const viewer = new PptxViewer(container, options) + await viewer.open(input, { + renderMode: options?.renderMode, + listOptions: options?.listOptions, + signal: options?.signal, + }) + return viewer + } + + // ----------------------------------------------------------------------- + // Navigation + // ----------------------------------------------------------------------- + + async goToSlide(index: number, scrollOptions?: ScrollIntoViewOptions): Promise { + if (!this.presentation) return + const prev = this.currentSlide + this.currentSlide = Math.max(0, Math.min(index, this.presentation.slides.length - 1)) + if (this.currentSlide !== prev) { + this.emitSlideChange(this.currentSlide) + } + if (this.activeRenderMode === 'slide') { + const { scale, displayWidth, displayHeight } = this.getDisplayMetrics() + this.renderSingleSlide(scale, displayWidth, displayHeight) + } else { + this.suppressScrollChange = true + await new Promise((resolve) => + requestAnimationFrame(() => { + this.suppressScrollChange = false + resolve() + }) + ) + this.ensureListSlideMountedFn?.(this.currentSlide) + const targetChild = this.container.querySelector( + `[data-slide-index="${this.currentSlide}"]` + ) + if (targetChild) { + targetChild.scrollIntoView(scrollOptions ?? 
{ behavior: 'smooth', block: 'center' }) + } + } + } + + async setZoom(percent: number): Promise { + const normalized = this.normalizeZoomPercent(percent) + const nextFactor = normalized / 100 + if (nextFactor === this.zoomFactor) return + this.zoomFactor = nextFactor + await this.queueRender() + } + + async setFitMode(mode: FitMode): Promise { + if (this._fitMode === mode) return + this._fitMode = mode + if (mode === 'none') { + this.lastMeasuredContainerWidth = 0 + } + await this.queueRender() + } + + // ----------------------------------------------------------------------- + // Getters + // ----------------------------------------------------------------------- + + get presentationData(): PresentationData | null { + return this.presentation + } + + get slideCount(): number { + return this.presentation?.slides.length ?? 0 + } + + get slideWidth(): number { + return this.presentation?.width ?? 0 + } + + get slideHeight(): number { + return this.presentation?.height ?? 0 + } + + get currentSlideIndex(): number { + return this.currentSlide + } + + get isRendering(): boolean { + return this._isRendering + } + + get zoomPercent(): number { + return this.zoomFactor * 100 + } + + get fitMode(): FitMode { + return this._fitMode + } + + // ----------------------------------------------------------------------- + // Typed event helpers + // ----------------------------------------------------------------------- + + on( + type: K, + listener: (event: PptxViewerEventMap[K]) => void + ): this { + this.addEventListener(type, listener as EventListener) + return this + } + + off( + type: K, + listener: (event: PptxViewerEventMap[K]) => void + ): this { + this.removeEventListener(type, listener as EventListener) + return this + } + + isSlideMounted(index: number): boolean { + return this.mountedSlides.has(index) + } + + getMountedSlides(): number[] { + return [...this.mountedSlides].sort((a, b) => a - b) + } + + // 
----------------------------------------------------------------------- + // External slide rendering + // ----------------------------------------------------------------------- + + /** + * Render a single slide into an external container element. + * Useful for React/Vue integration, thumbnail generation, etc. + * + * **Ownership:** The caller owns the returned {@link SlideHandle} and is + * responsible for calling `handle.dispose()` when the slide is no longer + * needed. `destroy()` does NOT automatically dispose externally-rendered + * handles. + */ + renderSlideToContainer( + index: number, + container: HTMLElement, + scale?: number + ): SlideHandle | null { + if (!this.presentation) return null + const slide = this.presentation.slides[index] + if (!slide) return null + + const handle = renderSlideInternal(this.presentation, slide, { + onNodeError: (nodeId, error) => this.emitNodeError(nodeId, error), + onNavigate: (target) => this.handleNavigate(target), + mediaUrlCache: this.mediaUrlCache, + chartInstances: this.chartInstances, + }) + + if (scale !== undefined && scale !== 1) { + handle.element.style.transform = `scale(${scale})` + handle.element.style.transformOrigin = 'top left' + } + + container.appendChild(handle.element) + this.emitSlideRendered(index, handle.element) + return handle + } + + /** + * Hook called after rendering a single slide. Override in subclasses to + * append additional UI (e.g. navigation buttons). 
+ */ + protected afterSingleSlideRender(): void { + // No-op in base class + } + + // ----------------------------------------------------------------------- + // Cleanup + // ----------------------------------------------------------------------- + + destroy(): void { + this.teardownAdaptiveResize() + this.cleanupScrollObserver?.() + this.cleanupScrollObserver = undefined + this.cleanupListMount?.() + this.cleanupListMount = undefined + this.ensureListSlideMountedFn = undefined + this.mountedSlides.clear() + for (const handle of this.slideHandles.values()) { + handle.dispose() + } + this.slideHandles.clear() + this.disposeAllCharts() + for (const url of this.mediaUrlCache.values()) { + URL.revokeObjectURL(url) + } + this.mediaUrlCache.clear() + this.container.innerHTML = '' + this.presentation = null + this.activeRenderMode = null + } + + [Symbol.dispose](): void { + this.destroy() + } + + // ----------------------------------------------------------------------- + // Internal: rendering pipeline + // ----------------------------------------------------------------------- + + private normalizeZoomPercent(percent: number): number { + if (!Number.isFinite(percent)) return 100 + return Math.max(10, Math.min(400, percent)) + } + + private normalizeBatchSize(val: number): number { + return Number.isInteger(val) && val > 0 ? val : 12 + } + + private normalizePositiveInt(val: number, fallback: number): number { + return Number.isInteger(val) && val > 0 ? val : fallback + } + + private normalizePositiveFloat(val: number, fallback: number): number { + return Number.isFinite(val) && val > 0 ? val : fallback + } + + private getDisplayMetrics(): { scale: number; displayWidth: number; displayHeight: number } { + if (!this.presentation) { + return { scale: 1, displayWidth: 0, displayHeight: 0 } + } + const fitWidth = this.viewerOptions.width ?? 
(this.container.clientWidth || 960) + if (this._fitMode === 'contain' && this.viewerOptions.width === undefined) { + this.lastMeasuredContainerWidth = fitWidth + } + const fitScale = this._fitMode === 'contain' ? fitWidth / this.presentation.width : 1 + const scale = fitScale * this.zoomFactor + return { + scale, + displayWidth: this.presentation.width * scale, + displayHeight: this.presentation.height * scale, + } + } + + private async queueRender(): Promise { + this.renderChain = this.renderChain.then(async () => { + if (!this.presentation) return + this.emitRenderStart() + try { + const { scale, displayWidth, displayHeight } = this.getDisplayMetrics() + + this.cleanupScrollObserver?.() + this.cleanupScrollObserver = undefined + this.cleanupListMount?.() + this.cleanupListMount = undefined + this.ensureListSlideMountedFn = undefined + this.mountedSlides.clear() + for (const handle of this.slideHandles.values()) { + handle.dispose() + } + this.slideHandles.clear() + this.disposeAllCharts() + this.container.innerHTML = '' + this.container.style.position = 'relative' + + if (this.activeRenderMode === 'slide') { + this.renderSingleSlide(scale, displayWidth, displayHeight) + } else if (this.listOptions.windowed) { + await this.renderAllSlidesWindowed(scale, displayWidth, displayHeight) + } else { + await this.renderAllSlidesFull(scale, displayWidth, displayHeight) + } + + // Post-render width correction: appending slides may cause a scrollbar + // to appear on the page, narrowing the container. If the measured width + // changed, patch wrapper sizes and scale transforms in-place so content + // is not clipped by the (now narrower) container. 
+ if (this.activeRenderMode !== 'slide') { + this.correctListMetricsIfNeeded() + } + + this.emitSlideChange(this.currentSlide) + } finally { + this.emitRenderComplete() + } + }) + return this.renderChain + } + + private handleContainerResize(): void { + if (!this.presentation) return + if (this._fitMode !== 'contain') return + if (this.viewerOptions.width !== undefined) return + + const nextWidth = this.container.clientWidth || 0 + if (!nextWidth || nextWidth === this.lastMeasuredContainerWidth) return + this.lastMeasuredContainerWidth = nextWidth + + if (this.resizeRafId !== null) { + cancelAnimationFrame(this.resizeRafId) + } + this.resizeRafId = requestAnimationFrame(() => { + this.resizeRafId = null + void this.queueRender() + }) + } + + private setupAdaptiveResize(): void { + this.teardownAdaptiveResize() + + if (typeof ResizeObserver !== 'undefined') { + const observer = new ResizeObserver(() => this.handleContainerResize()) + observer.observe(this.container) + this.resizeObserver = observer + return + } + + this.windowResizeHandler = () => this.handleContainerResize() + window.addEventListener('resize', this.windowResizeHandler) + } + + private teardownAdaptiveResize(): void { + this.resizeObserver?.disconnect() + this.resizeObserver = undefined + if (this.windowResizeHandler) { + window.removeEventListener('resize', this.windowResizeHandler) + this.windowResizeHandler = undefined + } + if (this.resizeRafId !== null) { + cancelAnimationFrame(this.resizeRafId) + this.resizeRafId = null + } + } + + private disposeAllCharts(): void { + for (const chart of this.chartInstances) { + if (!chart.isDisposed()) { + chart.dispose() + } + } + this.chartInstances.clear() + } + + private createListSlideItem( + index: number, + displayWidth: number, + displayHeight: number + ): { item: HTMLDivElement; wrapper: HTMLDivElement } { + const item = document.createElement('div') + item.dataset.slideIndex = String(index) + item.style.cssText = 'width: fit-content; margin: 0 auto 
20px;' + + const wrapper = document.createElement('div') + wrapper.style.cssText = ` + width: ${displayWidth}px; + height: ${displayHeight}px; + box-shadow: 0 2px 8px rgba(0,0,0,0.15); + overflow: hidden; + position: relative; + background: #fff; + ` + + item.appendChild(wrapper) + + if (this.listOptions.showSlideLabels) { + const label = document.createElement('div') + label.style.cssText = 'text-align: center; padding: 4px; font-size: 12px; color: #666;' + label.textContent = `Slide ${index + 1}` + item.appendChild(label) + } + return { item, wrapper } + } + + private mountListSlide( + index: number, + wrapper: HTMLDivElement, + scale: number, + _displayWidth: number, + _displayHeight: number + ): void { + if (!this.presentation) return + if (wrapper.dataset.mounted === '1') return + wrapper.dataset.mounted = '1' + wrapper.innerHTML = '' + this.mountedSlides.add(index) + + const slide = this.presentation.slides[index] + try { + const handle = renderSlideInternal(this.presentation, slide, { + onNodeError: (nodeId, error) => this.emitNodeError(nodeId, error), + onNavigate: (target) => this.handleNavigate(target), + mediaUrlCache: this.mediaUrlCache, + chartInstances: this.chartInstances, + }) + + this.slideHandles.set(index, handle) + handle.element.style.transform = `scale(${scale})` + handle.element.style.transformOrigin = 'top left' + wrapper.appendChild(handle.element) + this.emitSlideRendered(index, handle.element) + } catch (e) { + this.emitSlideError(index, e) + wrapper.style.background = '#fff3f3' + wrapper.style.display = 'flex' + wrapper.style.alignItems = 'center' + wrapper.style.justifyContent = 'center' + wrapper.style.border = '2px dashed #ff6b6b' + wrapper.style.color = '#cc0000' + wrapper.style.fontSize = '14px' + wrapper.textContent = `Slide ${index + 1}: Render Error - ${e instanceof Error ? 
e.message : String(e)}` + } + } + + private unmountListSlide(index: number, wrapper: HTMLDivElement, displayHeight: number): void { + if (wrapper.dataset.mounted !== '1') return + wrapper.dataset.mounted = '0' + this.mountedSlides.delete(index) + const handle = this.slideHandles.get(index) + if (handle) { + handle.dispose() + this.slideHandles.delete(index) + } + wrapper.innerHTML = '' + wrapper.style.background = '#fff' + wrapper.style.display = '' + wrapper.style.alignItems = '' + wrapper.style.justifyContent = '' + wrapper.style.border = '' + wrapper.style.color = '' + wrapper.style.fontSize = '' + wrapper.style.height = `${displayHeight}px` + this.emitSlideUnmounted(index) + } + + private async renderAllSlidesFull( + scale: number, + displayWidth: number, + displayHeight: number + ): Promise { + if (!this.presentation) return + const batchSize = this.listOptions.batchSize + let batchFragment = document.createDocumentFragment() + + for (let i = 0; i < this.presentation.slides.length; i++) { + const { item, wrapper } = this.createListSlideItem(i, displayWidth, displayHeight) + this.mountListSlide(i, wrapper, scale, displayWidth, displayHeight) + batchFragment.appendChild(item) + + if ((i + 1) % batchSize === 0) { + this.container.appendChild(batchFragment) + batchFragment = document.createDocumentFragment() + await new Promise((resolve) => requestAnimationFrame(() => resolve())) + } + } + + if (batchFragment.childNodes.length > 0) { + this.container.appendChild(batchFragment) + } + + this.setupScrollSlideTracking() + } + + private async renderAllSlidesWindowed( + scale: number, + displayWidth: number, + displayHeight: number + ): Promise { + if (!this.presentation) return + const batchSize = this.listOptions.batchSize + let batchFragment = document.createDocumentFragment() + const wrappers: HTMLDivElement[] = [] + + for (let i = 0; i < this.presentation.slides.length; i++) { + const { item, wrapper } = this.createListSlideItem(i, displayWidth, displayHeight) + 
wrappers.push(wrapper) + batchFragment.appendChild(item) + + if ((i + 1) % batchSize === 0) { + this.container.appendChild(batchFragment) + batchFragment = document.createDocumentFragment() + await new Promise((resolve) => requestAnimationFrame(() => resolve())) + } + } + + if (batchFragment.childNodes.length > 0) { + this.container.appendChild(batchFragment) + } + + const mount = (idx: number): void => { + if (idx < 0 || idx >= wrappers.length) return + this.mountListSlide(idx, wrappers[idx], scale, displayWidth, displayHeight) + } + const unmount = (idx: number): void => { + if (idx < 0 || idx >= wrappers.length) return + this.unmountListSlide(idx, wrappers[idx], displayHeight) + } + + const initial = this.listOptions.initialSlides + for (let i = 0; i < Math.min(initial, wrappers.length); i++) mount(i) + this.ensureListSlideMountedFn = mount + + const IO = window.IntersectionObserver + if (!IO) { + for (let i = initial; i < wrappers.length; i++) mount(i) + this.setupScrollSlideTracking() + return + } + + const ioRoot = this.viewerOptions.scrollContainer ?? null + const overscanViewport = this.listOptions.overscanViewport + const rootHeight = ioRoot ? ioRoot.clientHeight : window.innerHeight + const rootMargin = `${Math.round(rootHeight * overscanViewport)}px 0px` + const observer = new IO( + (entries) => { + for (const entry of entries) { + const item = (entry.target as HTMLElement).parentElement + const index = Number(item?.dataset.slideIndex ?? 
'-1') + if (Number.isNaN(index) || index < 0) continue + if (entry.isIntersecting) { + mount(index) + } else { + unmount(index) + } + } + }, + { root: ioRoot, rootMargin, threshold: 0 } + ) + + wrappers.forEach((wrapper) => { + observer.observe(wrapper) + }) + + this.cleanupListMount = () => { + observer.disconnect() + this.ensureListSlideMountedFn = undefined + } + + this.setupScrollSlideTracking() + } + + private setupScrollSlideTracking(): void { + if (this.activeRenderMode === 'slide') return + + const IO = window.IntersectionObserver + if (!IO) return + + const items = this.container.querySelectorAll('[data-slide-index]') + if (!items.length) return + + const ratios = new Map() + const ioRoot = this.viewerOptions.scrollContainer ?? null + + const observer = new IO( + (entries) => { + for (const entry of entries) { + const idx = Number((entry.target as HTMLElement).dataset.slideIndex ?? '-1') + if (Number.isNaN(idx) || idx < 0) continue + ratios.set(idx, entry.intersectionRatio) + } + + if (this.suppressScrollChange) return + + let bestIdx = -1 + let bestRatio = -1 + for (const [idx, ratio] of ratios) { + if (ratio > bestRatio) { + bestRatio = ratio + bestIdx = idx + } + } + + if (bestIdx >= 0 && bestIdx !== this.currentSlide) { + this.currentSlide = bestIdx + this.emitSlideChange(bestIdx) + } + }, + { root: ioRoot, threshold: [0, 0.25, 0.5, 0.75, 1.0] } + ) + + items.forEach((item) => observer.observe(item)) + + this.cleanupScrollObserver = () => { + observer.disconnect() + } + } + + private renderSingleSlide(scale: number, displayWidth: number, displayHeight: number): void { + if (!this.presentation) return + + const slide = this.presentation.slides[this.currentSlide] + if (!slide) return + + for (const handle of this.slideHandles.values()) { + handle.dispose() + } + this.slideHandles.clear() + this.disposeAllCharts() + this.container.innerHTML = '' + this.mountedSlides.clear() + this.mountedSlides.add(this.currentSlide) + + const wrapper = 
document.createElement('div') + wrapper.style.cssText = ` + width: ${displayWidth}px; height: ${displayHeight}px; + margin: 0 auto; overflow: hidden; position: relative; + box-shadow: 0 2px 8px rgba(0,0,0,0.15); + ` + + try { + const handle = renderSlideInternal(this.presentation, slide, { + onNodeError: (nodeId, error) => this.emitNodeError(nodeId, error), + onNavigate: (target) => this.handleNavigate(target), + mediaUrlCache: this.mediaUrlCache, + chartInstances: this.chartInstances, + }) + this.slideHandles.set(this.currentSlide, handle) + handle.element.style.transform = `scale(${scale})` + handle.element.style.transformOrigin = 'top left' + wrapper.appendChild(handle.element) + this.emitSlideRendered(this.currentSlide, handle.element) + } catch (e) { + this.emitSlideError(this.currentSlide, e) + wrapper.style.background = '#fff3f3' + wrapper.style.display = 'flex' + wrapper.style.alignItems = 'center' + wrapper.style.justifyContent = 'center' + wrapper.style.border = '2px dashed #ff6b6b' + wrapper.style.color = '#cc0000' + wrapper.style.fontSize = '14px' + wrapper.textContent = `Slide ${this.currentSlide + 1}: Render Error - ${e instanceof Error ? e.message : String(e)}` + } + + this.container.appendChild(wrapper) + this.afterSingleSlideRender() + } + + /** + * After list-mode rendering, a scrollbar may appear on the page body + * (or a scroll ancestor), narrowing the container. If the container's + * clientWidth now differs from the width used to compute the initial + * scale, patch every wrapper's dimensions and each slide element's + * transform in-place — no DOM rebuild required. 
+ */ + private correctListMetricsIfNeeded(): void { + if (!this.presentation) return + if (this._fitMode !== 'contain') return + if (this.viewerOptions.width !== undefined) return + + const currentWidth = this.container.clientWidth || 0 + if (!currentWidth || currentWidth === this.lastMeasuredContainerWidth) return + + // Width changed — recompute metrics + this.lastMeasuredContainerWidth = currentWidth + const fitScale = currentWidth / this.presentation.width + const newScale = fitScale * this.zoomFactor + const newDisplayW = this.presentation.width * newScale + const newDisplayH = this.presentation.height * newScale + + // Patch every slide wrapper in the list + const items = this.container.querySelectorAll('[data-slide-index]') + for (const item of items) { + const wrapper = item.firstElementChild as HTMLElement | null + if (!wrapper) continue + wrapper.style.width = `${newDisplayW}px` + wrapper.style.height = `${newDisplayH}px` + // The slide element is the first child of the wrapper + const slideEl = wrapper.firstElementChild as HTMLElement | null + if (slideEl) { + slideEl.style.transform = `scale(${newScale})` + } + } + } + + private handleNavigate(target: { slideIndex?: number; url?: string }): void { + if (target.slideIndex !== undefined) { + this.goToSlide(target.slideIndex) + } else if (target.url && isAllowedExternalUrl(target.url)) { + window.open(target.url, '_blank', 'noopener,noreferrer') + } + } +} + +// ----------------------------------------------------------------------- +// Standalone helper (shared with Renderer.ts) +// ----------------------------------------------------------------------- + +export async function normalizePreviewInput(input: PreviewInput): Promise { + if (input instanceof ArrayBuffer) return input + if (input instanceof Uint8Array) { + const bytes = new Uint8Array(input.byteLength) + bytes.set(input) + return bytes.buffer + } + + const blobLike = input as Blob & { arrayBuffer?: () => Promise } + if (typeof 
blobLike.arrayBuffer === 'function') { + return blobLike.arrayBuffer() + } + + if (typeof FileReader !== 'undefined') { + return new Promise((resolve, reject) => { + const reader = new FileReader() + reader.onload = () => resolve(reader.result as ArrayBuffer) + reader.onerror = () => reject(reader.error ?? new Error('Failed to read Blob input')) + reader.readAsArrayBuffer(blobLike) + }) + } + + if (typeof Response !== 'undefined') { + return new Response(blobLike).arrayBuffer() + } + + throw new Error('Blob preview input is not supported in this runtime') +} diff --git a/apps/sim/lib/pptx-renderer/export/serialize-presentation.ts b/apps/sim/lib/pptx-renderer/export/serialize-presentation.ts new file mode 100644 index 00000000000..ad5d934294f --- /dev/null +++ b/apps/sim/lib/pptx-renderer/export/serialize-presentation.ts @@ -0,0 +1,201 @@ +/** + * Serialize PresentationData into a plain JSON-serializable structure. + * Strips all SafeXmlNode references and re-parses group children. + */ + +import type { BaseNodeData } from '../model/nodes/base-node' +import type { ChartNodeData } from '../model/nodes/chart-node' +import { type GroupNodeData, parseGroupNode } from '../model/nodes/group-node' +import { type PicNodeData, parsePicNode } from '../model/nodes/pic-node' +import { parseShapeNode, type ShapeNodeData, type TextBody } from '../model/nodes/shape-node' +import { + parseTableNode, + type TableCell, + type TableNodeData, + type TableRow, +} from '../model/nodes/table-node' +import type { PresentationData } from '../model/presentation' +import type { SlideNode } from '../model/slide' +import type { SafeXmlNode } from '../parser/xml-parser' + +// --------------------------------------------------------------------------- +// Serialized Types (JSON-safe) +// --------------------------------------------------------------------------- + +interface SerializedParagraph { + level: number + text: string +} + +interface SerializedTextBody { + paragraphs: 
SerializedParagraph[] + totalText: string +} + +interface SerializedCell { + text: string + gridSpan: number + rowSpan: number +} + +interface SerializedRow { + height: number + cells: SerializedCell[] +} + +export interface SerializedNode { + id: string + name: string + nodeType: string + position: { x: number; y: number } + size: { w: number; h: number } + rotation: number + flipH: boolean + flipV: boolean + presetGeometry?: string + textBody?: SerializedTextBody + columns?: number[] + rows?: SerializedRow[] + tableStyleId?: string + blipEmbed?: string + chartPath?: string + children?: SerializedNode[] +} + +export interface SerializedSlide { + index: number + nodes: SerializedNode[] +} + +export interface SerializedPresentation { + width: number + height: number + slideCount: number + slides: SerializedSlide[] +} + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function serializeTextBody(tb: TextBody | undefined): SerializedTextBody | undefined { + if (!tb) return undefined + const paragraphs: SerializedParagraph[] = tb.paragraphs.map((p) => ({ + level: p.level, + text: p.runs.map((r) => r.text).join(''), + })) + const totalText = paragraphs.map((p) => p.text).join('\n') + if (!totalText.trim()) return undefined + return { paragraphs, totalText } +} + +function serializeCell(cell: TableCell): SerializedCell { + const text = cell.textBody + ? cell.textBody.paragraphs.map((p) => p.runs.map((r) => r.text).join('')).join('\n') + : '' + return { text, gridSpan: cell.gridSpan, rowSpan: cell.rowSpan } +} + +function serializeRow(row: TableRow): SerializedRow { + return { + height: row.height, + cells: row.cells.map(serializeCell), + } +} + +/** + * Parse a raw XML child node from a group into a typed node. 
+ */ +function parseGroupChild(childXml: SafeXmlNode): BaseNodeData | undefined { + const tag = childXml.localName + switch (tag) { + case 'sp': + case 'cxnSp': + return parseShapeNode(childXml) + case 'pic': + return parsePicNode(childXml) + case 'grpSp': + return parseGroupNode(childXml) + case 'graphicFrame': { + const graphic = childXml.child('graphic') + const graphicData = graphic.child('graphicData') + if (graphicData.child('tbl').exists()) { + return parseTableNode(childXml) + } + return undefined + } + default: + return undefined + } +} + +function serializeNode(node: SlideNode | BaseNodeData): SerializedNode { + const base: SerializedNode = { + id: node.id, + name: node.name, + nodeType: node.nodeType, + position: { x: node.position.x, y: node.position.y }, + size: { w: node.size.w, h: node.size.h }, + rotation: node.rotation, + flipH: node.flipH, + flipV: node.flipV, + } + + switch (node.nodeType) { + case 'shape': { + const s = node as ShapeNodeData + base.presetGeometry = s.presetGeometry + base.textBody = serializeTextBody(s.textBody) + break + } + case 'picture': { + const p = node as PicNodeData + base.blipEmbed = p.blipEmbed + break + } + case 'table': { + const t = node as TableNodeData + base.columns = [...t.columns] + base.rows = t.rows.map(serializeRow) + base.tableStyleId = t.tableStyleId + break + } + case 'chart': { + const c = node as ChartNodeData + base.chartPath = c.chartPath + break + } + case 'group': { + const g = node as GroupNodeData + const children: SerializedNode[] = [] + for (const childXml of g.children) { + try { + const parsed = parseGroupChild(childXml) + if (parsed) children.push(serializeNode(parsed)) + } catch { + // skip unparseable group children + } + } + base.children = children + break + } + } + + return base +} + +// --------------------------------------------------------------------------- +// Main Export +// --------------------------------------------------------------------------- + +export function 
serializePresentation(pres: PresentationData): SerializedPresentation { + return { + width: pres.width, + height: pres.height, + slideCount: pres.slides.length, + slides: pres.slides.map((slide, i) => ({ + index: i, + nodes: slide.nodes.map(serializeNode), + })), + } +} diff --git a/apps/sim/lib/pptx-renderer/index.ts b/apps/sim/lib/pptx-renderer/index.ts new file mode 100644 index 00000000000..318b5e881d4 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/index.ts @@ -0,0 +1,18 @@ +/** + * Internal PPTX source ported from https://github.com/aiden0z/pptx-renderer and + * adapted for use in Sim. + */ +export type { + FitMode, + ListRenderOptions, + PptxViewerEventMap, + PreviewInput, + ViewerOptions, +} from './core/viewer' +export { PptxViewer } from './core/viewer' +export type { PresentationData } from './model/presentation' +export { buildPresentation } from './model/presentation' +export type { PptxFiles, ZipParseLimits } from './parser/zip-parser' +export { parseZip } from './parser/zip-parser' +export type { SlideHandle, SlideRendererOptions } from './renderer/slide-renderer' +export { renderSlide } from './renderer/slide-renderer' diff --git a/apps/sim/lib/pptx-renderer/model/layout.ts b/apps/sim/lib/pptx-renderer/model/layout.ts new file mode 100644 index 00000000000..d553abd322f --- /dev/null +++ b/apps/sim/lib/pptx-renderer/model/layout.ts @@ -0,0 +1,186 @@ +/** + * Slide layout parser — extracts color map override, background, + * and placeholder shapes from a p:sldLayout XML. + */ + +import { emuToPx } from '../parser/units' +import type { SafeXmlNode } from '../parser/xml-parser' +import { isPlaceholder, parseAllAttributes } from './xml-helpers' + +export interface PlaceholderXfrm { + position: { x: number; y: number } + size: { w: number; h: number } +} + +export interface PlaceholderEntry { + node: SafeXmlNode + /** When placeholder is inside a group, position/size in slide space (px). 
*/ + absoluteXfrm?: PlaceholderXfrm +} + +export interface LayoutData { + colorMapOverride?: Map + background?: SafeXmlNode + placeholders: PlaceholderEntry[] + spTree: SafeXmlNode + rels: Map + /** When false, shapes from the slide master should NOT be rendered on this layout. */ + showMasterSp: boolean +} + +function getShapeXfrmInEmu( + node: SafeXmlNode +): { offX: number; offY: number; cx: number; cy: number } | null { + const spPr = node.child('spPr') + if (!spPr.exists()) return null + const xfrm = spPr.child('xfrm') + if (!xfrm.exists()) return null + const off = xfrm.child('off') + const ext = xfrm.child('ext') + const offX = off.numAttr('x') ?? 0 + const offY = off.numAttr('y') ?? 0 + const cx = ext.numAttr('cx') ?? 0 + const cy = ext.numAttr('cy') ?? 0 + return { offX, offY, cx, cy } +} + +function getGroupXfrmInEmu(grpSp: SafeXmlNode): { + offX: number + offY: number + cx: number + cy: number + chOffX: number + chOffY: number + chExtCx: number + chExtCy: number +} | null { + const grpSpPr = grpSp.child('grpSpPr') + if (!grpSpPr.exists()) return null + const xfrm = grpSpPr.child('xfrm') + if (!xfrm.exists()) return null + const off = xfrm.child('off') + const ext = xfrm.child('ext') + const chOff = xfrm.child('chOff') + const chExt = xfrm.child('chExt') + const offX = off.numAttr('x') ?? 0 + const offY = off.numAttr('y') ?? 0 + const cx = ext.numAttr('cx') ?? 0 + const cy = ext.numAttr('cy') ?? 0 + // OOXML: when chOff/chExt omitted, child box equals group box (chOff=0,0 and chExt=ext) + const chOffX = chOff.exists() ? (chOff.numAttr('x') ?? 0) : 0 + const chOffY = chOff.exists() ? (chOff.numAttr('y') ?? 0) : 0 + const chExtCx = chExt.exists() ? (chExt.numAttr('cx') ?? cx) : cx + const chExtCy = chExt.exists() ? (chExt.numAttr('cy') ?? cy) : cy + return { + offX, + offY, + cx, + cy, + chOffX, + chOffY, + chExtCx: chExtCx > 0 ? chExtCx : 1, + chExtCy: chExtCy > 0 ? 
chExtCy : 1, + } +} + +/** + * Recursively collect placeholders; when inside a group, compute position/size in slide space. + */ +function extractPlaceholdersRecursive( + spTree: SafeXmlNode, + groupTransform: { offX: number; offY: number; scaleX: number; scaleY: number } | null +): PlaceholderEntry[] { + const out: PlaceholderEntry[] = [] + for (const child of spTree.allChildren()) { + if (child.localName === 'grpSp') { + const gx = getGroupXfrmInEmu(child) + if (gx && gx.chExtCx > 0 && gx.chExtCy > 0) { + const scaleX = gx.cx / gx.chExtCx + const scaleY = gx.cy / gx.chExtCy + const baseOffX = gx.offX - gx.chOffX * scaleX + const baseOffY = gx.offY - gx.chOffY * scaleY + const nextTransform = groupTransform + ? { + offX: groupTransform.offX + baseOffX * groupTransform.scaleX, + offY: groupTransform.offY + baseOffY * groupTransform.scaleY, + scaleX: groupTransform.scaleX * scaleX, + scaleY: groupTransform.scaleY * scaleY, + } + : { offX: baseOffX, offY: baseOffY, scaleX, scaleY } + const nested = extractPlaceholdersRecursive(child, nextTransform) + out.push(...nested) + } else { + out.push(...extractPlaceholdersRecursive(child, groupTransform)) + } + continue + } + if (!isPlaceholder(child)) continue + const sx = getShapeXfrmInEmu(child) + if (!sx) { + out.push({ node: child }) + continue + } + if (groupTransform) { + const absOffX = groupTransform.offX + sx.offX * groupTransform.scaleX + const absOffY = groupTransform.offY + sx.offY * groupTransform.scaleY + const absCx = sx.cx * groupTransform.scaleX + const absCy = sx.cy * groupTransform.scaleY + out.push({ + node: child, + absoluteXfrm: { + position: { x: emuToPx(absOffX), y: emuToPx(absOffY) }, + size: { w: emuToPx(absCx), h: emuToPx(absCy) }, + }, + }) + } else { + out.push({ + node: child, + absoluteXfrm: { + position: { x: emuToPx(sx.offX), y: emuToPx(sx.offY) }, + size: { w: emuToPx(sx.cx), h: emuToPx(sx.cy) }, + }, + }) + } + } + return out +} + +/** + * Parse a slide layout XML root (`p:sldLayout`) into 
LayoutData. + */ +export function parseLayout(root: SafeXmlNode): LayoutData { + const cSld = root.child('cSld') + + // --- Background --- + const bg = cSld.child('bg') + const background = bg.exists() ? bg : undefined + + // --- Shape tree --- + const spTree = cSld.child('spTree') + + // --- Color map override --- + let colorMapOverride: Map | undefined + const clrMapOvr = root.child('clrMapOvr') + if (clrMapOvr.exists()) { + const overrideMapping = clrMapOvr.child('overrideClrMapping') + if (overrideMapping.exists()) { + colorMapOverride = parseAllAttributes(overrideMapping) + } + } + + // --- Placeholders (recursive so we find title/body inside grpSp; resolve position in slide space) --- + const placeholders = extractPlaceholdersRecursive(spTree, null) + + // --- showMasterSp: if "0", master shapes should not be rendered for this layout --- + const showMasterSpAttr = root.attr('showMasterSp') + const showMasterSp = showMasterSpAttr !== '0' + + return { + colorMapOverride, + background, + placeholders, + spTree, + rels: new Map(), // populated later by buildPresentation + showMasterSp, + } +} diff --git a/apps/sim/lib/pptx-renderer/model/master.ts b/apps/sim/lib/pptx-renderer/model/master.ts new file mode 100644 index 00000000000..70648c41e78 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/model/master.ts @@ -0,0 +1,80 @@ +/** + * Slide master parser — extracts color map, background, text styles, + * and placeholder shapes from a p:sldMaster XML. + */ + +import type { SafeXmlNode } from '../parser/xml-parser' +import { isPlaceholder, parseAllAttributes } from './xml-helpers' + +export interface MasterData { + colorMap: Map + background?: SafeXmlNode + textStyles: { + titleStyle?: SafeXmlNode + bodyStyle?: SafeXmlNode + otherStyle?: SafeXmlNode + } + defaultTextStyle?: SafeXmlNode + placeholders: SafeXmlNode[] + spTree: SafeXmlNode + rels: Map +} + +/** + * Extract placeholder shape nodes from an spTree node. 
+ * A shape is considered a placeholder if it has a `p:ph` element in its nvPr. + */ +function extractPlaceholders(spTree: SafeXmlNode): SafeXmlNode[] { + const placeholders: SafeXmlNode[] = [] + const allChildren = spTree.allChildren() + for (const child of allChildren) { + if (isPlaceholder(child)) { + placeholders.push(child) + } + } + return placeholders +} + +/** + * Parse a slide master XML root (`p:sldMaster`) into MasterData. + */ +export function parseMaster(root: SafeXmlNode): MasterData { + const cSld = root.child('cSld') + + // --- Background --- + const bg = cSld.child('bg') + const background = bg.exists() ? bg : undefined + + // --- Shape tree --- + const spTree = cSld.child('spTree') + + // --- Color map --- + const clrMap = root.child('clrMap') + const colorMap = parseAllAttributes(clrMap) + + // --- Text styles --- + const txStyles = root.child('txStyles') + const titleStyle = txStyles.child('titleStyle') + const bodyStyle = txStyles.child('bodyStyle') + const otherStyle = txStyles.child('otherStyle') + + // --- Default text style --- + const defaultTextStyle = root.child('defaultTextStyle') + + // --- Placeholders --- + const placeholders = extractPlaceholders(spTree) + + return { + colorMap, + background, + textStyles: { + titleStyle: titleStyle.exists() ? titleStyle : undefined, + bodyStyle: bodyStyle.exists() ? bodyStyle : undefined, + otherStyle: otherStyle.exists() ? otherStyle : undefined, + }, + defaultTextStyle: defaultTextStyle.exists() ? defaultTextStyle : undefined, + placeholders, + spTree, + rels: new Map(), // populated later by buildPresentation + } +} diff --git a/apps/sim/lib/pptx-renderer/model/nodes/base-node.ts b/apps/sim/lib/pptx-renderer/model/nodes/base-node.ts new file mode 100644 index 00000000000..dfe73051517 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/model/nodes/base-node.ts @@ -0,0 +1,169 @@ +/** + * Base node types and property parser shared by all slide node kinds. 
+ */ + +import { angleToDeg, emuToPx } from '../../parser/units' +import type { SafeXmlNode } from '../../parser/xml-parser' + +export type NodeType = 'shape' | 'picture' | 'table' | 'group' | 'chart' | 'unknown' + +export interface Position { + x: number + y: number +} + +export interface Size { + w: number + h: number +} + +export interface PlaceholderInfo { + type?: string + idx?: number +} + +/** Shape-level hyperlink click action (from cNvPr > a:hlinkClick). */ +export interface HlinkAction { + /** Action URI, e.g. "ppaction://hlinksldjump", "ppaction://hlinkpres", or empty for URL links. */ + action?: string + /** Relationship ID for the target (slide, URL, etc.). */ + rId?: string + /** Optional tooltip text. */ + tooltip?: string +} + +export interface BaseNodeData { + id: string + name: string + nodeType: NodeType + position: Position + size: Size + rotation: number + flipH: boolean + flipV: boolean + placeholder?: PlaceholderInfo + /** Shape-level hyperlink/click action (action buttons, clickable shapes). */ + hlinkClick?: HlinkAction + /** @internal Raw XML node — opaque to consumers. Use serializePresentation() for JSON-safe data. */ + source: SafeXmlNode +} + +/** + * Try to find the non-visual properties container in the given node. + * PPTX uses different wrapper names depending on the shape kind: + * p:nvSpPr (shapes/connectors), p:nvPicPr (pictures), + * p:nvGrpSpPr (groups), p:nvGraphicFramePr (tables/charts). + */ +function findNvProps(node: SafeXmlNode): { cNvPr: SafeXmlNode; nvPr: SafeXmlNode } { + const wrappers = ['nvSpPr', 'nvPicPr', 'nvGrpSpPr', 'nvGraphicFramePr', 'nvCxnSpPr'] + for (const name of wrappers) { + const wrapper = node.child(name) + if (wrapper.exists()) { + return { + cNvPr: wrapper.child('cNvPr'), + nvPr: wrapper.child('nvPr'), + } + } + } + return { + cNvPr: node.child('cNvPr'), + nvPr: node.child('nvPr'), + } +} + +/** + * Find the transform (xfrm) node. 
Shapes use `p:spPr > a:xfrm`, + * groups use `p:grpSpPr > a:xfrm`, graphic frames use `p:xfrm`. + */ +function findXfrm(node: SafeXmlNode): SafeXmlNode { + // Try spPr first (most shapes) + const spPr = node.child('spPr') + if (spPr.exists()) { + const xfrm = spPr.child('xfrm') + if (xfrm.exists()) return xfrm + } + + // Try grpSpPr (groups) + const grpSpPr = node.child('grpSpPr') + if (grpSpPr.exists()) { + const xfrm = grpSpPr.child('xfrm') + if (xfrm.exists()) return xfrm + } + + // Try direct xfrm (graphic frames) + const directXfrm = node.child('xfrm') + if (directXfrm.exists()) return directXfrm + + // Return empty node — all reads will return defaults + return node.child('__nonexistent__') +} + +/** + * Parse placeholder info from nvPr > p:ph. + */ +function parsePlaceholder(nvPr: SafeXmlNode): PlaceholderInfo | undefined { + const ph = nvPr.child('ph') + if (!ph.exists()) return undefined + + const type = ph.attr('type') + const idx = ph.numAttr('idx') + + return { type, idx } +} + +/** + * Parse the base properties common to all node types from a shape-like XML node. + * Returns everything except `nodeType`, which the caller must set. + */ +export function parseBaseProps(spNode: SafeXmlNode): Omit { + const { cNvPr, nvPr } = findNvProps(spNode) + + const id = cNvPr.attr('id') ?? '' + const name = cNvPr.attr('name') ?? '' + + // --- Transform --- + const xfrm = findXfrm(spNode) + const off = xfrm.child('off') + const ext = xfrm.child('ext') + + const position: Position = { + x: emuToPx(off.numAttr('x') ?? 0), + y: emuToPx(off.numAttr('y') ?? 0), + } + + const size: Size = { + w: emuToPx(ext.numAttr('cx') ?? 0), + h: emuToPx(ext.numAttr('cy') ?? 0), + } + + const rotation = angleToDeg(xfrm.numAttr('rot') ?? 
0) + const flipH = xfrm.attr('flipH') === '1' || xfrm.attr('flipH') === 'true' + const flipV = xfrm.attr('flipV') === '1' || xfrm.attr('flipV') === 'true' + + // --- Placeholder --- + const placeholder = parsePlaceholder(nvPr) + + // --- Shape-level hyperlink action (cNvPr > a:hlinkClick) --- + let hlinkClick: HlinkAction | undefined + const hlinkNode = cNvPr.child('hlinkClick') + if (hlinkNode.exists()) { + hlinkClick = { + action: hlinkNode.attr('action') ?? undefined, + rId: hlinkNode.attr('id') ?? hlinkNode.attr('r:id') ?? undefined, + tooltip: hlinkNode.attr('tooltip') ?? undefined, + } + } + + return { + id, + name, + position, + size, + rotation, + flipH, + flipV, + placeholder, + hlinkClick, + source: spNode, + } +} diff --git a/apps/sim/lib/pptx-renderer/model/nodes/chart-node.ts b/apps/sim/lib/pptx-renderer/model/nodes/chart-node.ts new file mode 100644 index 00000000000..d1bc2de0327 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/model/nodes/chart-node.ts @@ -0,0 +1,55 @@ +/** + * Chart node — represents a chart embedded in a graphicFrame element. + */ + +import { type RelEntry, resolveRelTarget } from '../../parser/rel-parser' +import type { SafeXmlNode } from '../../parser/xml-parser' +import { type BaseNodeData, parseBaseProps } from './base-node' + +export interface ChartNodeData extends BaseNodeData { + nodeType: 'chart' + chartPath: string // e.g. "ppt/charts/chart1.xml" +} + +/** + * Parse a graphicFrame containing a chart reference into a ChartNodeData. + * + * @param graphicFrame The graphicFrame XML node + * @param slideRels Relationship entries for the containing slide + * @param slidePath Full path of the slide (e.g. 
"ppt/slides/slide1.xml") + */ +export function parseChartNode( + graphicFrame: SafeXmlNode, + slideRels: Map, + slidePath: string +): ChartNodeData | undefined { + const base = parseBaseProps(graphicFrame) + + // Find chart relationship + const graphic = graphicFrame.child('graphic') + const graphicData = graphic.child('graphicData') + + // Find the chart reference - look for c:chart element with r:id + let chartRId: string | undefined + for (const child of graphicData.allChildren()) { + if (child.localName === 'chart') { + chartRId = child.attr('r:id') || child.attr('id') + break + } + } + + if (!chartRId) return undefined + + const rel = slideRels.get(chartRId) + if (!rel) return undefined + + // Resolve chart path relative to slide + const slideDir = slidePath.substring(0, slidePath.lastIndexOf('/')) + const chartPath = resolveRelTarget(slideDir, rel.target) + + return { + ...base, + nodeType: 'chart' as const, + chartPath, + } +} diff --git a/apps/sim/lib/pptx-renderer/model/nodes/group-node.ts b/apps/sim/lib/pptx-renderer/model/nodes/group-node.ts new file mode 100644 index 00000000000..b357df57e35 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/model/nodes/group-node.ts @@ -0,0 +1,62 @@ +/** + * Group node parser — handles grouped shapes (p:grpSp). + */ + +import { emuToPx } from '../../parser/units' +import type { SafeXmlNode } from '../../parser/xml-parser' +import { type BaseNodeData, type Position, parseBaseProps, type Size } from './base-node' + +export interface GroupNodeData extends BaseNodeData { + nodeType: 'group' + childOffset: Position + childExtent: Size + /** @internal Raw XML nodes — opaque to consumers. Use serializePresentation() for JSON-safe data. */ + children: SafeXmlNode[] +} + +/** Tag names of elements that can be children in a group's spTree. */ +const GROUP_CHILD_TAGS = new Set(['sp', 'pic', 'grpSp', 'graphicFrame', 'cxnSp']) + +/** + * Parse a group shape XML node (`p:grpSp`) into GroupNodeData. 
+ */ +export function parseGroupNode(grpNode: SafeXmlNode): GroupNodeData { + const base = parseBaseProps(grpNode) + + // --- Child coordinate space from grpSpPr > a:xfrm --- + // OOXML: when chOff/chExt omitted, child box equals group box (chOff=0,0, chExt=ext). + const grpSpPr = grpNode.child('grpSpPr') + const xfrm = grpSpPr.child('xfrm') + const chOff = xfrm.child('chOff') + const chExt = xfrm.child('chExt') + + const childOffset: Position = chOff.exists() + ? { x: emuToPx(chOff.numAttr('x') ?? 0), y: emuToPx(chOff.numAttr('y') ?? 0) } + : { x: 0, y: 0 } + + const childExtent: Size = (() => { + if (!chExt.exists()) return { w: base.size.w, h: base.size.h } + const cx = chExt.numAttr('cx') + const cy = chExt.numAttr('cy') + return { + w: cx !== undefined && cx > 0 ? emuToPx(cx) : base.size.w, + h: cy !== undefined && cy > 0 ? emuToPx(cy) : base.size.h, + } + })() + + // --- Collect direct child shape nodes --- + const children: SafeXmlNode[] = [] + for (const child of grpNode.allChildren()) { + if (GROUP_CHILD_TAGS.has(child.localName)) { + children.push(child) + } + } + + return { + ...base, + nodeType: 'group', + childOffset, + childExtent, + children, + } +} diff --git a/apps/sim/lib/pptx-renderer/model/nodes/pic-node.ts b/apps/sim/lib/pptx-renderer/model/nodes/pic-node.ts new file mode 100644 index 00000000000..f73ae6e4e9e --- /dev/null +++ b/apps/sim/lib/pptx-renderer/model/nodes/pic-node.ts @@ -0,0 +1,102 @@ +/** + * Picture node parser — handles images, video placeholders, and audio placeholders. + */ + +import type { SafeXmlNode } from '../../parser/xml-parser' +import { type BaseNodeData, parseBaseProps } from './base-node' + +export interface CropRect { + top: number + bottom: number + left: number + right: number +} + +export interface PicNodeData extends BaseNodeData { + nodeType: 'picture' + blipEmbed?: string + blipLink?: string + crop?: CropRect + /** @internal Raw XML node — opaque to consumers. Use serializePresentation() for JSON-safe data. 
*/ + fill?: SafeXmlNode + /** @internal Raw XML node — opaque to consumers. Use serializePresentation() for JSON-safe data. */ + line?: SafeXmlNode + isVideo?: boolean + isAudio?: boolean + mediaRId?: string +} + +/** OOXML encodes srcRect percentages as 1/100000 of full extent. */ +const CROP_DIVISOR = 100000 + +/** + * Parse a picture XML node (`p:pic`) into PicNodeData. + */ +export function parsePicNode(picNode: SafeXmlNode): PicNodeData { + const base = parseBaseProps(picNode) + + // --- Blip fill --- + const blipFill = picNode.child('blipFill') + const blip = blipFill.child('blip') + + // Try both namespaced and non-namespaced embed attribute + const blipEmbed = blip.attr('embed') ?? blip.attr('r:embed') + const blipLink = blip.attr('link') ?? blip.attr('r:link') + + // --- Crop (srcRect) --- + const srcRect = blipFill.child('srcRect') + let crop: CropRect | undefined + if (srcRect.exists()) { + const t = srcRect.numAttr('t') + const b = srcRect.numAttr('b') + const l = srcRect.numAttr('l') + const r = srcRect.numAttr('r') + if (t !== undefined || b !== undefined || l !== undefined || r !== undefined) { + crop = { + top: (t ?? 0) / CROP_DIVISOR, + bottom: (b ?? 0) / CROP_DIVISOR, + left: (l ?? 0) / CROP_DIVISOR, + right: (r ?? 0) / CROP_DIVISOR, + } + } + } + + // --- Shape properties (fill + line) --- + const spPr = picNode.child('spPr') + const solidFill = spPr.child('solidFill') + const gradFill = spPr.child('gradFill') + const fill = solidFill.exists() ? solidFill : gradFill.exists() ? gradFill : undefined + + const ln = spPr.child('ln') + const line = ln.exists() ? ln : undefined + + // --- Video / Audio detection --- + const nvPicPr = picNode.child('nvPicPr') + const nvPr = nvPicPr.child('nvPr') + + const videoFile = nvPr.child('videoFile') + const audioFile = nvPr.child('audioFile') + + const isVideo = videoFile.exists() + const isAudio = audioFile.exists() + + let mediaRId: string | undefined + if (isVideo) { + mediaRId = videoFile.attr('link') ?? 
videoFile.attr('r:link') + } else if (isAudio) { + mediaRId = audioFile.attr('link') ?? audioFile.attr('r:link') + } + + return { + ...base, + nodeType: 'picture', + blipEmbed, + blipLink, + crop, + fill, + line, + isVideo: isVideo || undefined, + isAudio: isAudio || undefined, + mediaRId, + } +} diff --git a/apps/sim/lib/pptx-renderer/model/nodes/shape-node.ts b/apps/sim/lib/pptx-renderer/model/nodes/shape-node.ts new file mode 100644 index 00000000000..a60ff6815ea --- /dev/null +++ b/apps/sim/lib/pptx-renderer/model/nodes/shape-node.ts @@ -0,0 +1,267 @@ +/** + * Shape node parser — handles auto-shapes, text boxes, and connectors. + */ + +import { angleToDeg, emuToPx } from '../../parser/units' +import type { SafeXmlNode } from '../../parser/xml-parser' +import { type BaseNodeData, parseBaseProps } from './base-node' + +export interface TextRun { + text: string + /** @internal Raw XML node — opaque to consumers. Use serializePresentation() for JSON-safe data. */ + properties?: SafeXmlNode +} + +export interface TextParagraph { + /** @internal Raw XML node — opaque to consumers. Use serializePresentation() for JSON-safe data. */ + properties?: SafeXmlNode + runs: TextRun[] + level: number + /** @internal End-of-paragraph run properties (a:endParaRPr). Defines font size for trailing paragraph mark. */ + endParaRPr?: SafeXmlNode +} + +export interface TextBody { + /** @internal Raw XML node — opaque to consumers. Use serializePresentation() for JSON-safe data. */ + bodyProperties?: SafeXmlNode + /** @internal Fallback bodyPr from layout/master placeholder (used when shape's own bodyPr is missing attrs). */ + layoutBodyProperties?: SafeXmlNode + /** @internal Raw XML node — opaque to consumers. Use serializePresentation() for JSON-safe data. 
*/ + listStyle?: SafeXmlNode + paragraphs: TextParagraph[] +} + +export interface LineEndInfo { + type: string // 'triangle', 'arrow', 'stealth', 'diamond', 'oval', 'none' + w?: string // 'sm', 'med', 'lg' + len?: string // 'sm', 'med', 'lg' +} + +/** Text box bounds in shape-local coordinates (used by diagram shapes with txXfrm). */ +export interface TextBoxBounds { + x: number + y: number + w: number + h: number + rotation?: number +} + +export interface ShapeNodeData extends BaseNodeData { + nodeType: 'shape' + presetGeometry?: string + adjustments: Map + /** @internal Raw XML node — opaque to consumers. Use serializePresentation() for JSON-safe data. */ + customGeometry?: SafeXmlNode + /** @internal Raw XML node — opaque to consumers. Use serializePresentation() for JSON-safe data. */ + fill?: SafeXmlNode + /** @internal Raw XML node — opaque to consumers. Use serializePresentation() for JSON-safe data. */ + line?: SafeXmlNode + headEnd?: LineEndInfo + tailEnd?: LineEndInfo + textBody?: TextBody + /** When set (e.g. diagram txXfrm), text is laid out in this rect instead of full shape. */ + textBoxBounds?: TextBoxBounds +} + +/** + * Parse a single text paragraph (`a:p`). + */ +function parseParagraph(pNode: SafeXmlNode): TextParagraph { + const pPr = pNode.child('pPr') + const level = pPr.numAttr('lvl') ?? 0 + + // Re-scan in document order to get correct interleaving of r, br, fld + const orderedRuns: TextRun[] = [] + for (const child of pNode.allChildren()) { + const ln = child.localName + if (ln === 'r') { + const rPr = child.child('rPr') + const tNode = child.child('t') + orderedRuns.push({ + text: tNode.text(), + properties: rPr.exists() ? rPr : undefined, + }) + } else if (ln === 'br') { + const rPr = child.child('rPr') + orderedRuns.push({ + text: '\n', + properties: rPr.exists() ? 
rPr : undefined, + }) + } else if (ln === 'fld') { + const rPr = child.child('rPr') + const tNode = child.child('t') + orderedRuns.push({ + text: tNode.text(), + properties: rPr.exists() ? rPr : undefined, + }) + } + } + + const endParaRPrNode = pNode.child('endParaRPr') + return { + properties: pPr.exists() ? pPr : undefined, + runs: orderedRuns, + level, + endParaRPr: endParaRPrNode.exists() ? endParaRPrNode : undefined, + } +} + +/** + * Parse a text body (`p:txBody` or `a:txBody`). + */ +export function parseTextBody(txBody: SafeXmlNode): TextBody | undefined { + if (!txBody.exists()) return undefined + + const bodyPr = txBody.child('bodyPr') + const lstStyle = txBody.child('lstStyle') + + const paragraphs: TextParagraph[] = [] + for (const pNode of txBody.children('p')) { + paragraphs.push(parseParagraph(pNode)) + } + + return { + bodyProperties: bodyPr.exists() ? bodyPr : undefined, + listStyle: lstStyle.exists() ? lstStyle : undefined, + paragraphs, + } +} + +/** Fill type local names in priority order. */ +const FILL_TYPES = ['solidFill', 'gradFill', 'blipFill', 'pattFill', 'grpFill', 'noFill'] as const + +/** + * Find the first fill element in a shape properties node. + */ +function findFill(spPr: SafeXmlNode): SafeXmlNode | undefined { + for (const fillType of FILL_TYPES) { + const fill = spPr.child(fillType) + if (fill.exists()) return fill + } + return undefined +} + +/** + * Parse adjustment values from `a:avLst > a:gd` elements. + * Each guide has a `name` attribute and a `fmla` attribute like "val 50000". + */ +function parseAdjustments(avLst: SafeXmlNode): Map { + const adjustments = new Map() + for (const gd of avLst.children('gd')) { + const name = gd.attr('name') + const fmla = gd.attr('fmla') ?? 
'' + if (!name) continue + + // fmla is typically "val NNNNN" — extract the numeric part + const match = fmla.match(/val\s+(-?\d+)/) + if (match) { + adjustments.set(name, Number(match[1])) + } else { + // Try direct numeric value + const num = Number(fmla) + if (!Number.isNaN(num)) { + adjustments.set(name, num) + } + } + } + return adjustments +} + +/** + * Parse a shape XML node (`p:sp` or `p:cxnSp`) into ShapeNodeData. + */ +export function parseShapeNode(spNode: SafeXmlNode): ShapeNodeData { + const base = parseBaseProps(spNode) + const spPr = spNode.child('spPr') + + // --- Preset geometry --- + const prstGeom = spPr.child('prstGeom') + const presetGeometry = prstGeom.attr('prst') + const avLst = prstGeom.child('avLst') + const adjustments = parseAdjustments(avLst) + + // --- Custom geometry --- + const custGeom = spPr.child('custGeom') + const customGeometry = custGeom.exists() ? custGeom : undefined + + // --- Fill --- + const fill = findFill(spPr) + + // --- Line --- + const ln = spPr.child('ln') + const line = ln.exists() ? 
ln : undefined + + // --- Line end markers (arrowheads) --- + let headEnd: LineEndInfo | undefined + let tailEnd: LineEndInfo | undefined + if (ln.exists()) { + const headEndNode = ln.child('headEnd') + if (headEndNode.exists()) { + const t = headEndNode.attr('type') + if (t && t !== 'none') { + headEnd = { type: t, w: headEndNode.attr('w'), len: headEndNode.attr('len') } + } + } + const tailEndNode = ln.child('tailEnd') + if (tailEndNode.exists()) { + const t = tailEndNode.attr('type') + if (t && t !== 'none') { + tailEnd = { type: t, w: tailEndNode.attr('w'), len: tailEndNode.attr('len') } + } + } + } + + // --- Text body --- + const txBody = spNode.child('txBody') + const textBody = parseTextBody(txBody) + + // --- Text transform (diagram shapes: dsp:txXfrm gives text box position/size in same space as xfrm) + let textBoxBounds: TextBoxBounds | undefined + const txXfrm = spNode.child('txXfrm') + if (txXfrm.exists()) { + const txOff = txXfrm.child('off') + const txExt = txXfrm.child('ext') + const xfrm = spPr.child('xfrm') + const off = xfrm.child('off') + const ext = xfrm.child('ext') + const shapeX = off.numAttr('x') ?? 0 + const shapeY = off.numAttr('y') ?? 0 + const shapeW = ext.numAttr('cx') ?? 0 + const shapeH = ext.numAttr('cy') ?? 0 + const txX = txOff.numAttr('x') ?? 0 + const txY = txOff.numAttr('y') ?? 0 + const txW = txExt.numAttr('cx') ?? 0 + const txH = txExt.numAttr('cy') ?? 0 + if (shapeW > 0 && shapeH > 0) { + const txRotDeg = angleToDeg(txXfrm.numAttr('rot') ?? 0) + const localX = txX - shapeX + const localY = txY - shapeY + // For 180deg txXfrm, mirror text box placement inside shape-local coordinates. + // (Common in SmartArt where shape xfrm also rotates by 180deg but text should remain upright.) + const isHalfTurn = Math.abs(Math.round(txRotDeg)) % 360 === 180 + const boxX = isHalfTurn ? shapeW - (localX + txW) : localX + const boxY = isHalfTurn ? 
shapeH - (localY + txH) : localY + textBoxBounds = { + x: emuToPx(boxX), + y: emuToPx(boxY), + w: emuToPx(txW), + h: emuToPx(txH), + rotation: txRotDeg, + } + } + } + + return { + ...base, + nodeType: 'shape', + presetGeometry, + adjustments, + customGeometry, + fill, + line, + headEnd, + tailEnd, + textBody, + textBoxBounds, + } +} diff --git a/apps/sim/lib/pptx-renderer/model/nodes/table-node.ts b/apps/sim/lib/pptx-renderer/model/nodes/table-node.ts new file mode 100644 index 00000000000..8b41f7cbdfd --- /dev/null +++ b/apps/sim/lib/pptx-renderer/model/nodes/table-node.ts @@ -0,0 +1,135 @@ +/** + * Table node parser — handles graphicFrame elements containing a:tbl. + */ + +import { emuToPx } from '../../parser/units' +import type { SafeXmlNode } from '../../parser/xml-parser' +import { type BaseNodeData, parseBaseProps } from './base-node' +import { parseTextBody, type TextBody } from './shape-node' + +export interface TableCell { + gridSpan: number + rowSpan: number + hMerge: boolean + vMerge: boolean + textBody?: TextBody + /** @internal Raw XML node — opaque to consumers. Use serializePresentation() for JSON-safe data. */ + properties?: SafeXmlNode +} + +export interface TableRow { + height: number + cells: TableCell[] +} + +export interface TableNodeData extends BaseNodeData { + nodeType: 'table' + columns: number[] + rows: TableRow[] + /** @internal Raw XML node — opaque to consumers. Use serializePresentation() for JSON-safe data. */ + properties?: SafeXmlNode + tableStyleId?: string +} + +/** + * Parse a single table cell (`a:tc`). + */ +function parseCell(tcNode: SafeXmlNode): TableCell { + const gridSpan = tcNode.numAttr('gridSpan') ?? 1 + const rowSpan = tcNode.numAttr('rowSpan') ?? 
1 + const hMerge = tcNode.attr('hMerge') === '1' || tcNode.attr('hMerge') === 'true' + const vMerge = tcNode.attr('vMerge') === '1' || tcNode.attr('vMerge') === 'true' + + // Cell text body + const txBody = tcNode.child('txBody') + const textBody = parseTextBody(txBody) + + // Cell properties + const tcPr = tcNode.child('tcPr') + + return { + gridSpan, + rowSpan, + hMerge, + vMerge, + textBody, + properties: tcPr.exists() ? tcPr : undefined, + } +} + +/** + * Parse a table row (`a:tr`). + */ +function parseRow(trNode: SafeXmlNode): TableRow { + const height = emuToPx(trNode.numAttr('h') ?? 0) + const cells: TableCell[] = [] + + for (const tcNode of trNode.children('tc')) { + cells.push(parseCell(tcNode)) + } + + return { height, cells } +} + +/** + * Locate the `a:tbl` element inside a graphicFrame. + * Path: `a:graphic > a:graphicData > a:tbl` + */ +function findTable(frameNode: SafeXmlNode): SafeXmlNode { + const graphic = frameNode.child('graphic') + const graphicData = graphic.child('graphicData') + return graphicData.child('tbl') +} + +/** + * Extract the table style ID from tblPr. + * It can be in `a:tblStyle@val` or as a direct `tblStyle` attribute. + */ +function extractTableStyleId(tblPr: SafeXmlNode): string | undefined { + // Try {UUID} (most common in OOXML) + const tableStyleIdNode = tblPr.child('tableStyleId') + if (tableStyleIdNode.exists()) { + return tableStyleIdNode.text() || tableStyleIdNode.attr('val') || undefined + } + // Try + const tblStyleNode = tblPr.child('tblStyle') + if (tblStyleNode.exists()) { + return tblStyleNode.attr('val') ?? (tblStyleNode.text() || undefined) + } + // Try direct attribute + return tblPr.attr('tblStyle') ?? undefined +} + +/** + * Parse a graphicFrame XML node containing a table into TableNodeData. 
+ */ +export function parseTableNode(frameNode: SafeXmlNode): TableNodeData { + const base = parseBaseProps(frameNode) + const tbl = findTable(frameNode) + + // --- Column widths --- + const tblGrid = tbl.child('tblGrid') + const columns: number[] = [] + for (const gridCol of tblGrid.children('gridCol')) { + columns.push(emuToPx(gridCol.numAttr('w') ?? 0)) + } + + // --- Rows --- + const rows: TableRow[] = [] + for (const trNode of tbl.children('tr')) { + rows.push(parseRow(trNode)) + } + + // --- Table properties --- + const tblPr = tbl.child('tblPr') + const tableStyleId = extractTableStyleId(tblPr) + + return { + ...base, + nodeType: 'table', + columns, + rows, + properties: tblPr.exists() ? tblPr : undefined, + tableStyleId, + } +} diff --git a/apps/sim/lib/pptx-renderer/model/presentation.test.ts b/apps/sim/lib/pptx-renderer/model/presentation.test.ts new file mode 100644 index 00000000000..3a5063bf995 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/model/presentation.test.ts @@ -0,0 +1,79 @@ +/** + * @vitest-environment jsdom + */ +import { describe, expect, it } from 'vitest' +import { buildPresentation } from '@/lib/pptx-renderer/model/presentation' +import type { PptxFiles } from '@/lib/pptx-renderer/parser/zip-parser' + +function createFiles(presentation: string): PptxFiles { + return { + contentTypes: '', + presentation, + presentationRels: ` + + + `, + slides: new Map([ + ['ppt/slides/slide1.xml', createSlideXml()], + ['ppt/slides/slide2.xml', createSlideXml()], + ]), + slideRels: new Map([ + ['ppt/slides/_rels/slide1.xml.rels', ''], + ['ppt/slides/_rels/slide2.xml.rels', ''], + ]), + slideLayouts: new Map(), + slideLayoutRels: new Map(), + slideMasters: new Map(), + slideMasterRels: new Map(), + themes: new Map(), + media: new Map(), + charts: new Map(), + chartStyles: new Map(), + chartColors: new Map(), + diagramDrawings: new Map(), + } +} + +function createPresentationXml(markers = ''): string { + return ` + + + + + + ` +} + +function 
createSlideXml(): string { + return ` + + + + + + + ` +} + +describe('buildPresentation', () => { + it('does not treat the standard wps namespace prefix as WPS Office', () => { + const presentation = buildPresentation( + createFiles( + createPresentationXml( + 'xmlns:wps="http://schemas.microsoft.com/office/word/2010/wordprocessingShape"' + ) + ) + ) + + expect(presentation.isWps).toBe(false) + }) + + it('orders slides by relationship id instead of numeric slide id', () => { + const presentation = buildPresentation(createFiles(createPresentationXml())) + + expect(presentation.slides.map((slide) => slide.slidePath)).toEqual([ + 'ppt/slides/slide2.xml', + 'ppt/slides/slide1.xml', + ]) + }) +}) diff --git a/apps/sim/lib/pptx-renderer/model/presentation.ts b/apps/sim/lib/pptx-renderer/model/presentation.ts new file mode 100644 index 00000000000..bdb3564d1d4 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/model/presentation.ts @@ -0,0 +1,486 @@ +/** + * Top-level presentation builder — assembles all parsed components + * (themes, masters, layouts, slides) into a single PresentationData structure. 
+ */ + +import { parseRels, type RelEntry, resolveRelTarget } from '../parser/rel-parser' +import { emuToPx } from '../parser/units' +import { parseXml, type SafeXmlNode } from '../parser/xml-parser' +import type { PptxFiles } from '../parser/zip-parser' +import { type LayoutData, type PlaceholderEntry, parseLayout } from './layout' +import { type MasterData, parseMaster } from './master' +import type { Position, Size } from './nodes/base-node' +import { parseSlide, type SlideData, type SlideNode } from './slide' +import { parseTheme, type ThemeData } from './theme' + +export interface PresentationData { + width: number + height: number + slides: SlideData[] + layouts: Map + masters: Map + themes: Map + slideToLayout: Map + layoutToMaster: Map + masterToTheme: Map + media: Map + tableStyles?: SafeXmlNode + charts: Map + isWps: boolean +} + +/** + * Derive the base directory from a file path. + * E.g., "ppt/slides/slide1.xml" → "ppt/slides" + */ +function basePath(filePath: string): string { + const idx = filePath.lastIndexOf('/') + return idx >= 0 ? filePath.substring(0, idx) : '' +} + +/** + * For a given XML file path, find its corresponding .rels file path. + * E.g., "ppt/slides/slide1.xml" → "ppt/slides/_rels/slide1.xml.rels" + */ +function relsPathFor(filePath: string): string { + const dir = basePath(filePath) + const fileName = filePath.substring(filePath.lastIndexOf('/') + 1) + return `${dir}/_rels/${fileName}.rels` +} + +/** + * Detect WPS (Kingsoft Office / WPS Office) by checking for known markers + * in the presentation XML string. + */ +function detectWps(presentationXml: string): boolean { + return ( + /\bKingsoft\b/i.test(presentationXml) || + /\bWPS Office\b/i.test(presentationXml) || + /xmlns:[\w.-]*kso[\w.-]*=/i.test(presentationXml) + ) +} + +/** + * Find a rels entry by type substring match. 
+ */ +function findRelByType(rels: Map, typeSubstring: string): RelEntry | undefined { + for (const [, entry] of rels) { + if (entry.type.includes(typeSubstring)) { + return entry + } + } + return undefined +} + +/** + * Find ALL rels entries matching a type substring, returning [rId, entry] pairs. + */ +function findRelsByType(rels: Map, typeSubstring: string): [string, RelEntry][] { + const results: [string, RelEntry][] = [] + for (const [rId, entry] of rels) { + if (entry.type.includes(typeSubstring)) { + results.push([rId, entry]) + } + } + return results +} + +/** + * Build the complete PresentationData from extracted PPTX files. + * + * This is the main factory function that wires together all parsed components: + * 1. Parses presentation.xml for slide ordering and size + * 2. Resolves the full relationship chain: slide → layout → master → theme + * 3. Parses each component and assembles the final structure + */ +export function buildPresentation(files: PptxFiles): PresentationData { + // --- Parse presentation root --- + const presRoot = parseXml(files.presentation) + const presRels = parseRels(files.presentationRels) + + // --- Slide size --- + const sldSz = presRoot.child('sldSz') + const width = emuToPx(sldSz.numAttr('cx') ?? 9144000) // default 10 inches + const height = emuToPx(sldSz.numAttr('cy') ?? 
6858000) // default 7.5 inches + + // --- WPS detection --- + const isWps = detectWps(files.presentation) + + // --- Parse themes --- + const themes = new Map() + for (const [themePath, themeXml] of files.themes) { + const themeRoot = parseXml(themeXml) + themes.set(themePath, parseTheme(themeRoot)) + } + + // --- Parse slide masters and build master→theme mapping --- + const masters = new Map() + const masterToTheme = new Map() + + for (const [masterPath, masterXml] of files.slideMasters) { + const masterRoot = parseXml(masterXml) + const masterData = parseMaster(masterRoot) + + // Find theme relationship for this master + const masterRelsPath = relsPathFor(masterPath) + const masterRelsXml = files.slideMasterRels.get(masterRelsPath) + if (masterRelsXml) { + const masterRels = parseRels(masterRelsXml) + masterData.rels = masterRels + const themeRel = findRelByType(masterRels, 'theme') + if (themeRel) { + const themePath = resolveRelTarget(basePath(masterPath), themeRel.target) + masterToTheme.set(masterPath, themePath) + } + } + masters.set(masterPath, masterData) + } + + // --- Parse slide layouts and build layout→master mapping --- + const layouts = new Map() + const layoutToMaster = new Map() + + for (const [layoutPath, layoutXml] of files.slideLayouts) { + const layoutRoot = parseXml(layoutXml) + const layoutData = parseLayout(layoutRoot) + + // Find master relationship for this layout + const layoutRelsPath = relsPathFor(layoutPath) + const layoutRelsXml = files.slideLayoutRels.get(layoutRelsPath) + if (layoutRelsXml) { + const layoutRels = parseRels(layoutRelsXml) + layoutData.rels = layoutRels + const masterRel = findRelByType(layoutRels, 'slideMaster') + if (masterRel) { + const masterPath = resolveRelTarget(basePath(layoutPath), masterRel.target) + layoutToMaster.set(layoutPath, masterPath) + } + } + layouts.set(layoutPath, layoutData) + } + + // --- Parse charts --- + const charts = new Map() + for (const [chartPath, chartXml] of files.charts) { + const 
chartRoot = parseXml(chartXml) + if (chartRoot.exists()) { + charts.set(chartPath, chartRoot) + } + } + + // --- Determine slide ordering --- + // The sldIdLst contains sldId elements with r:id attributes that reference + // presentation.xml.rels. We need to handle the fact that the attr might be + // stored as 'r:id' in the original XML but SafeXmlNode.attr() uses localName. + const sldIdLst = presRoot.child('sldIdLst') + const orderedSlideTargets: string[] = [] + + for (const sldId of sldIdLst.children('sldId')) { + // Try multiple attribute name patterns + const rId = sldId.attr('r:id') ?? sldId.attr('id') + if (rId) { + const relEntry = presRels.get(rId) + if (relEntry) { + const slidePath = resolveRelTarget('ppt', relEntry.target) + orderedSlideTargets.push(slidePath) + } + } + } + + // Fallback: if sldIdLst parsing didn't yield results, use presRels directly + if (orderedSlideTargets.length === 0) { + const slideRels = findRelsByType(presRels, 'slide') + // Sort by rId number to maintain order + slideRels.sort((a, b) => { + const numA = Number.parseInt(a[0].replace(/\D/g, ''), 10) || 0 + const numB = Number.parseInt(b[0].replace(/\D/g, ''), 10) || 0 + return numA - numB + }) + for (const [, entry] of slideRels) { + // Only include direct slide relationships, not slideLayout or slideMaster + if ( + entry.type.includes('/slide') && + !entry.type.includes('slideLayout') && + !entry.type.includes('slideMaster') + ) { + const slidePath = resolveRelTarget('ppt', entry.target) + orderedSlideTargets.push(slidePath) + } + } + } + + // --- Parse slides --- + const slides: SlideData[] = [] + const slideToLayout = new Map() + + for (let i = 0; i < orderedSlideTargets.length; i++) { + const slidePath = orderedSlideTargets[i] + const slideXml = files.slides.get(slidePath) + if (!slideXml) continue + + // Parse slide rels + const slideRelsPath = relsPathFor(slidePath) + const slideRelsXml = files.slideRels.get(slideRelsPath) + const slideRels = slideRelsXml ? 
parseRels(slideRelsXml) : new Map() + + // Parse slide + const slideRoot = parseXml(slideXml) + const slideData = parseSlide(slideRoot, i, slideRels, slidePath, files.diagramDrawings) + + // Resolve layout path from the slide's layout relationship target + if (slideData.layoutIndex) { + const layoutPath = resolveRelTarget(basePath(slidePath), slideData.layoutIndex) + slideData.layoutIndex = layoutPath + slideToLayout.set(i, layoutPath) + } + + slides.push(slideData) + } + + // --- Table styles --- + let tableStyles: SafeXmlNode | undefined + if (files.tableStyles) { + const tsRoot = parseXml(files.tableStyles) + if (tsRoot.exists()) { + tableStyles = tsRoot + } + } + + const result: PresentationData = { + width, + height, + slides, + layouts, + masters, + themes, + slideToLayout, + layoutToMaster, + masterToTheme, + media: files.media, + tableStyles, + charts, + isWps, + } + + // --- Resolve placeholder position inheritance --- + resolvePlaceholderInheritance(result) + + return result +} + +// --------------------------------------------------------------------------- +// Placeholder Position Inheritance +// --------------------------------------------------------------------------- + +/** + * Extract placeholder info (type, idx) from a raw placeholder XML node + * stored in layout/master. + */ +function getPhInfo(phNode: SafeXmlNode): { type?: string; idx?: number } { + // Try nvSpPr > nvPr > ph, or nvPicPr > nvPr > ph + for (const wrapper of ['nvSpPr', 'nvPicPr', 'nvGrpSpPr', 'nvGraphicFramePr', 'nvCxnSpPr']) { + const nvWrapper = phNode.child(wrapper) + if (nvWrapper.exists()) { + const nvPr = nvWrapper.child('nvPr') + const ph = nvPr.child('ph') + if (ph.exists()) { + const type = ph.attr('type') + const idxStr = ph.attr('idx') + const idx = idxStr !== undefined ? Number(idxStr) : undefined + return { type, idx: idx !== undefined && !Number.isNaN(idx) ? 
idx : undefined } + } + } + } + return {} +} + +/** + * Extract xfrm position/size from a raw placeholder XML node. + */ +function getPhXfrm(phNode: SafeXmlNode): { position: Position; size: Size } | undefined { + // Try spPr > xfrm first (most shapes) + const spPr = phNode.child('spPr') + if (spPr.exists()) { + const xfrm = spPr.child('xfrm') + if (xfrm.exists()) { + const off = xfrm.child('off') + const ext = xfrm.child('ext') + const x = off.numAttr('x') + const cx = ext.numAttr('cx') + if (x !== undefined && cx !== undefined) { + return { + position: { x: emuToPx(off.numAttr('x') ?? 0), y: emuToPx(off.numAttr('y') ?? 0) }, + size: { w: emuToPx(ext.numAttr('cx') ?? 0), h: emuToPx(ext.numAttr('cy') ?? 0) }, + } + } + } + } + return undefined +} + +/** + * Find a matching placeholder node by type and idx. + * Matching rules (based on OOXML spec): + * 1. Exact match on type AND idx + * 2. Match on type only (if idx is undefined or not found) + * 3. For "body" type, also check idx match only + */ +function findMatchingPlaceholder( + placeholders: SafeXmlNode[], + type?: string, + idx?: number +): SafeXmlNode | undefined { + let typeMatch: SafeXmlNode | undefined + + for (const ph of placeholders) { + const info = getPhInfo(ph) + + // Exact match (type + idx) + if (type !== undefined && info.type === type && idx !== undefined && info.idx === idx) { + return ph + } + + // Type-only match + if (type !== undefined && info.type === type) { + if (!typeMatch) typeMatch = ph + } + + // idx-only match (for body/content placeholders that may omit type) + if (idx !== undefined && info.idx === idx && type === undefined && info.type === undefined) { + return ph + } + } + + // For placeholders without type (defaults to "body"), match by idx + if (type === undefined && idx !== undefined) { + for (const ph of placeholders) { + const info = getPhInfo(ph) + if (info.idx === idx) return ph + } + } + + return typeMatch +} + +/** + * Find a matching layout placeholder 
(PlaceholderEntry); use entry.absoluteXfrm when present. + */ +function findMatchingLayoutPlaceholder( + placeholders: PlaceholderEntry[], + type?: string, + idx?: number +): PlaceholderEntry | undefined { + let typeMatch: PlaceholderEntry | undefined + + for (const entry of placeholders) { + const info = getPhInfo(entry.node) + + if (type !== undefined && info.type === type && idx !== undefined && info.idx === idx) { + return entry + } + if (type !== undefined && info.type === type && !typeMatch) { + typeMatch = entry + } + if (idx !== undefined && info.idx === idx && type === undefined && info.type === undefined) { + return entry + } + } + if (type === undefined && idx !== undefined) { + for (const entry of placeholders) { + if (getPhInfo(entry.node).idx === idx) return entry + } + } + return typeMatch +} + +/** + * Walk through all slide nodes (including group children recursively) + * and fill in missing position/size from layout/master placeholders. + */ +function resolvePlaceholderInheritance(pres: PresentationData): void { + for (let i = 0; i < pres.slides.length; i++) { + const slide = pres.slides[i] + const layoutPath = pres.slideToLayout.get(i) + const layout = layoutPath ? pres.layouts.get(layoutPath) : undefined + const masterPath = layoutPath ? pres.layoutToMaster.get(layoutPath) : undefined + const master = masterPath ? pres.masters.get(masterPath) : undefined + + resolveNodesPlaceholders(slide.nodes, layout, master) + } +} + +/** Extract bodyPr from a placeholder shape node (layout or master). */ +function getPhBodyPr(phNode: SafeXmlNode): SafeXmlNode | undefined { + const txBody = phNode.child('txBody') + if (!txBody.exists()) return undefined + const bodyPr = txBody.child('bodyPr') + return bodyPr.exists() ? 
bodyPr : undefined +} + +function resolveNodesPlaceholders( + nodes: SlideNode[], + layout: LayoutData | undefined, + master: MasterData | undefined +): void { + for (const node of nodes) { + // Recursively handle group children + if (node.nodeType === 'group' && 'children' in node) { + // Group children are raw SafeXmlNode, not parsed yet — skip + // (they get parsed during rendering in GroupRenderer) + } + + if (!node.placeholder) continue + + const { type, idx } = node.placeholder + const sizeIsEmpty = node.size.w === 0 && node.size.h === 0 + const positionLooksDefault = node.position.y < 5 // y=0 or near top → use layout position + + if (layout) { + const layoutMatch = findMatchingLayoutPlaceholder(layout.placeholders, type, idx) + if (layoutMatch) { + const xfrm = layoutMatch.absoluteXfrm ?? getPhXfrm(layoutMatch.node) + if (xfrm) { + if (sizeIsEmpty) { + node.position = xfrm.position + node.size = xfrm.size + } else if (positionLooksDefault) { + node.position = xfrm.position + } + } + + // Inherit bodyPr from layout placeholder for text rendering (anchor, insets, etc.) 
+ if ('textBody' in node && node.textBody) { + const layoutBodyPr = getPhBodyPr(layoutMatch.node) + if (layoutBodyPr) { + node.textBody.layoutBodyProperties = layoutBodyPr + } + } + + if (xfrm) continue + } + } + + if (master) { + const match = findMatchingPlaceholder(master.placeholders, type, idx) + if (match) { + const xfrm = getPhXfrm(match) + if (xfrm) { + if (sizeIsEmpty) { + node.position = xfrm.position + node.size = xfrm.size + } else if (positionLooksDefault) { + node.position = xfrm.position + } + } + + // Inherit bodyPr from master placeholder as fallback + if ('textBody' in node && node.textBody && !node.textBody.layoutBodyProperties) { + const masterBodyPr = getPhBodyPr(match) + if (masterBodyPr) { + node.textBody.layoutBodyProperties = masterBodyPr + } + } + } + } + } +} diff --git a/apps/sim/lib/pptx-renderer/model/slide.ts b/apps/sim/lib/pptx-renderer/model/slide.ts new file mode 100644 index 00000000000..fe5dacecb44 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/model/slide.ts @@ -0,0 +1,329 @@ +/** + * Slide parser — converts a slide XML into a structured SlideData + * with typed node objects for each shape on the slide. + */ + +import { type RelEntry, resolveRelTarget } from '../parser/rel-parser' +import { parseXml, type SafeXmlNode } from '../parser/xml-parser' +import { parseBaseProps } from './nodes/base-node' +import { type ChartNodeData, parseChartNode } from './nodes/chart-node' +import { type GroupNodeData, parseGroupNode } from './nodes/group-node' +import { type PicNodeData, parsePicNode } from './nodes/pic-node' +import { parseShapeNode, type ShapeNodeData } from './nodes/shape-node' +import { parseTableNode, type TableNodeData } from './nodes/table-node' + +export type SlideNode = ShapeNodeData | PicNodeData | TableNodeData | GroupNodeData | ChartNodeData + +export interface SlideData { + index: number + nodes: SlideNode[] + background?: SafeXmlNode + layoutIndex: string + rels: Map + /** Full path to the slide file (e.g. 
"ppt/slides/slide3.xml"). */ + slidePath: string + /** When false, shapes from the layout and master should NOT be rendered on this slide. */ + showMasterSp: boolean +} + +/** + * Check whether a graphicFrame contains a table (`a:tbl`). + */ +function isTableFrame(node: SafeXmlNode): boolean { + const graphic = node.child('graphic') + const graphicData = graphic.child('graphicData') + return graphicData.child('tbl').exists() +} + +/** + * Check whether a graphicFrame contains a chart. + */ +function isChartFrame(node: SafeXmlNode): boolean { + const graphic = node.child('graphic') + const graphicData = graphic.child('graphicData') + const uri = graphicData.attr('uri') || '' + return uri.includes('chart') +} + +/** + * Find p:pic inside OLE graphicData (mc:AlternateContent > mc:Fallback or mc:Choice > p:oleObj > p:pic). + * Returns the pic node if it has blipFill with embed (so we can render the preview image). + */ +function findOleFallbackPic(graphicFrame: SafeXmlNode): SafeXmlNode | null { + const graphic = graphicFrame.child('graphic') + const graphicData = graphic.child('graphicData') + const uri = graphicData.attr('uri') || '' + if (!uri.includes('ole')) return null + + const altContent = graphicData.child('AlternateContent') + if (!altContent.exists()) return null + + for (const branch of ['Fallback', 'Choice'] as const) { + const oleObj = altContent.child(branch).child('oleObj') + if (!oleObj.exists()) continue + const pic = oleObj.child('pic') + if (!pic.exists()) continue + const blipFill = pic.child('blipFill') + const blip = blipFill.child('blip') + const embed = blip.attr('embed') ?? blip.attr('r:embed') + if (embed) return pic + } + return null +} + +/** + * Parse a graphicFrame that contains an OLE object with a fallback picture (preview image). + * Uses the frame's position/size and the inner pic's blip embed. + * Exported for use in GroupRenderer when parsing group children. 
+ */ +export function parseOleFrameAsPicture(graphicFrame: SafeXmlNode): PicNodeData | undefined { + const pic = findOleFallbackPic(graphicFrame) + if (!pic) return undefined + + const base = parseBaseProps(graphicFrame) + const blipFill = pic.child('blipFill') + const blip = blipFill.child('blip') + const blipEmbed = blip.attr('embed') ?? blip.attr('r:embed') + const blipLink = blip.attr('link') ?? blip.attr('r:link') + if (!blipEmbed) return undefined + + return { + ...base, + nodeType: 'picture', + blipEmbed, + blipLink, + source: graphicFrame, + } +} + +/** + * Check whether a graphicFrame contains a SmartArt diagram. + */ +function isDiagramFrame(node: SafeXmlNode): boolean { + const graphic = node.child('graphic') + const graphicData = graphic.child('graphicData') + const uri = graphicData.attr('uri') || '' + return uri.includes('diagram') +} + +/** + * Parse a SmartArt diagram graphicFrame by resolving the diagram drawing fallback XML. + * The drawing XML contains pre-rendered shapes in a spTree that we can display as a group. + */ +function parseDiagramFrame( + graphicFrame: SafeXmlNode, + rels: Map, + slidePath: string, + diagramDrawings: Map +): GroupNodeData | undefined { + const base = parseBaseProps(graphicFrame) + const slideDir = slidePath.substring(0, slidePath.lastIndexOf('/')) + const drawingCandidates = Array.from(rels.values()) + .filter( + (entry) => entry.type.includes('diagramDrawing') || entry.target.includes('diagrams/drawing') + ) + .map((entry) => { + const target = entry.target + const match = target.match(/drawing(\d+)/) + return { + target, + num: match ? Number.parseInt(match[1], 10) : undefined, + } + }) + + // Extract the diagram data rId from the relIds element to identify which diagram this is + const graphic = graphicFrame.child('graphic') + const graphicData = graphic.child('graphicData') + const relIds = graphicData.child('relIds') + + // Strategy 1: Match data file number to drawing file number + // e.g. 
data3.xml → drawing3.xml + if (relIds.exists()) { + const dmRId = relIds.attr('r:dm') ?? relIds.attr('dm') + if (dmRId) { + const dmRel = rels.get(dmRId) + if (dmRel) { + // Extract the number from the data target (e.g. "data3" → "3") + const numMatch = dmRel.target.match(/data(\d+)/) + if (numMatch) { + const drawingNum = Number.parseInt(numMatch[1], 10) + // Prefer exact drawingN; if absent, use the nearest numbered drawing relation. + const ordered = drawingCandidates.slice().sort((a, b) => { + const da = a.num === undefined ? Number.POSITIVE_INFINITY : Math.abs(a.num - drawingNum) + const db = b.num === undefined ? Number.POSITIVE_INFINITY : Math.abs(b.num - drawingNum) + return da - db + }) + for (const candidate of ordered) { + const drawingPath = resolveRelTarget(slideDir, candidate.target) + const drawingXml = diagramDrawings.get(drawingPath) + if (drawingXml) { + return buildDiagramGroup(base, drawingXml) + } + } + } + } + } + } + + // Strategy 2: Fallback - find any diagramDrawing relationship + for (const candidate of drawingCandidates) { + const drawingPath = resolveRelTarget(slideDir, candidate.target) + const drawingXml = diagramDrawings.get(drawingPath) + if (drawingXml) { + return buildDiagramGroup(base, drawingXml) + } + } + + return undefined +} + +/** + * Build a GroupNodeData from a diagram drawing XML string. + * Diagram drawings use dsp: namespace (drawingml 2008); structure is dsp:drawing > dsp:spTree > dsp:sp. + * Diagram shapes are positioned in the graphicFrame's own coordinate space. 
+ */ +function buildDiagramGroup( + base: ReturnType, + drawingXml: string +): GroupNodeData { + const drawingRoot = parseXml(drawingXml) + const spTree = drawingRoot.child('spTree') + if (!spTree.exists()) { + return { + ...base, + nodeType: 'group', + childOffset: { x: 0, y: 0 }, + childExtent: { w: base.size.w, h: base.size.h }, + children: [], + } + } + + const CHILD_TAGS = new Set(['sp', 'pic', 'grpSp', 'graphicFrame', 'cxnSp']) + const children: SafeXmlNode[] = [] + + for (const child of spTree.allChildren()) { + if (CHILD_TAGS.has(child.localName)) { + children.push(child) + } + } + + // Use the graphicFrame's own dimensions as the child coordinate space. + // Diagram shapes are positioned in the frame's coordinate space (EMU converted to px). + // Using frame dimensions gives a 1:1 scale, preserving original positions and sizes. + // This avoids enlarging shapes when the bounding box is smaller than the frame. + const extentW = Math.max(1, base.size.w) + const extentH = Math.max(1, base.size.h) + + return { + ...base, + nodeType: 'group', + childOffset: { x: 0, y: 0 }, + childExtent: { w: extentW, h: extentH }, + children, + } +} + +/** + * Parse a single child node from spTree, dispatching to the appropriate parser. + */ +function parseChildNode( + child: SafeXmlNode, + rels: Map, + slidePath: string, + diagramDrawings?: Map +): SlideNode | undefined { + const tag = child.localName + + switch (tag) { + case 'sp': + case 'cxnSp': + return parseShapeNode(child) + case 'pic': + return parsePicNode(child) + case 'grpSp': + return parseGroupNode(child) + case 'graphicFrame': + if (isTableFrame(child)) { + return parseTableNode(child) + } + if (isChartFrame(child)) { + return parseChartNode(child, rels, slidePath) + } + // SmartArt diagram with drawing fallback + if (isDiagramFrame(child) && diagramDrawings) { + return parseDiagramFrame(child, rels, slidePath, diagramDrawings) + } + // OLE object with fallback picture (e.g. 
embedded PDF preview on slide 34) + { + const olePic = parseOleFrameAsPicture(child) + if (olePic) return olePic + } + // Non-table/chart/ole graphic frames — skip + return undefined + default: + return undefined + } +} + +/** + * Find the layout relationship target from a slide's rels map. + * The relationship type URI for slide layouts ends with "slideLayout". + */ +function findLayoutRel(rels: Map): string { + for (const [, entry] of rels) { + if (entry.type.includes('slideLayout')) { + return entry.target + } + } + return '' +} + +/** + * Parse a slide XML root (`p:sld`) into SlideData. + * + * @param root Parsed XML root of the slide + * @param index Zero-based slide index + * @param rels Relationship entries for this slide + * @param slidePath Full path to the slide file (e.g. "ppt/slides/slide1.xml") + */ +export function parseSlide( + root: SafeXmlNode, + index: number, + rels: Map, + slidePath = '', + diagramDrawings?: Map +): SlideData { + const cSld = root.child('cSld') + + // --- Background --- + const bg = cSld.child('bg') + const background = bg.exists() ? 
bg : undefined + + // --- Parse shape tree children --- + const spTree = cSld.child('spTree') + const nodes: SlideNode[] = [] + + for (const child of spTree.allChildren()) { + const node = parseChildNode(child, rels, slidePath, diagramDrawings) + if (node) { + nodes.push(node) + } + } + + // --- Layout relationship --- + const layoutIndex = findLayoutRel(rels) + + // --- showMasterSp: if "0", layout/master shapes should not be rendered on this slide --- + const showMasterSpAttr = root.attr('showMasterSp') + const showMasterSp = showMasterSpAttr !== '0' + + return { + index, + nodes, + background, + layoutIndex, + rels, + slidePath, + showMasterSp, + } +} diff --git a/apps/sim/lib/pptx-renderer/model/theme.ts b/apps/sim/lib/pptx-renderer/model/theme.ts new file mode 100644 index 00000000000..2fde21c779d --- /dev/null +++ b/apps/sim/lib/pptx-renderer/model/theme.ts @@ -0,0 +1,95 @@ +/** + * Theme parser — extracts color scheme and font definitions from a:theme XML. + */ + +import type { SafeXmlNode } from '../parser/xml-parser' + +export interface ThemeData { + colorScheme: Map + majorFont: { latin: string; ea: string; cs: string } + minorFont: { latin: string; ea: string; cs: string } + fillStyles: SafeXmlNode[] // from a:fillStyleLst children (indexed 1-based) + lineStyles: SafeXmlNode[] // from a:lnStyleLst children (indexed 1-based) + effectStyles: SafeXmlNode[] // from a:effectStyleLst children (indexed 1-based) +} + +/** Known color scheme slot names in a:clrScheme. */ +const COLOR_SLOTS = [ + 'dk1', + 'dk2', + 'lt1', + 'lt2', + 'accent1', + 'accent2', + 'accent3', + 'accent4', + 'accent5', + 'accent6', + 'hlink', + 'folHlink', +] as const + +/** + * Extract a hex color value from a color definition node. + * Handles both `a:srgbClr@val` and `a:sysClr@lastClr`. 
+ */ +function extractColor(node: SafeXmlNode): string | undefined { + const srgb = node.child('srgbClr') + if (srgb.exists()) { + return srgb.attr('val') + } + const sys = node.child('sysClr') + if (sys.exists()) { + return sys.attr('lastClr') ?? sys.attr('val') + } + return undefined +} + +/** + * Parse font info from a majorFont or minorFont node. + * Extracts typeface attributes from latin, ea, and cs child elements. + */ +function parseFontInfo(fontNode: SafeXmlNode): { latin: string; ea: string; cs: string } { + return { + latin: fontNode.child('latin').attr('typeface') ?? '', + ea: fontNode.child('ea').attr('typeface') ?? '', + cs: fontNode.child('cs').attr('typeface') ?? '', + } +} + +/** + * Parse a theme XML root (`a:theme`) into ThemeData. + */ +export function parseTheme(root: SafeXmlNode): ThemeData { + const themeElements = root.child('themeElements') + + // --- Color scheme --- + const clrScheme = themeElements.child('clrScheme') + const colorScheme = new Map() + + for (const slot of COLOR_SLOTS) { + const slotNode = clrScheme.child(slot) + if (slotNode.exists()) { + const hex = extractColor(slotNode) + if (hex !== undefined) { + colorScheme.set(slot, hex) + } + } + } + + // --- Font scheme --- + const fontScheme = themeElements.child('fontScheme') + const majorFont = parseFontInfo(fontScheme.child('majorFont')) + const minorFont = parseFontInfo(fontScheme.child('minorFont')) + + // --- Format scheme --- + const fmtScheme = themeElements.child('fmtScheme') + const fillStyleLst = fmtScheme.child('fillStyleLst') + const fillStyles: SafeXmlNode[] = fillStyleLst.allChildren() + const lnStyleLst = fmtScheme.child('lnStyleLst') + const lineStyles: SafeXmlNode[] = lnStyleLst.allChildren() + const effectStyleLst = fmtScheme.child('effectStyleLst') + const effectStyles: SafeXmlNode[] = effectStyleLst.allChildren() + + return { colorScheme, majorFont, minorFont, fillStyles, lineStyles, effectStyles } +} diff --git 
a/apps/sim/lib/pptx-renderer/model/xml-helpers.ts b/apps/sim/lib/pptx-renderer/model/xml-helpers.ts new file mode 100644 index 00000000000..6af748b43a9 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/model/xml-helpers.ts @@ -0,0 +1,33 @@ +import type { SafeXmlNode } from '../parser/xml-parser' + +/** + * Check whether a shape-like node contains a placeholder definition. + */ +export function isPlaceholder(node: SafeXmlNode): boolean { + const nvSpPr = node.child('nvSpPr') + if (nvSpPr.exists()) { + const nvPr = nvSpPr.child('nvPr') + if (nvPr.child('ph').exists()) return true + } + const nvPicPr = node.child('nvPicPr') + if (nvPicPr.exists()) { + const nvPr = nvPicPr.child('nvPr') + if (nvPr.child('ph').exists()) return true + } + return false +} + +/** + * Parse all attributes of a node into a local-name keyed map. + */ +export function parseAllAttributes(node: SafeXmlNode): Map { + const result = new Map() + const el = node.element + if (!el) return result + const attrs = el.attributes + for (let i = 0; i < attrs.length; i++) { + const attr = attrs[i] + result.set(attr.localName, attr.value) + } + return result +} diff --git a/apps/sim/lib/pptx-renderer/parser/rel-parser.ts b/apps/sim/lib/pptx-renderer/parser/rel-parser.ts new file mode 100644 index 00000000000..91769f2407b --- /dev/null +++ b/apps/sim/lib/pptx-renderer/parser/rel-parser.ts @@ -0,0 +1,81 @@ +/** + * Parser for .rels (Relationship) XML files in OOXML packages. + * These files map relationship IDs (rId1, rId2, ...) to targets. + */ + +import { parseXml } from './xml-parser' + +export interface RelEntry { + type: string + target: string + targetMode?: string +} + +/** + * Parse a .rels XML string into a Map of relationship ID -> RelEntry. 
+ * + * Example input: + * ```xml + * + * + * + * ``` + */ +export function parseRels(xmlString: string): Map { + const result = new Map() + + if (!xmlString) return result + + const root = parseXml(xmlString) + if (!root.exists()) return result + + const relationships = root.children('Relationship') + for (const rel of relationships) { + const id = rel.attr('Id') + const type = rel.attr('Type') + const target = rel.attr('Target') + const targetMode = rel.attr('TargetMode') + + if (id && type !== undefined && target !== undefined) { + result.set(id, { type, target, targetMode }) + } + } + + return result +} + +/** + * Resolve a relative target path against a base path. + * + * Examples: + * resolveRelTarget('ppt/slides', '../slideLayouts/slideLayout1.xml') + * → 'ppt/slideLayouts/slideLayout1.xml' + * + * resolveRelTarget('ppt/slides', 'media/image1.png') + * → 'ppt/slides/media/image1.png' + * + * resolveRelTarget('ppt', 'slides/slide1.xml') + * → 'ppt/slides/slide1.xml' + */ +export function resolveRelTarget(basePath: string, target: string): string { + // Absolute targets (start with /) are returned as-is (strip leading /) + if (target.startsWith('/')) { + return target.slice(1) + } + + // Split the base path into segments + const baseParts = basePath.replace(/\\/g, '/').split('/').filter(Boolean) + const targetParts = target.replace(/\\/g, '/').split('/').filter(Boolean) + + // Walk through target parts, resolving '..' by popping base parts + const resolved = [...baseParts] + for (const part of targetParts) { + if (part === '..') { + resolved.pop() + } else if (part !== '.') { + resolved.push(part) + } + } + + return resolved.join('/') +} diff --git a/apps/sim/lib/pptx-renderer/parser/units.ts b/apps/sim/lib/pptx-renderer/parser/units.ts new file mode 100644 index 00000000000..e8782fa2bf9 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/parser/units.ts @@ -0,0 +1,59 @@ +/** + * Unit conversion utilities for OOXML / PPTX. 
+ * + * PPTX uses several unit systems: + * - EMU (English Metric Units): 1 inch = 914400 EMU + * - Points: 1 inch = 72 pt + * - Hundredths of a point: used for font sizes + * - 60000ths of a degree: used for angles + * - 100000ths (percentage): used for scale factors + */ + +/** EMU to pixels (at 96 DPI). */ +export function emuToPx(emu: number): number { + return (emu / 914400) * 96 +} + +/** EMU to points. */ +export function emuToPt(emu: number): number { + return emu / 12700 +} + +/** OOXML angle (60000ths of a degree) to degrees. */ +export function angleToDeg(angle: number): number { + return angle / 60000 +} + +/** OOXML percentage (100000ths) to a decimal fraction (0..1 range for 0%..100%). */ +export function pctToDecimal(pct: number): number { + return pct / 100000 +} + +/** Hundredths of a point to points (used for font sizes in OOXML). */ +export function hundredthPtToPt(val: number): number { + return val / 100 +} + +/** Points to pixels (at 96 DPI). */ +export function ptToPx(pt: number): number { + return (pt * 96) / 72 +} + +/** + * Heuristic: detect whether a value is in EMU or points. + * Values with abs > 20000 are almost certainly EMU (a single point = 12700 EMU). + */ +export function detectUnit(value: number): 'emu' | 'point' { + return Math.abs(value) > 20000 ? 'emu' : 'point' +} + +/** + * Smart conversion to pixels: auto-detects whether the value is EMU or points + * and converts accordingly. + */ +export function smartToPx(value: number): number { + if (detectUnit(value) === 'emu') { + return emuToPx(value) + } + return ptToPx(value) +} diff --git a/apps/sim/lib/pptx-renderer/parser/xml-parser.ts b/apps/sim/lib/pptx-renderer/parser/xml-parser.ts new file mode 100644 index 00000000000..b1d121fb399 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/parser/xml-parser.ts @@ -0,0 +1,105 @@ +import { createLogger } from '@sim/logger' + +const logger = createLogger('PptxXmlParser') + +/** + * Safe XML parser using browser DOMParser. 
+ * All operations are null-safe — accessing missing elements never crashes. + */ + +export class SafeXmlNode { + private readonly el: Element | null + + constructor(el: Element | null) { + this.el = el + } + + /** Get a string attribute value, or undefined if missing. */ + attr(name: string): string | undefined { + if (!this.el) return undefined + return this.el.hasAttribute(name) ? this.el.getAttribute(name)! : undefined + } + + /** Get a numeric attribute value, or undefined if missing or not a number. */ + numAttr(name: string): number | undefined { + const raw = this.attr(name) + if (raw === undefined) return undefined + const n = Number(raw) + return Number.isNaN(n) ? undefined : n + } + + /** + * Find the first child element matching the given localName (namespace-agnostic). + * Returns an empty SafeXmlNode if not found, so chaining never crashes. + */ + child(localName: string): SafeXmlNode { + if (!this.el) return new SafeXmlNode(null) + const children = this.el.children + for (let i = 0; i < children.length; i++) { + if (children[i].localName === localName) { + return new SafeXmlNode(children[i]) + } + } + return new SafeXmlNode(null) + } + + /** + * Get child elements, optionally filtered by localName (namespace-agnostic). + * If no localName is given, returns all direct child elements. + */ + children(localName?: string): SafeXmlNode[] { + if (!this.el) return [] + const result: SafeXmlNode[] = [] + const children = this.el.children + for (let i = 0; i < children.length; i++) { + if (localName === undefined || children[i].localName === localName) { + result.push(new SafeXmlNode(children[i])) + } + } + return result + } + + /** Get the text content, or empty string if the element is missing. */ + text(): string { + if (!this.el) return '' + return this.el.textContent ?? '' + } + + /** Whether the underlying element actually exists. */ + exists(): boolean { + return this.el !== null + } + + /** All direct child elements as SafeXmlNode[]. 
*/ + allChildren(): SafeXmlNode[] { + return this.children() + } + + /** The localName of the underlying element, or empty string. */ + get localName(): string { + return this.el?.localName ?? '' + } + + /** Raw access to the underlying Element (may be null). */ + get element(): Element | null { + return this.el + } +} + +/** + * Parse an XML string into a SafeXmlNode wrapping the document element. + * Uses the browser's built-in DOMParser. + */ +export function parseXml(xmlString: string): SafeXmlNode { + const parser = new DOMParser() + const doc = parser.parseFromString(xmlString, 'application/xml') + + // Check for parser errors — DOMParser returns a parsererror document on failure + const errorNode = doc.querySelector('parsererror') + if (errorNode) { + logger.warn('XML parse error', { error: errorNode.textContent ?? '' }) + return new SafeXmlNode(null) + } + + return new SafeXmlNode(doc.documentElement) +} diff --git a/apps/sim/lib/pptx-renderer/parser/zip-parser.test.ts b/apps/sim/lib/pptx-renderer/parser/zip-parser.test.ts new file mode 100644 index 00000000000..a4172359a51 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/parser/zip-parser.test.ts @@ -0,0 +1,51 @@ +import JSZip from 'jszip' +import { describe, expect, it } from 'vitest' +import { parseZip } from '@/lib/pptx-renderer/parser/zip-parser' + +async function createZip(entries: Record): Promise { + const zip = new JSZip() + for (const [path, content] of Object.entries(entries)) { + zip.file(path, content) + } + return zip.generateAsync({ type: 'arraybuffer' }) +} + +describe('parseZip', () => { + it('extracts PPTX package parts into categorized maps', async () => { + const buffer = await createZip({ + '[Content_Types].xml': '', + 'ppt/presentation.xml': '', + 'ppt/_rels/presentation.xml.rels': '', + 'ppt/slides/slide1.xml': '', + 'ppt/slides/_rels/slide1.xml.rels': '', + 'ppt/media/image1.png': new Uint8Array([1, 2, 3]), + }) + + const files = await parseZip(buffer) + + 
expect(files.contentTypes).toBe('') + expect(files.presentation).toBe('') + expect(files.slides.get('ppt/slides/slide1.xml')).toBe('') + expect(files.slideRels.get('ppt/slides/_rels/slide1.xml.rels')).toBe('') + expect(files.media.get('ppt/media/image1.png')).toEqual(new Uint8Array([1, 2, 3])) + }) + + it('rejects archives that exceed entry limits', async () => { + const buffer = await createZip({ + '[Content_Types].xml': '', + 'ppt/presentation.xml': '', + }) + + await expect(parseZip(buffer, { maxEntries: 1 })).rejects.toThrow( + 'PPTX zip limit exceeded: entries 2 > maxEntries 1' + ) + }) + + it('rejects archives that exceed media byte limits', async () => { + const buffer = await createZip({ + 'ppt/media/image1.png': new Uint8Array([1, 2, 3, 4]), + }) + + await expect(parseZip(buffer, { maxMediaBytes: 3 })).rejects.toThrow('PPTX zip limit exceeded') + }) +}) diff --git a/apps/sim/lib/pptx-renderer/parser/zip-parser.ts b/apps/sim/lib/pptx-renderer/parser/zip-parser.ts new file mode 100644 index 00000000000..78243649c71 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/parser/zip-parser.ts @@ -0,0 +1,269 @@ +/** + * PPTX zip archive parser. + * Extracts and categorizes all files from a .pptx (which is a zip archive). + */ + +import type { JSZipObject } from 'jszip' +import JSZip from 'jszip' + +export interface PptxFiles { + contentTypes: string + presentation: string + presentationRels: string + slides: Map + slideRels: Map + slideLayouts: Map + slideLayoutRels: Map + slideMasters: Map + slideMasterRels: Map + themes: Map + media: Map + tableStyles?: string + charts: Map // ppt/charts/chart*.xml + chartStyles: Map // ppt/charts/style*.xml + chartColors: Map // ppt/charts/colors*.xml + diagramDrawings: Map // ppt/diagrams/drawing*.xml (SmartArt fallback) +} + +export interface ZipParseLimits { + /** Maximum number of non-directory entries in the zip archive. */ + maxEntries?: number + /** Maximum uncompressed size for any single entry (bytes). 
*/ + maxEntryUncompressedBytes?: number + /** Maximum total uncompressed size across all entries (bytes). */ + maxTotalUncompressedBytes?: number + /** Maximum uncompressed size across media entries under `ppt/media/` (bytes). */ + maxMediaBytes?: number + /** Maximum concurrent zip entry reads during parsing. */ + maxConcurrency?: number +} + +function throwZipLimitExceeded(reason: string): never { + throw new Error(`PPTX zip limit exceeded: ${reason}`) +} + +function readUncompressedSize(file: JSZipObject): number | undefined { + const data = (file as JSZipObject & { _data?: { uncompressedSize?: number } })._data + const size = data?.uncompressedSize + return typeof size === 'number' && Number.isFinite(size) ? size : undefined +} + +async function mapWithConcurrency( + items: T[], + concurrency: number, + mapper: (item: T) => Promise +): Promise { + if (items.length === 0) return + const workerCount = Math.min(concurrency, items.length) + let cursor = 0 + + const workers = Array.from({ length: workerCount }, async () => { + while (true) { + const index = cursor++ + if (index >= items.length) return + await mapper(items[index]) + } + }) + + await Promise.all(workers) +} + +/** + * Parse a .pptx file buffer and extract all relevant files, categorized by type. + */ +export async function parseZip( + buffer: ArrayBuffer, + limits: ZipParseLimits = {} +): Promise { + const maxConcurrency = limits.maxConcurrency ?? 
8 + if (!Number.isInteger(maxConcurrency) || maxConcurrency < 1) { + throwZipLimitExceeded(`maxConcurrency ${limits.maxConcurrency} must be an integer >= 1`) + } + + const zip = await JSZip.loadAsync(buffer) + const entries = Object.entries(zip.files).filter(([, file]) => !file.dir) + + if (limits.maxEntries !== undefined && entries.length > limits.maxEntries) { + throwZipLimitExceeded(`entries ${entries.length} > maxEntries ${limits.maxEntries}`) + } + + const knownSizeByPath = new Map() + let knownTotalBytes = 0 + let knownMediaBytes = 0 + + for (const [rawPath, file] of entries) { + const normalizedPath = rawPath.replace(/\\/g, '/') + const size = readUncompressedSize(file) + if (size === undefined) continue + + knownSizeByPath.set(normalizedPath, size) + + if (limits.maxEntryUncompressedBytes !== undefined && size > limits.maxEntryUncompressedBytes) { + throwZipLimitExceeded( + `${normalizedPath} is ${size} bytes > maxEntryUncompressedBytes ${limits.maxEntryUncompressedBytes}` + ) + } + + knownTotalBytes += size + if ( + limits.maxTotalUncompressedBytes !== undefined && + knownTotalBytes > limits.maxTotalUncompressedBytes + ) { + throwZipLimitExceeded( + `total uncompressed bytes ${knownTotalBytes} > maxTotalUncompressedBytes ${limits.maxTotalUncompressedBytes}` + ) + } + + if (normalizedPath.startsWith('ppt/media/')) { + knownMediaBytes += size + if (limits.maxMediaBytes !== undefined && knownMediaBytes > limits.maxMediaBytes) { + throwZipLimitExceeded( + `media bytes ${knownMediaBytes} > maxMediaBytes ${limits.maxMediaBytes}` + ) + } + } + } + + const result: PptxFiles = { + contentTypes: '', + presentation: '', + presentationRels: '', + slides: new Map(), + slideRels: new Map(), + slideLayouts: new Map(), + slideLayoutRels: new Map(), + slideMasters: new Map(), + slideMasterRels: new Map(), + themes: new Map(), + media: new Map(), + charts: new Map(), + chartStyles: new Map(), + chartColors: new Map(), + diagramDrawings: new Map(), + } + + let 
unknownMediaBytes = 0 + + await mapWithConcurrency(entries, maxConcurrency, async ([path, file]) => { + const normalizedPath = path.replace(/\\/g, '/') + + // --- Content Types --- + if (normalizedPath === '[Content_Types].xml') { + result.contentTypes = await file.async('string') + return + } + + // --- Presentation --- + if (normalizedPath === 'ppt/presentation.xml') { + result.presentation = await file.async('string') + return + } + + // --- Presentation Rels --- + if (normalizedPath === 'ppt/_rels/presentation.xml.rels') { + result.presentationRels = await file.async('string') + return + } + + // --- Table Styles --- + if (normalizedPath === 'ppt/tableStyles.xml') { + result.tableStyles = await file.async('string') + return + } + + // --- Media (binary) --- + if (normalizedPath.startsWith('ppt/media/')) { + const bytes = await file.async('uint8array') + if (!knownSizeByPath.has(normalizedPath)) { + const size = bytes.byteLength + if ( + limits.maxEntryUncompressedBytes !== undefined && + size > limits.maxEntryUncompressedBytes + ) { + throwZipLimitExceeded( + `${normalizedPath} is ${size} bytes > maxEntryUncompressedBytes ${limits.maxEntryUncompressedBytes}` + ) + } + unknownMediaBytes += size + if ( + limits.maxMediaBytes !== undefined && + knownMediaBytes + unknownMediaBytes > limits.maxMediaBytes + ) { + throwZipLimitExceeded( + `media bytes ${knownMediaBytes + unknownMediaBytes} > maxMediaBytes ${limits.maxMediaBytes}` + ) + } + } + result.media.set(normalizedPath, bytes) + return + } + + // --- Slide Rels (must check before slides to avoid false match) --- + if (/^ppt\/slides\/_rels\/slide\d+\.xml\.rels$/.test(normalizedPath)) { + result.slideRels.set(normalizedPath, await file.async('string')) + return + } + + // --- Slides --- + if (/^ppt\/slides\/slide\d+\.xml$/.test(normalizedPath)) { + result.slides.set(normalizedPath, await file.async('string')) + return + } + + // --- Slide Layout Rels --- + if 
(/^ppt\/slideLayouts\/_rels\/slideLayout\d+\.xml\.rels$/.test(normalizedPath)) { + result.slideLayoutRels.set(normalizedPath, await file.async('string')) + return + } + + // --- Slide Layouts --- + if (/^ppt\/slideLayouts\/slideLayout\d+\.xml$/.test(normalizedPath)) { + result.slideLayouts.set(normalizedPath, await file.async('string')) + return + } + + // --- Slide Master Rels --- + if (/^ppt\/slideMasters\/_rels\/slideMaster\d+\.xml\.rels$/.test(normalizedPath)) { + result.slideMasterRels.set(normalizedPath, await file.async('string')) + return + } + + // --- Slide Masters --- + if (/^ppt\/slideMasters\/slideMaster\d+\.xml$/.test(normalizedPath)) { + result.slideMasters.set(normalizedPath, await file.async('string')) + return + } + + // --- Themes --- + if (/^ppt\/theme\/theme\d+\.xml$/.test(normalizedPath)) { + result.themes.set(normalizedPath, await file.async('string')) + return + } + + // --- Charts --- + if (/^ppt\/charts\/chart\d+\.xml$/.test(normalizedPath)) { + result.charts.set(normalizedPath, await file.async('string')) + return + } + + // --- Chart Styles --- + if (/^ppt\/charts\/style\d+\.xml$/.test(normalizedPath)) { + result.chartStyles.set(normalizedPath, await file.async('string')) + return + } + + // --- Chart Colors --- + if (/^ppt\/charts\/colors\d+\.xml$/.test(normalizedPath)) { + result.chartColors.set(normalizedPath, await file.async('string')) + return + } + + // --- Diagram Drawings (SmartArt fallback) --- + if (/^ppt\/diagrams\/drawing\d+\.xml$/.test(normalizedPath)) { + result.diagramDrawings.set(normalizedPath, await file.async('string')) + return + } + }) + + return result +} diff --git a/apps/sim/lib/pptx-renderer/renderer/background-renderer.test.ts b/apps/sim/lib/pptx-renderer/renderer/background-renderer.test.ts new file mode 100644 index 00000000000..c451a3fd400 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/renderer/background-renderer.test.ts @@ -0,0 +1,98 @@ +/** + * @vitest-environment jsdom + */ +import { describe, expect, it 
} from 'vitest'
import { parseXml } from '@/lib/pptx-renderer/parser/xml-parser'
import { renderBackground } from '@/lib/pptx-renderer/renderer/background-renderer'
import type { RenderContext } from '@/lib/pptx-renderer/renderer/render-context'

// NOTE(review): the XML argument here was lost in this capture (angle-bracket
// content stripped) — restore the fixture from the original file.
const EMPTY_NODE = parseXml(
  ''
)

// Build a minimal RenderContext whose slide carries the given background XML.
// Theme maps 'bg1' to black so bgRef color resolution is observable in tests.
function createContext(backgroundXml: string): RenderContext {
  const background = parseXml(backgroundXml)
  const slide = {
    index: 0,
    nodes: [],
    background,
    layoutIndex: '',
    rels: new Map(),
    slidePath: 'ppt/slides/slide1.xml',
    showMasterSp: true,
  }

  return {
    presentation: {
      width: 960,
      height: 540,
      slides: [slide],
      layouts: new Map(),
      masters: new Map(),
      themes: new Map(),
      slideToLayout: new Map(),
      layoutToMaster: new Map(),
      masterToTheme: new Map(),
      media: new Map(),
      charts: new Map(),
      isWps: false,
    },
    slide,
    theme: {
      colorScheme: new Map([['bg1', '000000']]),
      majorFont: { latin: 'Calibri', ea: '', cs: '' },
      minorFont: { latin: 'Calibri', ea: '', cs: '' },
      fillStyles: [],
      lineStyles: [],
      effectStyles: [],
    },
    master: {
      colorMap: new Map(),
      textStyles: {},
      placeholders: [],
      spTree: EMPTY_NODE,
      rels: new Map(),
    },
    layout: {
      placeholders: [],
      spTree: EMPTY_NODE,
      rels: new Map(),
      showMasterSp: true,
    },
    mediaUrlCache: new Map(),
    colorCache: new Map(),
  }
}

describe('renderBackground', () => {
  it('renders bgRef colors that resolve to black after modifiers', () => {
    // NOTE(review): the bgRef fixture XML was stripped in this capture —
    // restore the markup (a bg/bgRef with a scheme color + modifiers that
    // resolve to black) from the original file.
    const ctx = createContext(`

    `)
    const container = document.createElement('div')

    renderBackground(ctx, container)

    expect(container.style.backgroundColor).toBe('rgb(0, 0, 0)')
  })

  it('keeps bgRef without a color node on the white fallback', () => {
    // NOTE(review): fixture XML stripped in this capture — restore a bg/bgRef
    // that carries no color child from the original file.
    const ctx = createContext(`

    `)
    const container = document.createElement('div')

    renderBackground(ctx, container)

    expect(container.style.backgroundColor).toBe('rgb(255, 255, 255)')
  })
})
diff --git
a/apps/sim/lib/pptx-renderer/renderer/background-renderer.ts b/apps/sim/lib/pptx-renderer/renderer/background-renderer.ts new file mode 100644 index 00000000000..be227229642 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/renderer/background-renderer.ts @@ -0,0 +1,208 @@ +/** + * Background renderer — resolves and applies slide/layout/master backgrounds. + */ + +import type { RelEntry } from '../parser/rel-parser' +import type { SafeXmlNode } from '../parser/xml-parser' +import { hexToRgb } from '../utils/color' +import { getOrCreateBlobUrl, resolveMediaPath } from '../utils/media' +import type { RenderContext } from './render-context' +import { resolveColor, resolveFill } from './style-resolver' + +const COLOR_NODE_NAMES = new Set([ + 'srgbClr', + 'schemeClr', + 'sysClr', + 'prstClr', + 'hslClr', + 'scrgbClr', +]) + +/** + * Check whether a node contains a supported OOXML color node. + */ +function hasColorNode(node: SafeXmlNode): boolean { + if (COLOR_NODE_NAMES.has(node.localName)) { + return true + } + + return node.allChildren().some((child) => COLOR_NODE_NAMES.has(child.localName)) +} + +/** + * Composite a semi-transparent color on white so the result is always opaque. + * This prevents the slide background from becoming see-through when embedded + * in containers with dark backgrounds (e.g. e2e-compare panels). + */ +function compositeOnWhite(r: number, g: number, b: number, a: number): string { + const cr = Math.round(r * a + 255 * (1 - a)) + const cg = Math.round(g * a + 255 * (1 - a)) + const cb = Math.round(b * a + 255 * (1 - a)) + return `rgb(${cr},${cg},${cb})` +} + +/** + * Render the background for a slide onto the container element. + * + * Background priority: slide.background -> layout.background -> master.background. + * The first found background is used. 
+ */ +export function renderBackground(ctx: RenderContext, container: HTMLElement): void { + // Find the first available background in the inheritance chain, + // and track which rels map to use for resolving image references + let bgNode: SafeXmlNode | undefined + let bgRels: Map = ctx.slide.rels + + if (ctx.slide.background) { + bgNode = ctx.slide.background + bgRels = ctx.slide.rels + } else if (ctx.layout.background) { + bgNode = ctx.layout.background + bgRels = ctx.layout.rels + } else if (ctx.master.background) { + bgNode = ctx.master.background + bgRels = ctx.master.rels + } + + if (!bgNode) { + container.style.backgroundColor = '#FFFFFF' + return + } + + // Parse p:bg > p:bgPr + const bgPr = bgNode.child('bgPr') + if (bgPr.exists()) { + renderBgPr(bgPr, ctx, container, bgRels) + return + } + + // Parse p:bg > p:bgRef (theme reference) + const bgRef = bgNode.child('bgRef') + if (bgRef.exists()) { + renderBgRef(bgRef, ctx, container) + return + } + + // Fallback + container.style.backgroundColor = '#FFFFFF' +} + +/** + * Render background from bgPr (background properties). + * Contains direct fill definitions: solidFill, gradFill, blipFill, etc. + */ +function renderBgPr( + bgPr: SafeXmlNode, + ctx: RenderContext, + container: HTMLElement, + rels?: Map +): void { + // solidFill + const solidFill = bgPr.child('solidFill') + if (solidFill.exists()) { + const { color, alpha } = resolveColor(solidFill, ctx) + const hex = color.startsWith('#') ? 
color : `#${color}` + if (alpha < 1) { + const { r, g, b } = hexToRgb(hex) + container.style.backgroundColor = compositeOnWhite(r, g, b, alpha) + } else { + container.style.backgroundColor = hex + } + return + } + + // gradFill + const gradFill = bgPr.child('gradFill') + if (gradFill.exists()) { + const css = resolveFill(bgPr, ctx) + if (css) { + container.style.background = css + } + return + } + + // blipFill (image background) + const blipFill = bgPr.child('blipFill') + if (blipFill.exists()) { + renderBlipBackground(blipFill, ctx, container, rels) + return + } + + // noFill — still render as white; the slide is a self-contained element + // and transparent backgrounds break when embedded in dark containers + const noFill = bgPr.child('noFill') + if (noFill.exists()) { + container.style.backgroundColor = '#FFFFFF' + return + } +} + +/** + * Render background from bgRef (theme format scheme reference). + * Simplified: just resolve the color from the reference. + */ +function renderBgRef(bgRef: SafeXmlNode, ctx: RenderContext, container: HTMLElement): void { + // bgRef may contain a color child (schemeClr, srgbClr, etc.) + if (!hasColorNode(bgRef)) { + container.style.backgroundColor = '#FFFFFF' + return + } + + const { color, alpha } = resolveColor(bgRef, ctx) + const hex = color.startsWith('#') ? color : `#${color}` + if (alpha < 1) { + const { r, g, b } = hexToRgb(hex) + container.style.backgroundColor = compositeOnWhite(r, g, b, alpha) + } else { + container.style.backgroundColor = hex + } +} + +/** + * Render a blip (image) fill as a CSS background. + */ +function renderBlipBackground( + blipFill: SafeXmlNode, + ctx: RenderContext, + container: HTMLElement, + rels?: Map +): void { + const blip = blipFill.child('blip') + const embedId = blip.attr('embed') ?? blip.attr('r:embed') + + if (!embedId) return + + // Resolve image from rels + media (use provided rels or fall back to slide rels) + const relsMap = rels ?? 
ctx.slide.rels + const rel = relsMap.get(embedId) + if (!rel) return + + const mediaPath = resolveMediaPath(rel.target) + const data = ctx.presentation.media.get(mediaPath) + if (!data) return + + const url = getOrCreateBlobUrl(mediaPath, data, ctx.mediaUrlCache) + + container.style.backgroundImage = `url("${url}")` + + // Check for stretch or tile mode + const stretch = blipFill.child('stretch') + if (stretch.exists()) { + container.style.backgroundSize = 'cover' + container.style.backgroundPosition = 'center' + container.style.backgroundRepeat = 'no-repeat' + + // Parse fillRect for non-uniform stretch + const fillRect = stretch.child('fillRect') + if (fillRect.exists()) { + // fillRect specifies insets — if all zero, it's a full stretch + container.style.backgroundSize = '100% 100%' + } + } + + const tile = blipFill.child('tile') + if (tile.exists()) { + container.style.backgroundRepeat = 'repeat' + container.style.backgroundSize = 'auto' + } +} diff --git a/apps/sim/lib/pptx-renderer/renderer/chart-renderer.ts b/apps/sim/lib/pptx-renderer/renderer/chart-renderer.ts new file mode 100644 index 00000000000..2fbbc571813 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/renderer/chart-renderer.ts @@ -0,0 +1,3413 @@ +import { createLogger } from '@sim/logger' + +const logger = createLogger('PptxChartRenderer') + +/** + * Chart renderer — converts OOXML chart XML into ECharts visualizations. 
 */

import * as echarts from 'echarts'
import type { ChartNodeData } from '../model/nodes/chart-node'
import type { SafeXmlNode } from '../parser/xml-parser'
import type { RenderContext } from './render-context'
import { resolveColor } from './style-resolver'

// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------

interface SeriesData {
  name: string
  order: number // c:order val — used to sort series into correct sequence
  categories: string[]
  values: number[]
  xValues?: number[] // scatter chart x values from c:xVal
  bubbleSizes?: number[] // bubble chart sizes from c:bubbleSize
  colorHex?: string | object // optional explicit series color (hex string or ECharts gradient)
  dataPointColors?: (string | undefined)[] // per-point colors (for pie charts)
  formatCode?: string // numCache formatCode (e.g. "0%", "0.0%", "General")
  markerSymbol?: string // OOXML c:marker > c:symbol val
  markerSize?: number // OOXML c:marker > c:size val (points)
  smooth?: boolean // OOXML c:smooth val for scatter/line-like charts
  lineWidth?: number // c:spPr > a:ln@w converted to renderer px scale
}

/** Parsed axis information from plotArea valAx / catAx / dateAx. */
interface AxisInfo {
  deleted: boolean
  tickLblPos: string // 'nextTo' | 'none' | 'high' | 'low'
  numFmt?: string // formatCode from axis numFmt
  min?: number
  max?: number
  hasMajorGridlines: boolean
  orientation: string // 'minMax' | 'maxMin'
  labelColor?: string // hex color from txPr for axis labels
  labelFontSize?: number // px from txPr defRPr@sz
  lineColor?: string // hex color from spPr > ln for axis line
}

/** Which data-label components a chart/series/point shows, plus label styling. */
interface DataLabelConfig {
  showVal: boolean
  showCatName: boolean
  showSerName: boolean
  showPercent: boolean
  position?: string // 'outEnd', 'inEnd', 'ctr', 'bestFit'
  color?: string // text color from dLbls > txPr
  fontSize?: number // font size from dLbls > txPr > defRPr@sz
  bold?: boolean // font bold from dLbls > txPr > defRPr@b
}

type OoxmlChartType =
  | 'barChart'
  | 'bar3DChart'
  | 'lineChart'
  | 'line3DChart'
  | 'areaChart'
  | 'area3DChart'
  | 'pieChart'
  | 'pie3DChart'
  | 'doughnutChart'
  | 'radarChart'
  | 'scatterChart'
  | 'bubbleChart'
  | 'stockChart'
  | 'surface3DChart'

// ---------------------------------------------------------------------------
// Chart Type Mapping
// ---------------------------------------------------------------------------

const CHART_TYPE_ELEMENTS: OoxmlChartType[] = [
  'barChart',
  'bar3DChart',
  'lineChart',
  'line3DChart',
  'areaChart',
  'area3DChart',
  'pieChart',
  'pie3DChart',
  'doughnutChart',
  'radarChart',
  'scatterChart',
  'bubbleChart',
  'stockChart',
  'surface3DChart',
]

// ---------------------------------------------------------------------------
// Data Extraction Helpers
// ---------------------------------------------------------------------------

/**
 * Extract text values from a strRef or strCache structure.
 * Path: strRef > strCache > pt (with idx attr) > v
 */
function extractStringValues(refNode: SafeXmlNode): string[] {
  const cache = refNode.child('strRef').exists()
    ? refNode.child('strRef').child('strCache')
    : refNode.child('strCache')

  if (!cache.exists()) {
    // Try numRef > numCache as fallback (categories can be numeric)
    const numCache = refNode.child('numRef').exists()
      ? refNode.child('numRef').child('numCache')
      : refNode.child('numCache')
    if (numCache.exists()) {
      return extractNumericValuesAsStrings(numCache)
    }
    return []
  }

  // ptCount pre-sizes the array; gaps stay '' for sparse caches.
  const ptCount = cache.child('ptCount').numAttr('val') ?? 0
  const values: string[] = new Array(ptCount).fill('')

  for (const pt of cache.children('pt')) {
    const idx = pt.numAttr('idx')
    if (idx !== undefined) {
      const v = pt.child('v').text()
      values[idx] = v
    }
  }

  return values
}

/**
 * Extract the formatCode from a numRef > numCache > formatCode structure.
 */
function extractFormatCode(refNode: SafeXmlNode): string | undefined {
  const cache = refNode.child('numRef').exists()
    ? refNode.child('numRef').child('numCache')
    : refNode.child('numCache')

  if (!cache.exists()) return undefined

  const fc = cache.child('formatCode')
  if (!fc.exists()) return undefined

  const text = fc.text()
  return text || undefined
}

/**
 * Format a numeric value according to its numCache formatCode.
 * Handles percentage formats (containing '%') and general numbers.
 */
function formatValue(value: number, formatCode: string | undefined): string {
  if (!formatCode || formatCode === 'General') {
    // No format or "General": show a sensible number of decimal places.
    // Avoid ugly long floats (e.g. 0.91509433962264153 → "0.92").
    if (Number.isInteger(value)) return String(value)
    // Up to 2 decimal places, strip trailing zeros
    return Number.parseFloat(value.toFixed(2)).toString()
  }

  // Percentage format: the raw value is a fraction (e.g., 0.213 means 21.3%)
  if (formatCode.includes('%')) {
    // Determine decimal places from the format code
    const match = formatCode.match(/0\.(0+)%/)
    const decimals = match ? match[1].length : 0
    const pctValue = value * 100
    return `${pctValue.toFixed(decimals)}%`
  }

  // Numeric format with decimal places: e.g. "0.00", "#,##0.0"
  const decMatch = formatCode.match(/\.(0+|#+)/)
  if (decMatch) {
    const decimals = decMatch[1].length
    return Number.parseFloat(value.toFixed(decimals)).toString()
  }

  // Integer format: "0", "#,##0"
  if (/^[#0,]+$/.test(formatCode.replace(/[[\]"\\]/g, ''))) {
    return Math.round(value).toString()
  }

  // Fallback: reasonable precision
  if (Number.isInteger(value)) return String(value)
  return Number.parseFloat(value.toFixed(2)).toString()
}

/**
 * Extract numeric values from a numRef > numCache structure.
 */
function extractNumericValues(refNode: SafeXmlNode): number[] {
  const cache = refNode.child('numRef').exists()
    ? refNode.child('numRef').child('numCache')
    : refNode.child('numCache')

  if (!cache.exists()) return []

  const ptCount = cache.child('ptCount').numAttr('val') ?? 0
  const values: number[] = new Array(ptCount).fill(0)

  for (const pt of cache.children('pt')) {
    const idx = pt.numAttr('idx')
    if (idx !== undefined) {
      const v = Number.parseFloat(pt.child('v').text())
      // Unparseable points render as 0 rather than NaN.
      values[idx] = Number.isNaN(v) ? 0 : v
    }
  }

  return values
}

/**
 * Extract numeric cache values as strings (for category axis that uses numbers).
 */
function extractNumericValuesAsStrings(cache: SafeXmlNode): string[] {
  const ptCount = cache.child('ptCount').numAttr('val') ?? 0
  const values: string[] = new Array(ptCount).fill('')

  // Check if this is a date format — format date serial numbers to human-readable strings
  const fc = cache.child('formatCode').text()
  const isDateFmt = fc && /[yYmMdD]/.test(fc) && !/[#0]/.test(fc)

  for (const pt of cache.children('pt')) {
    const idx = pt.numAttr('idx')
    if (idx !== undefined) {
      const raw = pt.child('v').text()
      if (isDateFmt && raw) {
        values[idx] = excelSerialToDateString(Number.parseFloat(raw))
      } else {
        values[idx] = raw
      }
    }
  }

  return values
}

/**
 * Convert Excel date serial number to a locale-formatted date string.
 * Excel epoch: 1899-12-30 (accounting for the Lotus 1-2-3 leap year bug).
 */
function excelSerialToDateString(serial: number): string {
  if (!Number.isFinite(serial) || serial < 1) return String(serial)
  // Excel serial date: 1 = 1900-01-01.
  // Excel has a Lotus 1-2-3 bug where serial 60 = Feb 29, 1900 (which doesn't exist).
  // For serials > 59, subtract 1 to correct for this phantom leap day.
  const adjusted = serial > 59 ? serial - 1 : serial
  // Use UTC to avoid locale timezone drift shifting the rendered calendar date.
  const epochUtc = Date.UTC(1899, 11, 31)
  const date = new Date(epochUtc + adjusted * 86400000)
  // Format as YYYY/M/D (matches CJK locale conventions used in the test data)
  return `${date.getUTCFullYear()}/${date.getUTCMonth() + 1}/${date.getUTCDate()}`
}

/**
 * Extract series name from c:tx element.
 */
function extractSeriesName(txNode: SafeXmlNode): string {
  // Try strRef > strCache > pt > v
  const strRef = txNode.child('strRef')
  if (strRef.exists()) {
    const strCache = strRef.child('strCache')
    const pts = strCache.children('pt')
    if (pts.length > 0) {
      return pts[0].child('v').text()
    }
  }
  // Try direct v element
  const v = txNode.child('v')
  if (v.exists()) return v.text()
  return ''
}

/**
 * Resolve a color from a fill node (solidFill) to a hex string.
 */
function resolveColorToHex(fillNode: SafeXmlNode, ctx: RenderContext): string | undefined {
  try {
    const { color } = resolveColor(fillNode, ctx)
    return color.startsWith('#') ? color : `#${color}`
  } catch {
    // Unresolvable colors (e.g. missing theme entries) simply yield no color.
    return undefined
  }
}

/**
 * Resolve a color from a single gradient stop node (a:gs > color child) to hex + alpha.
 */
function resolveGradientStop(
  gsNode: SafeXmlNode,
  ctx: RenderContext
): { color: string; alpha: number; pos: number } | undefined {
  const pos = gsNode.numAttr('pos')
  if (pos === undefined) return undefined

  // Try each color type: srgbClr, schemeClr, sysClr
  for (const child of gsNode.allChildren()) {
    const ln = child.localName
    if (ln === 'srgbClr' || ln === 'schemeClr' || ln === 'sysClr' || ln === 'prstClr') {
      try {
        const result = resolveColor(gsNode, ctx)
        const hex = result.color.startsWith('#') ? result.color : `#${result.color}`
        return { color: hex, alpha: result.alpha, pos: pos / 100000 }
      } catch {
        // For sysClr with lastClr, fall back to lastClr
        if (ln === 'sysClr') {
          const lastClr = child.attr('lastClr')
          if (lastClr) {
            const alphaNode = child.child('alpha')
            const alphaVal = alphaNode.exists() ? (alphaNode.numAttr('val') ?? 100000) / 100000 : 1
            return { color: `#${lastClr}`, alpha: alphaVal, pos: pos / 100000 }
          }
        }
        return undefined
      }
    }
  }
  return undefined
}

/**
 * Extract a series color from c:ser > c:spPr fill.
 * Supports solidFill and gradFill (converted to ECharts LinearGradient).
 * Also checks c:spPr > a:ln > a:solidFill as fallback (used by line/area charts).
 */
function extractSeriesColor(ser: SafeXmlNode, ctx: RenderContext): string | object | undefined {
  const spPr = ser.child('spPr')
  if (!spPr.exists()) return undefined

  // Primary: solid fill
  const solidFill = spPr.child('solidFill')
  if (solidFill.exists()) {
    const hex = resolveColorToHex(solidFill, ctx)
    if (hex) return hex
  }

  // Gradient fill → ECharts LinearGradient
  const gradFill = spPr.child('gradFill')
  if (gradFill.exists()) {
    const grad = buildEChartsGradient(gradFill, ctx)
    if (grad) return grad
  }

  // Fallback: line color (used by lineChart, areaChart series)
  const ln = spPr.child('ln')
  if (ln.exists()) {
    const lnFill = ln.child('solidFill')
    if (lnFill.exists()) {
      const hex = resolveColorToHex(lnFill, ctx)
      if (hex) return hex
    }
  }

  return undefined
}

/** Extract the series outline width (c:spPr > a:ln@w, EMU) in renderer px scale. */
function extractSeriesLineWidth(ser: SafeXmlNode): number | undefined {
  const lnWidthEmu = ser.child('spPr').child('ln').numAttr('w')
  if (lnWidthEmu === undefined || lnWidthEmu <= 0) return undefined
  // OOXML line width uses EMU; 12700 EMU = 1pt. Renderer text sizing already
  // treats point-sized values as CSS px-like numbers, so keep the same scale here.
  return Math.max(1, Number((lnWidthEmu / 12700).toFixed(3)))
}

/**
 * Build an ECharts LinearGradient from an OOXML a:gradFill node.
 */
function buildEChartsGradient(gradFill: SafeXmlNode, ctx: RenderContext): object | undefined {
  const gsLst = gradFill.child('gsLst')
  if (!gsLst.exists()) return undefined

  const stops: { offset: number; color: string }[] = []
  for (const gs of gsLst.children('gs')) {
    const stop = resolveGradientStop(gs, ctx)
    if (stop) {
      // Convert color + alpha to rgba string
      const hex = stop.color.replace('#', '')
      const r = Number.parseInt(hex.substring(0, 2), 16)
      const g = Number.parseInt(hex.substring(2, 4), 16)
      const b = Number.parseInt(hex.substring(4, 6), 16)
      stops.push({
        offset: stop.pos,
        color: `rgba(${r},${g},${b},${stop.alpha})`,
      })
    }
  }

  if (stops.length < 2) return undefined

  // Sort stops by offset
  stops.sort((a, b) => a.offset - b.offset)

  // Determine gradient direction from a:lin angle. Default: top-to-bottom (ang=5400000 = 90°)
  const lin = gradFill.child('lin')
  const angVal = lin.exists() ? (lin.numAttr('ang') ?? 5400000) : 5400000
  const angleDeg = angVal / 60000 // Convert from 60000ths of a degree

  // Map angle to x0,y0,x1,y1 (ECharts LinearGradient coordinates)
  // OOXML angle: 0°=right(→), 90°=down(↓), 180°=left(←), 270°=up(↑)
  // Direction vector: dx=cos(θ), dy=sin(θ) (clockwise from east in screen coords)
  const rad = (angleDeg * Math.PI) / 180
  const x0 = 0.5 - 0.5 * Math.cos(rad)
  const y0 = 0.5 - 0.5 * Math.sin(rad)
  const x1 = 0.5 + 0.5 * Math.cos(rad)
  const y1 = 0.5 + 0.5 * Math.sin(rad)

  return new echarts.graphic.LinearGradient(x0, y0, x1, y1, stops)
}

/**
 * Extract per-data-point colors from c:ser > c:dPt elements.
 * Each c:dPt has c:idx and c:spPr > a:solidFill.
 */
function extractDataPointColors(
  ser: SafeXmlNode,
  ctx: RenderContext
): (string | undefined)[] | undefined {
  const dPts = ser.children('dPt')
  if (dPts.length === 0) return undefined

  const colors: (string | undefined)[] = []
  for (const dPt of dPts) {
    const idx = dPt.child('idx').numAttr('val')
    if (idx === undefined) continue

    const spPr = dPt.child('spPr')
    if (!spPr.exists()) continue

    const solidFill = spPr.child('solidFill')
    if (solidFill.exists()) {
      const hex = resolveColorToHex(solidFill, ctx)
      if (hex) {
        // Grow sparsely so indices beyond the current length stay undefined.
        while (colors.length <= idx) colors.push(undefined)
        colors[idx] = hex
      }
    }
  }

  return colors.length > 0 ? colors : undefined
}

/** In OOXML, boolean elements are true when present unless val="0" or val="false". */
function parseDlblBool(dLbls: SafeXmlNode, childName: string): boolean {
  const el = dLbls.child(childName)
  if (!el.exists()) return false
  const val = el.attr('val')
  return val !== '0' && val !== 'false'
}

/**
 * Extract text color from a txPr element: txPr > p > pPr > defRPr > solidFill.
 */
function extractTxPrColor(parentNode: SafeXmlNode, ctx: RenderContext): string | undefined {
  const txPr = parentNode.child('txPr')
  if (!txPr.exists()) return undefined
  for (const p of txPr.children('p')) {
    const pPr = p.child('pPr')
    if (!pPr.exists()) continue
    const defRPr = pPr.child('defRPr')
    if (!defRPr.exists()) continue
    const fill = defRPr.child('solidFill')
    if (fill.exists()) {
      return resolveColorToHex(fill, ctx)
    }
  }
  return undefined
}

/**
 * Parse c:dLbls (data labels) configuration from a chart type node or series.
 */
function parseDataLabels(node: SafeXmlNode, ctx: RenderContext): DataLabelConfig | undefined {
  const dLbls = node.child('dLbls')
  if (!dLbls.exists()) return undefined

  const showVal = parseDlblBool(dLbls, 'showVal')
  const showCatName = parseDlblBool(dLbls, 'showCatName')
  const showSerName = parseDlblBool(dLbls, 'showSerName')
  const showPercent = parseDlblBool(dLbls, 'showPercent')
  const posNode = dLbls.child('dLblPos')
  const position = posNode.exists() ? posNode.attr('val') || undefined : undefined

  const txStyle = extractTxPrStyle(dLbls, ctx)
  const color = txStyle?.color ?? extractTxPrColor(dLbls, ctx)
  const fontSize = txStyle?.fontSize
  const bold = txStyle?.bold

  // If nothing is shown, return undefined
  if (!showVal && !showCatName && !showSerName && !showPercent) return undefined

  return { showVal, showCatName, showSerName, showPercent, position, color, fontSize, bold }
}

/**
 * Like parseDlblBool, but distinguishes "element absent" (undefined) from an
 * explicit true/false — needed for per-point overrides that only partially
 * override the series-level config.
 */
function parseDlblBoolOptional(dLbl: SafeXmlNode, childName: string): boolean | undefined {
  const el = dLbl.child(childName)
  if (!el.exists()) return undefined
  const val = el.attr('val')
  return val !== '0' && val !== 'false'
}

/**
 * Parse per-point data label overrides from c:dLbls > c:dLbl(idx=...).
 * NOTE(review): the Map generic parameters were stripped in this capture and
 * reconstructed as Map<number, Partial<DataLabelConfig>> from how `out` is
 * populated below — confirm against the original file.
 */
function parsePointDataLabelOverrides(
  dLbls: SafeXmlNode,
  ctx: RenderContext
): Map<number, Partial<DataLabelConfig>> {
  const out = new Map<number, Partial<DataLabelConfig>>()
  if (!dLbls.exists()) return out
  for (const dLbl of dLbls.children('dLbl')) {
    const idx = dLbl.child('idx').numAttr('val')
    if (idx === undefined) continue
    const txStyle = extractTxPrStyle(dLbl, ctx)
    const posNode = dLbl.child('dLblPos')
    const cfg: Partial<DataLabelConfig> = {}
    const showVal = parseDlblBoolOptional(dLbl, 'showVal')
    const showCatName = parseDlblBoolOptional(dLbl, 'showCatName')
    const showSerName = parseDlblBoolOptional(dLbl, 'showSerName')
    const showPercent = parseDlblBoolOptional(dLbl, 'showPercent')
    if (showVal !== undefined) cfg.showVal = showVal
    if (showCatName !== undefined) cfg.showCatName = showCatName
    if (showSerName !== undefined) cfg.showSerName = showSerName
    if (showPercent !== undefined) cfg.showPercent = showPercent
    if (posNode.exists()) cfg.position = posNode.attr('val') || undefined
    if (txStyle?.color) cfg.color = txStyle.color
    else {
      const c = extractTxPrColor(dLbl, ctx)
      if (c) cfg.color = c
    }
    if (txStyle?.fontSize !== undefined) cfg.fontSize = txStyle.fontSize
    if (txStyle?.bold !== undefined) cfg.bold = txStyle.bold
    if (Object.keys(cfg).length > 0) out.set(idx, cfg)
  }
  return out
}

/**
 * Parse pie slice explosion values from c:ser and c:dPt elements.
 */
function parseExplosion(ser: SafeXmlNode, pointCount: number): number[] | undefined {
  const explosions: number[] = new Array(pointCount).fill(0)
  let hasAny = false

  // Series-level explosion
  const serExplosion = ser.child('explosion').numAttr('val') ?? 0
  if (serExplosion > 0) {
    explosions.fill(serExplosion)
    hasAny = true
  }

  // Per-point explosion overrides
  const dPts = ser.children('dPt')
  for (const dPt of dPts) {
    const idx = dPt.child('idx').numAttr('val')
    if (idx === undefined) continue
    const exp = dPt.child('explosion').numAttr('val')
    if (exp !== undefined && exp > 0) {
      explosions[idx] = exp
      hasAny = true
    }
  }

  return hasAny ? explosions : undefined
}

/**
 * Parse all c:ser elements from a chart type node into SeriesData[].
 * Results are sorted by c:order to match the intended series sequence.
 */
function parseSeries(chartTypeNode: SafeXmlNode, ctx: RenderContext): SeriesData[] {
  const seriesArr: SeriesData[] = []

  for (const ser of chartTypeNode.children('ser')) {
    const tx = ser.child('tx')
    const name = extractSeriesName(tx)
    const order = ser.child('order').numAttr('val') ?? seriesArr.length

    const cat = ser.child('cat')
    const categories = extractStringValues(cat)

    const val = ser.child('val')
    const values = extractNumericValues(val)
    const formatCode = extractFormatCode(val)

    // Scatter charts use xVal / yVal instead of cat / val
    const xValNode = ser.child('xVal')
    const yValNode = ser.child('yVal')
    let xValues: number[] | undefined
    if (yValNode.exists()) {
      // yVal overrides val for scatter
      const yVals = extractNumericValues(yValNode)
      if (yVals.length > 0) {
        values.length = 0
        values.push(...yVals)
      }
    }
    if (xValNode.exists()) {
      xValues = extractNumericValues(xValNode)
      // If categories are empty but xVal exists as strings, try that
      if (categories.length === 0) {
        const xCats = extractStringValues(xValNode)
        if (xCats.length > 0) categories.push(...xCats)
      }
    }

    // Bubble chart sizes from c:bubbleSize
    const bubbleSizeNode = ser.child('bubbleSize')
    const bubbleSizes = bubbleSizeNode.exists() ? extractNumericValues(bubbleSizeNode) : undefined

    const colorHex = extractSeriesColor(ser, ctx)
    const lineWidth = extractSeriesLineWidth(ser)
    const dataPointColors = extractDataPointColors(ser, ctx)

    // Extract marker info (c:marker > c:symbol, c:size)
    const marker = ser.child('marker')
    const markerSymbol = marker.child('symbol').attr('val')
    const markerSize = marker.child('size').numAttr('val')
    const smooth = ser.child('smooth').attr('val') === '1'

    seriesArr.push({
      name,
      order,
      categories,
      values,
      xValues,
      bubbleSizes,
      colorHex,
      dataPointColors,
      formatCode,
      markerSymbol,
      markerSize,
      smooth,
      lineWidth,
    })
  }

  // Sort by c:order so legend/stacking matches PPT
  seriesArr.sort((a, b) => a.order - b.order)

  return seriesArr
}

// ---------------------------------------------------------------------------
// Chart Title
// ---------------------------------------------------------------------------

/**
 * Extract chart title from chartSpace > chart > title.
 * Returns undefined when autoTitleDeleted val="1" (title was intentionally removed).
 */
function extractChartTitle(chartNode: SafeXmlNode, seriesArr?: SeriesData[]): string | undefined {
  // Respect autoTitleDeleted: if set, the title should not be shown
  const autoTitleDeleted = chartNode.child('autoTitleDeleted')
  if (autoTitleDeleted.exists() && autoTitleDeleted.attr('val') === '1') {
    return undefined
  }

  const title = chartNode.child('title')
  if (!title.exists()) {
    // OOXML spec: when autoTitleDeleted is NOT "1" and there is no explicit
    // <c:title>, the chart auto-generates a title from the first series name.
    // This applies mainly to single-series charts like pie/doughnut.
    if (seriesArr && seriesArr.length === 1 && seriesArr[0].name) {
      return seriesArr[0].name
    }
    return undefined
  }

  const tx = title.child('tx')
  if (!tx.exists()) return undefined

  // Try rich text: tx > rich > p > r > t
  const rich = tx.child('rich')
  if (rich.exists()) {
    const parts: string[] = []
    for (const p of rich.children('p')) {
      for (const r of p.children('r')) {
        const t = r.child('t').text()
        if (t) parts.push(t)
      }
    }
    if (parts.length > 0) return parts.join('')
  }

  // Try strRef
  const strRef = tx.child('strRef')
  if (strRef.exists()) {
    const strCache = strRef.child('strCache')
    const pts = strCache.children('pt')
    if (pts.length > 0) return pts[0].child('v').text()
  }

  return undefined
}

/**
 * Extract chart title manual layout (title > layout > manualLayout) to ECharts title position.
 * NOTE(review): the return type's generic parameters were stripped in this
 * capture; reconstructed as Partial<Record<'left' | 'top', string>> from the
 * keys assigned below and numToPct's use as a CSS position — confirm.
 */
function extractTitleManualLayout(chartNode: SafeXmlNode): Partial<Record<'left' | 'top', string>> {
  const manual = chartNode.child('title').child('layout').child('manualLayout')
  if (!manual.exists()) return {}
  const out: Partial<Record<'left' | 'top', string>> = {}
  const x = manual.child('x').numAttr('val')
  const y = manual.child('y').numAttr('val')
  if (x !== undefined) out.left = numToPct(x)
  if (y !== undefined) out.top = numToPct(y)
  return out
}

/**
 * Extract text color/font size from a txPr node:
 * txPr > p > pPr > defRPr (solidFill + sz).
+ */
+function extractTxPrStyle(
+  parentNode: SafeXmlNode,
+  ctx: RenderContext
+): { color?: string; fontSize?: number; bold?: boolean; fontFamily?: string } | undefined {
+  const txPr = parentNode.child('txPr')
+  if (!txPr.exists()) return undefined
+
+  for (const p of txPr.children('p')) {
+    const pPr = p.child('pPr')
+    if (!pPr.exists()) continue
+    const defRPr = pPr.child('defRPr')
+    if (!defRPr.exists()) continue
+
+    const style: { color?: string; fontSize?: number; bold?: boolean; fontFamily?: string } = {}
+    const fill = defRPr.child('solidFill')
+    if (fill.exists()) {
+      const c = resolveColorToHex(fill, ctx)
+      if (c) style.color = c
+    }
+    const sz = defRPr.numAttr('sz')
+    if (sz !== undefined && sz > 0) {
+      // OOXML sz is 1/100 pt. Keep renderer's existing px-scale convention.
+      style.fontSize = Math.round(sz / 100)
+    }
+    const b = defRPr.attr('b')
+    if (b === '1' || b === 'true') style.bold = true
+    else if (b === '0' || b === 'false') style.bold = false
+    const latinTypeface = defRPr.child('latin').attr('typeface')
+    const eaTypeface = defRPr.child('ea').attr('typeface')
+    const csTypeface = defRPr.child('cs').attr('typeface')
+    if (latinTypeface || eaTypeface || csTypeface) {
+      style.fontFamily = latinTypeface || eaTypeface || csTypeface
+    }
+
+    // Return the first paragraph that yields any style property;
+    // paragraphs with an empty defRPr are skipped.
+    if (
+      style.color ||
+      style.fontSize !== undefined ||
+      style.bold !== undefined ||
+      style.fontFamily !== undefined
+    )
+      return style
+  }
+  return undefined
+}
+
+/** Theme fallback font: prefer the minor (body) font, then the major (heading) font. */
+function getChartThemeFontFamily(ctx: RenderContext): string | undefined {
+  return (
+    ctx.theme.minorFont.latin ||
+    ctx.theme.minorFont.ea ||
+    ctx.theme.majorFont.latin ||
+    ctx.theme.majorFont.ea ||
+    undefined
+  )
+}
+
+// ---------------------------------------------------------------------------
+// Legend
+// ---------------------------------------------------------------------------
+
+/** Parsed legend info including overlay flag.
+ */
+interface LegendInfo {
+  option: echarts.EChartsOption['legend']
+  overlay: boolean // true = legend overlaps plot area (don't reserve grid space)
+  textStyle?: {
+    color?: string
+    fontSize?: number
+    fontWeight?: 'normal' | 'bold' | 'bolder' | 'lighter' | number
+    fontFamily?: string
+  }
+  manualLayout?: Partial<Record<'left' | 'top' | 'width' | 'height', string>>
+}
+
+/**
+ * Extract legend position from chartSpace > chart > legend > legendPos.
+ */
+function extractLegendInfo(chartNode: SafeXmlNode, ctx: RenderContext): LegendInfo | undefined {
+  const legend = chartNode.child('legend')
+  if (!legend.exists()) return undefined
+
+  const legendPos = legend.child('legendPos')
+  // OOXML default legend position is 'r' (right) per the spec, not 'b'
+  const posVal = legendPos.exists() ? legendPos.attr('val') || 'r' : 'r'
+
+  const overlay = legend.child('overlay').attr('val') === '1'
+
+  // Map OOXML positions to ECharts; keep legend inside and below chart title (avoid overlap on slide 4, 6, etc.)
+  const base = { confine: true as const }
+  const topBelowTitle = '14%' // leave room for chart title so legend does not overlap
+  let option: echarts.EChartsOption['legend']
+  switch (posVal) {
+    case 'b':
+      option = { ...base, bottom: '5%', orient: 'horizontal' as const }
+      break
+    case 't':
+      option = { ...base, top: topBelowTitle, orient: 'horizontal' as const }
+      break
+    case 'l':
+      option = { ...base, left: '2%', top: '44%', orient: 'vertical' as const }
+      break
+    case 'r':
+      option = { ...base, right: '2%', top: '44%', orient: 'vertical' as const }
+      break
+    case 'tr':
+      option = { ...base, top: topBelowTitle, right: '2%', orient: 'vertical' as const }
+      break
+    default:
+      option = { ...base, right: '2%', top: '44%', orient: 'vertical' as const }
+      break
+  }
+  return {
+    option,
+    overlay,
+    textStyle: (() => {
+      const s = extractTxPrStyle(legend, ctx)
+      if (!s) return undefined
+      return {
+        ...(s.color ? { color: s.color } : {}),
+        ...(s.fontSize !== undefined ? { fontSize: s.fontSize } : {}),
+        ...(s.bold === true ? { fontWeight: 'bold' } : {}),
+        ...(s.fontFamily ? { fontFamily: s.fontFamily } : {}),
+      }
+    })(),
+    manualLayout: extractLegendManualLayout(legend),
+  }
+}
+
+/**
+ * Parse legend/layout/manualLayout to ECharts legend position/size override.
+ */
+function extractLegendManualLayout(
+  legendNode: SafeXmlNode
+): Partial<Record<'left' | 'top' | 'width' | 'height', string>> {
+  const manual = legendNode.child('layout').child('manualLayout')
+  if (!manual.exists()) return {}
+  const out: Partial<Record<'left' | 'top' | 'width' | 'height', string>> = {}
+  const x = manual.child('x').numAttr('val')
+  const y = manual.child('y').numAttr('val')
+  const w = manual.child('w').numAttr('val')
+  const h = manual.child('h').numAttr('val')
+  if (x !== undefined) out.left = numToPct(x)
+  if (y !== undefined) out.top = numToPct(y)
+  if (w !== undefined) out.width = numToPct(w)
+  if (h !== undefined) out.height = numToPct(h)
+  return out
+}
+
+/** True when legend is positioned at top (t or tr), so plot area should reserve more top space. */
+function legendIsAtTop(legendInfo: LegendInfo | undefined): boolean {
+  if (!legendInfo || !legendInfo.option || typeof legendInfo.option !== 'object') return false
+  const o = legendInfo.option as Record<string, unknown>
+  // top: 'middle' is used by right/left legends (vertically centered), not "at top"
+  // NOTE(review): extractLegendInfo above actually sets top '44%' (not 'middle')
+  // for left/right legends, so this guard also returns true for them — confirm
+  // whether the comment or the '44%' value is stale.
+  return o.top !== undefined && o.top !== null && o.top !== 'middle'
+}
+
+/**
+ * Grid top reserve in pixels. Use fixed pixels so small chart containers (e.g. in shapes)
+ * don't get oversized percentage reserve and avoid legend overlapping data labels.
+ * When legend overlay=true, don't reserve extra space for legend.
+ */
+function getGridTopPx(hasTitle: boolean, legendInfo: LegendInfo | undefined): number {
+  const atTop = legendIsAtTop(legendInfo)
+  const overlayLegend = legendInfo?.overlay ?? false
+  if (hasTitle) return atTop && !overlayLegend ? 52 : 40
+  return atTop && !overlayLegend ? 32 : 20
+}
+
+/** Legend top in pixels when legend is at top, so it sits below title and above grid.
+ */
+function getLegendTopPx(hasTitle: boolean, legendInfo: LegendInfo | undefined): number | undefined {
+  if (!legendIsAtTop(legendInfo)) return undefined
+  return hasTitle ? 26 : 6
+}
+
+/** Collapse the parsed legend option into a coarse placement used for pie layout decisions. */
+function getLegendPlacement(
+  legendInfo: LegendInfo | undefined
+): 'left' | 'right' | 'top' | 'bottom' | 'none' {
+  if (
+    !legendInfo ||
+    legendInfo.overlay ||
+    !legendInfo.option ||
+    typeof legendInfo.option !== 'object'
+  ) {
+    return 'none'
+  }
+  const opt = legendInfo.option as Record<string, unknown>
+  if (opt.bottom !== undefined) return 'bottom'
+  // 'top' only when neither left nor right is set, so 'tr' legends count as 'right'.
+  if (opt.top !== undefined && opt.left === undefined && opt.right === undefined) return 'top'
+  if (opt.left !== undefined) return 'left'
+  if (opt.right !== undefined) return 'right'
+  return 'none'
+}
+
+/**
+ * Pick pie center and radius so the pie clears the legend on whichever side it sits.
+ * Doughnut charts get an inner radius of ~57% of the outer radius.
+ */
+function computePieLayout(
+  legendInfo: LegendInfo | undefined,
+  isDoughnut: boolean,
+  showLabel: boolean
+): { center: [string, string]; radius: [string, string] | string } {
+  const placement = getLegendPlacement(legendInfo)
+  let center: [string, string] = ['50%', '55%']
+  let outerRadius = showLabel ? 78 : 82
+
+  if (placement === 'right') {
+    center = ['38%', '55%']
+    outerRadius = 82
+  } else if (placement === 'left') {
+    center = ['62%', '55%']
+    outerRadius = 82
+  } else if (placement === 'top') center = ['50%', '60%']
+  else if (placement === 'bottom') center = ['50%', '45%']
+
+  if (placement === 'top' || placement === 'bottom') {
+    outerRadius -= 4
+  }
+
+  if (!isDoughnut) {
+    return { center, radius: `${outerRadius}%` }
+  }
+
+  const innerRadius = Math.round(outerRadius * 0.57)
+  return { center, radius: [`${innerRadius}%`, `${outerRadius}%`] }
+}
+
+/** Convert an OOXML explosion value to an ECharts selectedOffset (rounded to one decimal). */
+function pieExplosionToOffset(explosion: number): number {
+  return Number((explosion * 4.4).toFixed(1))
+}
+
+/** Grid bottom in pixels — leave more room when the legend sits at the bottom.
+ */
+function getGridBottomPx(legendInfo: LegendInfo | undefined): number {
+  if (legendInfo) {
+    const opt = legendInfo.option as Record<string, unknown> | undefined
+    if (opt && opt.bottom !== undefined) {
+      // Legend is at bottom — reserve space for it so axis labels don't overlap
+      return 35
+    }
+  }
+  return 8
+}
+// NOTE(review): underscore-prefixed and not referenced in this chunk — verify it
+// is still needed, or remove it.
+const _GRID_BOTTOM_PX = 8 // kept as default for chart types that don't call the function
+
+/** Map OOXML c:marker > c:symbol values to ECharts symbol names. */
+const OOXML_SYMBOL_MAP: Record<string, string> = {
+  circle: 'circle',
+  square: 'rect',
+  diamond: 'diamond',
+  triangle: 'triangle',
+  none: 'none',
+  // Less common symbols — fallback to circle
+  star: 'circle',
+  dash: 'circle',
+  dot: 'circle',
+  plus: 'circle',
+  x: 'circle',
+}
+
+/** Translate an OOXML marker symbol; unknown symbols fall back to 'circle'. */
+function mapOoxmlSymbol(symbol: string | undefined): string | undefined {
+  if (!symbol) return undefined
+  return OOXML_SYMBOL_MAP[symbol] ?? 'circle'
+}
+
+/**
+ * Assemble the final ECharts legend option from the parsed position, manual layout,
+ * computed top offset and per-item icons.
+ */
+function buildLegendOption(
+  legendOpt: echarts.EChartsOption['legend'] | undefined,
+  legendInfo: LegendInfo | undefined,
+  legendTopPx: number | undefined,
+  data: (string | { name: string; icon?: string })[],
+  textStyle: {
+    color?: string
+    fontSize?: number
+    fontWeight?: 'normal' | 'bold' | 'bolder' | 'lighter' | number
+    fontFamily?: string
+  }
+): echarts.EChartsOption['legend'] {
+  if (!legendOpt) return { show: false }
+  const manual = legendInfo?.manualLayout ?? {}
+  const top =
+    manual.top !== undefined ? manual.top : legendTopPx !== undefined ? legendTopPx : undefined
+  // PowerPoint legend icons are sharp-cornered squares; ECharts default is 25×14 roundRect.
+  // Set icon to 'rect' (no rounded corners) and both dimensions equal for square icons.
+  // However, if individual data items carry their own icon (e.g. line/radar marker symbols),
+  // respect those per-item icons instead of forcing 'rect' globally.
+  const iconSize = textStyle.fontSize ?? 10
+  const hasPerItemIcons = data.some((d) => typeof d === 'object' && d.icon)
+  const sharedIcon =
+    hasPerItemIcons &&
+    data.every(
+      (d) =>
+        typeof d === 'object' &&
+        typeof d.icon === 'string' &&
+        d.icon === (data[0] as { icon?: string }).icon
+    )
+      ? (data[0] as { icon?: string }).icon
+      : undefined
+  const useSharedIcon = sharedIcon !== undefined && !sharedIcon.startsWith('path://')
+  const legendData = useSharedIcon ? data.map((d) => (typeof d === 'string' ? d : d.name)) : data
+  const hasLineLikeIcons = data.some(
+    (d) => typeof d === 'object' && typeof d.icon === 'string' && d.icon.startsWith('path://')
+  )
+  return {
+    ...legendOpt,
+    ...manual,
+    ...(top !== undefined ? { top } : {}),
+    ...(useSharedIcon ? { icon: sharedIcon } : hasPerItemIcons ? {} : { icon: 'rect' }),
+    itemWidth: hasLineLikeIcons ? Math.max(24, Math.round(iconSize * 2.2)) : iconSize,
+    itemHeight: hasLineLikeIcons ? Math.max(8, Math.round(iconSize * 0.9)) : iconSize,
+    data: legendData,
+    textStyle,
+  }
+}
+
+type LegendOptionObject = {
+  show?: boolean
+  data?: (string | { name: string; icon?: string })[]
+  orient?: 'horizontal' | 'vertical'
+  left?: string | number
+  right?: string | number
+  top?: string | number
+  bottom?: string | number
+  icon?: string
+  itemWidth?: number
+  itemHeight?: number
+  textStyle?: {
+    color?: string
+    fontSize?: number
+    fontWeight?: 'normal' | 'bold' | 'bolder' | 'lighter' | number
+    fontFamily?: string
+  }
+}
+
+/** Normalize the (possibly array-valued) ECharts legend option to a single object or null. */
+function getLegendOptionObject(legend: echarts.EChartsOption['legend']): LegendOptionObject | null {
+  if (!legend) return null
+  return Array.isArray(legend)
+    ? ((legend[0] as LegendOptionObject | undefined) ?? null)
+    : (legend as LegendOptionObject)
+}
+
+/** Return the color when it is a plain string, otherwise the fallback. */
+function pickSeriesStringColor(color: string | object | undefined, fallback: string): string {
+  return typeof color === 'string' ? color : fallback
+}
+
+/** SVG path used as a line-style legend icon (a short horizontal stroke). */
+function lineLegendIconPath(): string {
+  return 'path://M2 8 L22 8'
+}
+
+/**
+ * Resample scatter points with a cubic-Hermite curve (damped tangents) so a
+ * connected scatter line renders smoothly. Returns the input unchanged when
+ * there are fewer than 3 points or x is not strictly increasing.
+ */
+function buildSmoothScatterLineData(data: number[][], stepsPerSegment = 24): number[][] {
+  if (data.length < 3) return data
+  for (let i = 1; i < data.length; i++) {
+    if (data[i][0] <= data[i - 1][0]) return data
+  }
+  const tangentScale = 0.3
+  const endTangentScale = 1.2
+  const n = data.length
+  const slopes = new Array(n - 1)
+  for (let i = 0; i < n - 1; i++) {
+    slopes[i] = (data[i + 1][1] - data[i][1]) / (data[i + 1][0] - data[i][0])
+  }
+  const tangents = new Array(n)
+  tangents[0] = slopes[0]
+  tangents[n - 1] = slopes[n - 2] * endTangentScale
+  for (let i = 1; i < n - 1; i++) {
+    tangents[i] = ((slopes[i - 1] + slopes[i]) / 2) * tangentScale
+  }
+  const out: number[][] = [[data[0][0], data[0][1]]]
+  for (let i = 0; i < n - 1; i++) {
+    const [x0, y0] = data[i]
+    const [x1, y1] = data[i + 1]
+    const dx = x1 - x0
+    const m0 = tangents[i]
+    const m1 = tangents[i + 1]
+    for (let step = 1; step <= stepsPerSegment; step++) {
+      const t = step / stepsPerSegment
+      // Cubic Hermite basis functions
+      const h00 = 2 * t ** 3 - 3 * t ** 2 + 1
+      const h10 = t ** 3 - 2 * t ** 2 + t
+      const h01 = -2 * t ** 3 + 3 * t ** 2
+      const h11 = t ** 3 - t ** 2
+      const x = x0 + dx * t
+      const y = h00 * y0 + h10 * dx * m0 + h01 * y1 + h11 * dx * m1
+      out.push([Number(x.toFixed(4)), Number(y.toFixed(4))])
+    }
+  }
+
+  return out
+}
+
+/** True when a plotArea manualLayout supplied any explicit grid geometry. */
+function hasManualGrid(
+  manualGrid: Partial<Record<'left' | 'top' | 'width' | 'height', string>>
+): boolean {
+  return (
+    manualGrid.left !== undefined ||
+    manualGrid.top !== undefined ||
+    manualGrid.width !== undefined ||
+    manualGrid.height !== undefined
+  )
+}
+
+// ---------------------------------------------------------------------------
+// Axis Parsing
+// ---------------------------------------------------------------------------
+
+const DEFAULT_AXIS_INFO: AxisInfo = {
+  deleted: false,
+  tickLblPos: 'nextTo',
+  hasMajorGridlines: false,
+  orientation: 'minMax',
+}
+
+/**
+ * Extract label color from axis txPr: txPr > p > pPr >
+ * defRPr > solidFill.
+ */
+function extractAxisLabelColor(ax: SafeXmlNode, ctx: RenderContext): string | undefined {
+  const txPr = ax.child('txPr')
+  if (!txPr.exists()) return undefined
+
+  // Navigate: txPr > a:p > a:pPr > a:defRPr > a:solidFill
+  for (const p of txPr.children('p')) {
+    const pPr = p.child('pPr')
+    if (!pPr.exists()) continue
+    const defRPr = pPr.child('defRPr')
+    if (!defRPr.exists()) continue
+    const fill = defRPr.child('solidFill')
+    if (fill.exists()) {
+      return resolveColorToHex(fill, ctx)
+    }
+  }
+  return undefined
+}
+
+/**
+ * Extract axis line color from axis spPr: spPr > ln > solidFill.
+ */
+function extractAxisLineColor(ax: SafeXmlNode, ctx: RenderContext): string | undefined {
+  const ln = ax.child('spPr').child('ln')
+  if (!ln.exists()) return undefined
+  const fill = ln.child('solidFill')
+  if (!fill.exists()) return undefined
+  return resolveColorToHex(fill, ctx)
+}
+
+/**
+ * Parse a single axis node (c:valAx, c:catAx, or c:dateAx) into AxisInfo.
+ */
+function parseAxisNode(ax: SafeXmlNode, ctx: RenderContext): AxisInfo {
+  if (!ax.exists()) return { ...DEFAULT_AXIS_INFO }
+  const deleted = ax.child('delete').attr('val') === '1'
+  const tickLblPos = ax.child('tickLblPos').attr('val') || 'nextTo'
+  const numFmtNode = ax.child('numFmt')
+  const numFmt = numFmtNode.exists() ? numFmtNode.attr('formatCode') || undefined : undefined
+  const scaling = ax.child('scaling')
+  const minNode = scaling.child('min')
+  const maxNode = scaling.child('max')
+  const min = minNode.exists() ? Number.parseFloat(minNode.attr('val') || '') : undefined
+  const max = maxNode.exists() ? Number.parseFloat(maxNode.attr('val') || '') : undefined
+  const hasMajorGridlines = ax.child('majorGridlines').exists()
+  const orientation = scaling.child('orientation').attr('val') || 'minMax'
+  const txStyle = extractTxPrStyle(ax, ctx)
+  const labelColor = txStyle?.color ?? extractAxisLabelColor(ax, ctx)
+  const labelFontSize = txStyle?.fontSize
+  const lineColor = extractAxisLineColor(ax, ctx)
+  return {
+    deleted,
+    tickLblPos,
+    // 'General' is the spreadsheet default format — treated as "no explicit format"
+    numFmt: numFmt && numFmt !== 'General' ? numFmt : undefined,
+    min: min !== undefined && !Number.isNaN(min) ? min : undefined,
+    max: max !== undefined && !Number.isNaN(max) ? max : undefined,
+    hasMajorGridlines,
+    orientation,
+    labelColor,
+    labelFontSize,
+    lineColor,
+  }
+}
+
+/** Parse value axis and category axis from plotArea. Also checks dateAx as category fallback. */
+function parseAxes(
+  plotArea: SafeXmlNode,
+  ctx: RenderContext
+): { valueAxis: AxisInfo; categoryAxis: AxisInfo } {
+  const valAx = plotArea.child('valAx')
+  const catAx = plotArea.child('catAx')
+  const dateAx = plotArea.child('dateAx')
+  return {
+    valueAxis: parseAxisNode(valAx, ctx),
+    categoryAxis: catAx.exists() ? parseAxisNode(catAx, ctx) : parseAxisNode(dateAx, ctx),
+  }
+}
+
+/**
+ * Parse scatter/bubble axes: two valAx nodes differentiated by axPos.
+ * Returns X axis (bottom/top) and Y axis (left/right) separately.
+ */
+function parseScatterAxes(
+  plotArea: SafeXmlNode,
+  ctx: RenderContext
+): { xAxis: AxisInfo; yAxis: AxisInfo } {
+  const allValAx = plotArea.children('valAx')
+  let xAxis: AxisInfo = { ...DEFAULT_AXIS_INFO }
+  let yAxis: AxisInfo = { ...DEFAULT_AXIS_INFO }
+  for (const ax of allValAx) {
+    const axPos = ax.child('axPos').attr('val') ?? ''
+    const info = parseAxisNode(ax, ctx)
+    if (axPos === 'b' || axPos === 't') {
+      xAxis = info
+    } else if (axPos === 'l' || axPos === 'r') {
+      yAxis = info
+    }
+  }
+  // Fallback: if only one valAx found, use first as Y axis (value)
+  if (allValAx.length === 1) {
+    yAxis = parseAxisNode(allValAx[0], ctx)
+  }
+  return { xAxis, yAxis }
+}
+
+/**
+ * Apply axis visibility and styling to an ECharts axis definition.
+ * Handles: delete (hide everything), tickLblPos=none (hide only labels),
+ * min/max (scaling), numFmt (label formatter), majorGridlines (splitLine).
+ */
+function applyAxisInfo(
+  axisDef: Record<string, unknown>,
+  info: AxisInfo,
+  kind: 'value' | 'category'
+): void {
+  // Fully deleted axis: hide everything
+  if (info.deleted) {
+    axisDef.axisLabel = { ...((axisDef.axisLabel as object) || {}), show: false }
+    axisDef.axisLine = { show: false }
+    axisDef.axisTick = { show: false }
+    if (kind === 'value') axisDef.splitLine = { show: false }
+    return
+  }
+
+  // tickLblPos=none: hide labels only, keep axis line/tick
+  if (info.tickLblPos === 'none') {
+    axisDef.axisLabel = { ...((axisDef.axisLabel as object) || {}), show: false }
+  }
+
+  // Scaling min/max
+  if (kind === 'value') {
+    if (info.min !== undefined) axisDef.min = info.min
+    if (info.max !== undefined) axisDef.max = info.max
+  }
+
+  // Axis numFmt → label formatter (only for value axis, and only if not already set by series pctFormat)
+  if (kind === 'value' && info.numFmt && !info.deleted && info.tickLblPos !== 'none') {
+    const existingLabel = (axisDef.axisLabel as Record<string, unknown>) || {}
+    if (!existingLabel.formatter) {
+      const nf = info.numFmt
+      axisDef.axisLabel = {
+        ...existingLabel,
+        formatter: (val: number) => formatValue(val, nf),
+      }
+    }
+  }
+
+  // Major gridlines → splitLine
+  if (kind === 'value') {
+    if (!info.hasMajorGridlines) {
+      axisDef.splitLine = { show: false }
+    }
+    // If has gridlines, ECharts shows them by default — no action needed
+  }
+
+  // Axis label color from txPr
+  if (info.labelColor) {
+    const existingLabel = (axisDef.axisLabel as Record<string, unknown>) || {}
+    axisDef.axisLabel = { ...existingLabel, color: info.labelColor }
+  }
+  if (info.labelFontSize !== undefined) {
+    const existingLabel = (axisDef.axisLabel as Record<string, unknown>) || {}
+    axisDef.axisLabel = { ...existingLabel, fontSize: info.labelFontSize }
+  }
+
+  // Axis line color from spPr > ln
+  if (info.lineColor) {
+    const existingLine = (axisDef.axisLine as Record<string, unknown>) || {}
+    const existingLineStyle = (existingLine.lineStyle as Record<string, unknown>) || {}
+    axisDef.axisLine = {
+      ...existingLine,
+      show: existingLine.show ?? true,
+      lineStyle: { ...existingLineStyle, color: info.lineColor },
+    }
+  }
+}
+
+// ---------------------------------------------------------------------------
+// ECharts Option Builders
+// ---------------------------------------------------------------------------
+
+/**
+ * Convert OOXML data label position to ECharts bar label position.
+ */
+function mapBarLabelPosition(pos: string | undefined, isStacked: boolean): string {
+  switch (pos) {
+    case 'outEnd':
+      return 'top'
+    case 'inEnd':
+      return 'insideTop'
+    case 'ctr':
+      return 'inside'
+    case 'inBase':
+      return 'insideBottom'
+    default:
+      return isStacked ? 'inside' : 'top'
+  }
+}
+
+function buildBarChartOption(
+  chartTypeNode: SafeXmlNode,
+  chartNode: SafeXmlNode,
+  seriesArr: SeriesData[],
+  ctx: RenderContext
+): echarts.EChartsOption {
+  const barDir = chartTypeNode.child('barDir').attr('val') || chartTypeNode.attr('barDir') || 'col'
+  const groupingNode = chartTypeNode.child('grouping')
+  const grouping = groupingNode.exists() ? groupingNode.attr('val') || 'clustered' : 'clustered'
+  const isHorizontal = barDir === 'bar'
+
+  // Layout parameters
+  const gapWidth = chartTypeNode.child('gapWidth').numAttr('val')
+  const overlap = chartTypeNode.child('overlap').numAttr('val')
+
+  // Use categories from the first series that has them
+  const categories = seriesArr.find((s) => s.categories.length > 0)?.categories || []
+
+  const title = extractChartTitle(chartNode, seriesArr)
+  const titleStyle = extractTxPrStyle(chartNode.child('title'), ctx)
+  const titleLayout = extractTitleManualLayout(chartNode)
+  const legendInfo = extractLegendInfo(chartNode, ctx)
+  const legendOpt = legendInfo?.option
+  const legendTextStyle = { fontSize: 10, ...(legendInfo?.textStyle ?? {}) }
+
+  const isStacked = grouping === 'stacked' || grouping === 'percentStacked'
+
+  // Parse data labels: in OOXML they can be on chart type (barChart) or on series (ser); try both
+  let sharedLabels = parseDataLabels(chartTypeNode, ctx)
+  if (!sharedLabels) {
+    const firstSer = chartTypeNode.children('ser')[0]
+    if (firstSer?.exists()) sharedLabels = parseDataLabels(firstSer, ctx)
+  }
+  // Series nodes re-sorted by c:order so index idx lines up with seriesArr (also order-sorted).
+  const serNodesByOrder = chartTypeNode
+    .children('ser')
+    .map((ser, i) => ({ ser, order: ser.child('order').numAttr('val') ?? i }))
+    .sort((a, b) => a.order - b.order)
+    .map((x) => x.ser)
+
+  const series: echarts.BarSeriesOption[] = seriesArr.map((s, idx) => {
+    // Capture formatCode for use in label formatter closure
+    const fc = s.formatCode
+    const perSeriesLabels =
+      parseDataLabels(serNodesByOrder[idx] ?? chartTypeNode, ctx) ?? sharedLabels
+
+    const buildLabel = (
+      cfg: DataLabelConfig | Partial<DataLabelConfig> | undefined
+    ): echarts.BarSeriesOption['label'] =>
+      cfg?.showVal
+        ? {
+            show: true,
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            position: mapBarLabelPosition(cfg.position, isStacked) as any,
+            fontSize: cfg.fontSize ?? 9,
+            ...(cfg.color ? { color: cfg.color } : {}),
+            ...(cfg.bold === true ? { fontWeight: 'bold' } : {}),
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            formatter: (params: any) => {
+              const rawVal = params?.value
+              const val =
+                rawVal && typeof rawVal === 'object' && 'value' in rawVal ? rawVal.value : rawVal
+              // Suppress labels for zero/null points
+              if (val === 0 || val === null) return ''
+              return formatValue(val, fc)
+            },
+          }
+        : undefined
+
+    // Per-series label config (override shared)
+    const label: echarts.BarSeriesOption['label'] = buildLabel(perSeriesLabels)
+    const dLblsNode = (serNodesByOrder[idx] ?? chartTypeNode).child('dLbls')
+    const pointOverrides = parsePointDataLabelOverrides(dLblsNode, ctx)
+    const data: echarts.BarSeriesOption['data'] = s.values.map((v, pointIdx) => {
+      const ov = pointOverrides.get(pointIdx)
+      if (!ov) return v
+      // Merge: series-level config as the base, per-point override keys win.
+      const merged: DataLabelConfig = {
+        showVal: perSeriesLabels?.showVal ?? false,
+        showCatName: perSeriesLabels?.showCatName ?? false,
+        showSerName: perSeriesLabels?.showSerName ?? false,
+        showPercent: perSeriesLabels?.showPercent ?? false,
+        position: perSeriesLabels?.position,
+        color: perSeriesLabels?.color,
+        fontSize: perSeriesLabels?.fontSize,
+        bold: perSeriesLabels?.bold,
+        ...ov,
+      }
+      return {
+        value: v,
+        label: buildLabel(merged),
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+      } as any
+    })
+
+    return {
+      type: 'bar' as const,
+      name: s.name,
+      data,
+      stack: isStacked ? 'total' : undefined,
+      itemStyle: s.colorHex ? { color: s.colorHex } : undefined,
+      label,
+      barGap: overlap !== undefined ? `${-overlap}%` : undefined,
+      // OOXML gapWidth = gap-between-groups / single-bar-width × 100.
+      // For N clustered bars: categoryBand = N × barWidth + gap, gap = gapWidth/100 × barWidth.
+      // ECharts barCategoryGap = gap / categoryBand = gapWidth / (100×N + gapWidth).
+      // For stacked bars N=1 since all series share one bar slot.
+      barCategoryGap:
+        gapWidth !== undefined
+          ? `${Math.round((gapWidth * 100) / (100 * (isStacked ? 1 : seriesArr.length) + gapWidth))}%`
+          : undefined,
+    }
+  })
+
+  const plotArea = chartNode.child('plotArea')
+  const { valueAxis, categoryAxis } = parseAxes(plotArea, ctx)
+
+  const categoryAxisDef: Record<string, unknown> = {
+    type: 'category',
+    data: categories,
+    axisLabel: { interval: 0, rotate: categories.length > 6 ? 30 : 0, fontSize: 10 },
+  }
+  applyAxisInfo(categoryAxisDef, categoryAxis, 'category')
+
+  // Check if any series uses percentage format; axis numFmt takes priority
+  const pctFormat =
+    valueAxis.numFmt || seriesArr.find((s) => s.formatCode?.includes('%'))?.formatCode
+  const valueAxisDef: Record<string, unknown> = {
+    type: 'value',
+    ...(pctFormat
+      ? {
+          axisLabel: {
+            formatter: (val: number) => formatValue(val, pctFormat),
+          },
+        }
+      : {}),
+  }
+  applyAxisInfo(valueAxisDef, valueAxis, 'value')
+
+  const gridTop = getGridTopPx(!!title, legendInfo)
+  const legendTopPx = getLegendTopPx(!!title, legendInfo)
+  // When value axis is hidden, reduce left/right padding so bars use full width
+  const gridLeft = valueAxis.deleted && !isHorizontal ? 4 : 10
+  const gridRight = 10
+  // Determine a shared format code for tooltips: prefer axis numFmt, then first series formatCode
+  const tooltipFmt = pctFormat || seriesArr.find((s) => s.formatCode)?.formatCode
+  const gridBottom = getGridBottomPx(legendInfo)
+  const manualGrid = extractManualLayoutGrid(chartNode)
+  const containLabel = !hasManualGrid(manualGrid)
+
+  return {
+    title: title
+      ? {
+          text: title,
+          left: 'center',
+          ...titleLayout,
+          // NOTE(review): bar titles use fontSize 12 while line charts below use 14 —
+          // confirm the inconsistency is intended.
+          textStyle: { fontSize: 12, ...(titleStyle ?? {}) },
+        }
+      : undefined,
+    tooltip: {
+      trigger: 'axis' as const,
+      ...(tooltipFmt
+        ? {
+            valueFormatter: (value: unknown) =>
+              formatValue(
+                Array.isArray(value) ? (value[0] as number) : (value as number),
+                tooltipFmt
+              ),
+          }
+        : {}),
+    },
+    legend: buildLegendOption(
+      legendOpt,
+      legendInfo,
+      legendTopPx,
+      seriesArr.map((s) => s.name),
+      legendTextStyle
+    ),
+    grid: {
+      containLabel,
+      left: gridLeft,
+      right: gridRight,
+      top: gridTop,
+      bottom: gridBottom,
+      ...manualGrid,
+    },
+    xAxis: isHorizontal ? valueAxisDef : categoryAxisDef,
+    yAxis: isHorizontal ? categoryAxisDef : valueAxisDef,
+    series,
+  } as echarts.EChartsOption
+}
+
+function buildLineChartOption(
+  chartTypeNode: SafeXmlNode,
+  chartNode: SafeXmlNode,
+  seriesArr: SeriesData[],
+  ctx: RenderContext,
+  isArea: boolean
+): echarts.EChartsOption {
+  const categories = seriesArr.find((s) => s.categories.length > 0)?.categories || []
+  const title = extractChartTitle(chartNode, seriesArr)
+  const titleStyle = extractTxPrStyle(chartNode.child('title'), ctx)
+  const titleLayout = extractTitleManualLayout(chartNode)
+  const legendInfo = extractLegendInfo(chartNode, ctx)
+  const legendOpt = legendInfo?.option
+  const legendTextStyle = { fontSize: 10, ...(legendInfo?.textStyle ?? {}) }
+
+  const series: echarts.LineSeriesOption[] = seriesArr.map((s) => {
+    const echartsSymbol = mapOoxmlSymbol(s.markerSymbol)
+    const showSymbol = echartsSymbol !== undefined ? echartsSymbol !== 'none' : undefined
+    const lineWidth = s.lineWidth ?? 3
+    const lineStyle = s.colorHex
+      ? { color: s.colorHex, width: lineWidth, cap: 'round' as const, join: 'round' as const }
+      : { width: lineWidth, cap: 'round' as const, join: 'round' as const }
+    return {
+      type: 'line' as const,
+      name: s.name,
+      data: s.values,
+      areaStyle: isArea ? (s.colorHex ? { color: s.colorHex } : {}) : undefined,
+      itemStyle: s.colorHex ? { color: s.colorHex } : undefined,
+      lineStyle,
+      ...(echartsSymbol && echartsSymbol !== 'none' ? { symbol: echartsSymbol } : {}),
+      ...(s.markerSize ? { symbolSize: s.markerSize } : {}),
+      ...(showSymbol !== undefined ? { showSymbol } : {}),
+      z: 3,
+    }
+  })
+
+  const plotArea = chartNode.child('plotArea')
+  const { valueAxis, categoryAxis } = parseAxes(plotArea, ctx)
+
+  const pctFormat =
+    valueAxis.numFmt || seriesArr.find((s) => s.formatCode?.includes('%'))?.formatCode
+  const yAxisDef: Record<string, unknown> = {
+    type: 'value',
+    ...(pctFormat
+      ? {
+          axisLabel: {
+            formatter: (val: number) => formatValue(val, pctFormat),
+          },
+        }
+      : {}),
+  }
+  applyAxisInfo(yAxisDef, valueAxis, 'value')
+
+  const xAxisDef: Record<string, unknown> = {
+    type: 'category',
+    data: categories,
+    axisLabel: { interval: 0, rotate: categories.length > 6 ? 30 : 0 },
+  }
+  applyAxisInfo(xAxisDef, categoryAxis, 'category')
+
+  const gridTop = getGridTopPx(!!title, legendInfo)
+  const legendTopPx = getLegendTopPx(!!title, legendInfo)
+  const gridLeft = valueAxis.deleted ? 4 : 10
+  const tooltipFmt = pctFormat || seriesArr.find((s) => s.formatCode)?.formatCode
+  const gridBottom = getGridBottomPx(legendInfo)
+  const manualGrid = extractManualLayoutGrid(chartNode)
+  const containLabel = !hasManualGrid(manualGrid)
+  return {
+    title: title
+      ? {
+          text: title,
+          left: 'center',
+          ...titleLayout,
+          textStyle: { fontSize: 14, ...(titleStyle ?? {}) },
+        }
+      : undefined,
+    tooltip: {
+      trigger: 'axis' as const,
+      ...(tooltipFmt
+        ? {
+            valueFormatter: (value: unknown) =>
+              formatValue(
+                Array.isArray(value) ? (value[0] as number) : (value as number),
+                tooltipFmt
+              ),
+          }
+        : {}),
+    },
+    legend: buildLegendOption(
+      legendOpt,
+      legendInfo,
+      legendTopPx,
+      seriesArr.map((s) => {
+        // Marker symbol becomes the legend icon; marker-less series get a line-stroke icon.
+        const icon = mapOoxmlSymbol(s.markerSymbol)
+        return icon && icon !== 'none'
+          ? { name: s.name, icon }
+          : { name: s.name, icon: lineLegendIconPath() }
+      }),
+      legendTextStyle
+    ),
+    grid: {
+      containLabel,
+      left: gridLeft,
+      right: 10,
+      top: gridTop,
+      bottom: gridBottom,
+      ...manualGrid,
+    },
+    xAxis: xAxisDef,
+    yAxis: yAxisDef,
+    series,
+  }
+}
+
+function buildPieChartOption(
+  chartTypeNode: SafeXmlNode,
+  chartNode: SafeXmlNode,
+  seriesArr: SeriesData[],
+  isDoughnut: boolean,
+  ctx: RenderContext
+): echarts.EChartsOption {
+  const title = extractChartTitle(chartNode, seriesArr)
+  const titleStyle = extractTxPrStyle(chartNode.child('title'), ctx)
+  const titleLayout = extractTitleManualLayout(chartNode)
+  const legendInfo = extractLegendInfo(chartNode, ctx)
+  const legendOpt = legendInfo?.option
+  const legendTextStyle = { fontSize: 10, ...(legendInfo?.textStyle ?? {}) }
+
+  // Pie charts typically use the first series
+  const firstSeries = seriesArr[0]
+  if (!firstSeries) {
+    return { title: title ? { text: title } : undefined }
+  }
+
+  // Parse data labels: for pie, prefer first series (ser) over chart-type — left/right pies may differ
+  const firstSer = chartTypeNode.children('ser')[0]
+  let sharedLabels = firstSer?.exists() ? parseDataLabels(firstSer, ctx) : undefined
+  if (!sharedLabels) sharedLabels = parseDataLabels(chartTypeNode, ctx)
+
+  // Check if dLbls explicitly exists — if it does but parseDataLabels returned undefined,
+  // that means all show flags are explicitly false → labels should be hidden.
+  const hasDLblsNode =
+    (firstSer?.exists() && firstSer.child('dLbls').exists()) ||
+    chartTypeNode.child('dLbls').exists()
+  const dLblsExplicitlyOff = hasDLblsNode && !sharedLabels
+
+  // Parse explosion from the first c:ser element
+  const explosions = firstSer ? parseExplosion(firstSer, firstSeries.categories.length) : undefined
+
+  const pieData = firstSeries.categories.map((cat, i) => {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    const item: any = {
+      name: cat || `Item ${i + 1}`,
+      value: firstSeries.values[i] ?? 0,
+    }
+    // Per-point color
+    if (firstSeries.dataPointColors?.[i]) {
+      item.itemStyle = { color: firstSeries.dataPointColors[i] }
+    }
+    // Explosion (selected offset)
+    if (explosions?.[i] && explosions[i] > 0) {
+      item.selected = true
+      item.selectedOffset = pieExplosionToOffset(explosions[i])
+    }
+    return item
+  })
+
+  // Build label formatter based on data label config; show value and percent when requested
+  const fc = firstSeries.formatCode
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  let labelFormatter: string | ((params: any) => string) = '{b}: {c} ({d}%)'
+  if (sharedLabels) {
+    if (sharedLabels.showVal && fc && fc.includes('%')) {
+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
+      labelFormatter = (params: any) => {
+        const parts: string[] = []
+        if (sharedLabels!.showCatName) parts.push(params.name)
+        parts.push(formatValue(params.value, fc))
+        if (sharedLabels!.showPercent) parts.push(`${params.percent}%`)
+        return parts.join(', ')
+      }
+    } else {
+      const parts: string[] = []
+      if (sharedLabels.showCatName) parts.push('{b}')
+      if (sharedLabels.showVal) parts.push('{c}')
+      if (sharedLabels.showPercent) parts.push('{d}%')
+      if (parts.length > 0) {
+        labelFormatter = parts.join(' ')
+      } else {
+        // All show* flags are false — hide labels entirely
+        labelFormatter = ''
+      }
+    }
+  }
+
+  // Determine whether labels should be shown:
+  // - If dLbls exists with all show flags=false → labels explicitly disabled
+  // - If no dLbls exist → keep labels hidden by default, matching PowerPoint's default pie output
+  // - If sharedLabels has any show flag true → show labels
+  const showLabel =
+    !dLblsExplicitlyOff &&
+    !!sharedLabels &&
(sharedLabels.showVal || + sharedLabels.showCatName || + sharedLabels.showSerName || + sharedLabels.showPercent) + const pieLayout = computePieLayout(legendInfo, isDoughnut, showLabel) + + const series: echarts.PieSeriesOption[] = [ + { + type: 'pie' as const, + name: firstSeries.name, + radius: pieLayout.radius, + center: pieLayout.center, + data: pieData, + selectedMode: explosions ? 'multiple' : false, + label: { + show: showLabel, + formatter: labelFormatter, + fontSize: sharedLabels?.fontSize ?? 10, + ...(sharedLabels?.bold === true ? { fontWeight: 'bold' as const } : {}), + position: + sharedLabels?.position === 'outEnd' + ? 'outside' + : sharedLabels?.position === 'ctr' + ? 'inside' + : 'outside', + }, + }, + ] + + const legendTopPx = getLegendTopPx(!!title, legendInfo) + const tooltipFmt = fc + return { + title: title + ? { + text: title, + left: 'center', + ...titleLayout, + textStyle: { fontSize: 12, ...(titleStyle ?? {}) }, + } + : undefined, + tooltip: { + trigger: 'item' as const, + ...(tooltipFmt + ? { + valueFormatter: (value: unknown) => + formatValue( + Array.isArray(value) ? (value[0] as number) : (value as number), + tooltipFmt + ), + } + : {}), + }, + legend: buildLegendOption( + legendOpt, + legendInfo, + legendTopPx, + firstSeries.categories, + legendTextStyle + ), + series, + } +} + +function buildRadarChartOption( + chartTypeNode: SafeXmlNode, + chartNode: SafeXmlNode, + seriesArr: SeriesData[], + ctx: RenderContext +): echarts.EChartsOption { + const title = extractChartTitle(chartNode, seriesArr) + const titleStyle = extractTxPrStyle(chartNode.child('title'), ctx) + const titleLayout = extractTitleManualLayout(chartNode) + const legendInfo = extractLegendInfo(chartNode, ctx) + const legendOpt = legendInfo?.option + const legendTextStyle = { fontSize: 10, ...(legendInfo?.textStyle ?? 
{}) }

  // Categories come from the first series that has them
  const categories = seriesArr.find((s) => s.categories.length > 0)?.categories || []

  // Read valAx scaling for explicit min/max on radar
  const plotArea = chartNode.child('plotArea')
  const { valueAxis } = parseAxes(plotArea, ctx)

  // Determine indicator max: prefer explicit valAx max, else compute from data + padding
  let indicatorMax: number
  if (valueAxis.max !== undefined) {
    indicatorMax = valueAxis.max
  } else {
    let maxVal = 0
    for (const s of seriesArr) {
      for (const v of s.values) {
        if (v > maxVal) maxVal = v
      }
    }
    indicatorMax = Math.ceil(maxVal * 1.1) || 100
  }

  // PowerPoint radar charts place categories clockwise from top,
  // but ECharts places indicators counterclockwise. To match PowerPoint,
  // keep the first category at top and reverse the rest.
  const cwCategories =
    categories.length > 1 ? [categories[0], ...categories.slice(1).reverse()] : categories

  const indicator = cwCategories.map((cat) => ({
    name: cat,
    max: indicatorMax,
  }))

  // Read radar style to determine default marker behavior
  const radarStyle = chartTypeNode.child('radarStyle').attr('val') // 'marker' | 'filled' | undefined

  const radarData = seriesArr.map((s) => {
    // Reorder values to match the reversed category order
    const cwValues = s.values.length > 1 ? [s.values[0], ...s.values.slice(1).reverse()] : s.values
    const echartsSymbol = mapOoxmlSymbol(s.markerSymbol)
    // Show symbols if radarStyle is 'marker' or series has explicit marker
    const showSymbol =
      radarStyle === 'marker' || (echartsSymbol !== undefined && echartsSymbol !== 'none')
    // PowerPoint radar charts fill the area with a semi-transparent version of the line color
    const isFilled = radarStyle === 'filled'
    return {
      name: s.name,
      value: cwValues,
      ...(s.colorHex
        ? {
            lineStyle: {
              color: s.colorHex,
              width: s.lineWidth ?? 3,
              cap: 'round' as const,
              join: 'round' as const,
            },
            itemStyle: { color: s.colorHex },
          }
        : {
            lineStyle: { width: s.lineWidth ?? 3, cap: 'round' as const, join: 'round' as const },
          }),
      areaStyle: isFilled
        ? { ...(s.colorHex ? { color: s.colorHex } : {}), opacity: 0.5 }
        : { ...(s.colorHex ? { color: s.colorHex } : {}), opacity: 0.15 },
      ...(echartsSymbol && echartsSymbol !== 'none' ? { symbol: echartsSymbol } : {}),
      // NOTE(review): this spread and the next can both emit symbolSize; the
      // later one wins, making this one effectively redundant when showSymbol
      // is true. Kept as-is to preserve the markerSize-without-showSymbol case.
      ...(s.markerSize ? { symbolSize: s.markerSize } : {}),
      ...(showSymbol ? { symbolSize: s.markerSize ?? 6 } : {}),
    }
  })

  const legendTopPx = getLegendTopPx(!!title, legendInfo)
  return {
    title: title
      ? {
          text: title,
          left: 'center',
          ...titleLayout,
          textStyle: { fontSize: 12, ...(titleStyle ?? {}) },
        }
      : undefined,
    tooltip: {},
    legend: buildLegendOption(
      legendOpt,
      legendInfo,
      legendTopPx,
      seriesArr.map((s) => {
        const icon = mapOoxmlSymbol(s.markerSymbol)
        return icon && icon !== 'none' ? { name: s.name, icon } : s.name
      }),
      legendTextStyle
    ),
    radar: { indicator, radius: '58%', center: ['50%', '55%'] },
    series: [
      {
        type: 'radar' as const,
        data: radarData,
      },
    ],
  }
}

/**
 * Build the ECharts option for a scatter chart. Series with smooth/lineMarker
 * styling render as line series over the scatter points.
 */
function buildScatterChartOption(
  chartTypeNode: SafeXmlNode,
  chartNode: SafeXmlNode,
  seriesArr: SeriesData[],
  ctx: RenderContext
): echarts.EChartsOption {
  const title = extractChartTitle(chartNode, seriesArr)
  const titleStyle = extractTxPrStyle(chartNode.child('title'), ctx)
  const titleLayout = extractTitleManualLayout(chartNode)
  const legendInfo = extractLegendInfo(chartNode, ctx)
  const legendOpt = legendInfo?.option
  const legendTextStyle = { fontSize: 10, ...(legendInfo?.textStyle ?? {}) }

  // Parse scatter-specific marker defaults from scatterStyle
  const scatterStyle = chartTypeNode.child('scatterStyle').attr('val') ?? 'lineMarker'
  // Default scatter marker symbol per OOXML: lineMarker → diamond, smoothMarker → diamond
  const defaultScatterSymbol =
    scatterStyle === 'lineMarker' || scatterStyle === 'smoothMarker' ? 'diamond' : 'circle'

  const series = seriesArr.map((s) => {
    // Use xValues if available (parsed from c:xVal), otherwise fall back to index
    const data = s.values.map((v, i) => {
      const x = s.xValues && i < s.xValues.length ? s.xValues[i] : i
      return [x, v]
    })
    const echartsSymbol = mapOoxmlSymbol(s.markerSymbol) ?? defaultScatterSymbol
    const showSymbol = echartsSymbol !== 'none'
    const renderAsLine = scatterStyle === 'smoothMarker' || s.smooth
    if (renderAsLine) {
      const lineData =
        scatterStyle === 'smoothMarker' || s.smooth ? buildSmoothScatterLineData(data) : data
      const lineWidth = s.lineWidth ?? 4
      return {
        type: 'line' as const,
        name: s.name,
        data: lineData,
        smooth: false,
        showSymbol,
        ...(showSymbol ? { symbol: echartsSymbol, symbolSize: s.markerSize ?? 8 } : {}),
        ...(s.colorHex
          ? {
              lineStyle: {
                color: s.colorHex,
                width: lineWidth,
                cap: 'round' as const,
                join: 'round' as const,
              },
              itemStyle: { color: s.colorHex },
            }
          : { lineStyle: { width: lineWidth, cap: 'round' as const, join: 'round' as const } }),
      }
    }
    return {
      type: 'scatter' as const,
      name: s.name,
      data,
      symbol: echartsSymbol,
      symbolSize: s.markerSize ?? 8,
      itemStyle: s.colorHex ? { color: s.colorHex } : undefined,
    }
  })
  const legendData = seriesArr.map((s) => {
    const echartsSymbol = mapOoxmlSymbol(s.markerSymbol) ?? defaultScatterSymbol
    const showSymbol = echartsSymbol !== 'none'
    const renderAsLine = scatterStyle === 'smoothMarker' || s.smooth
    if (renderAsLine) {
      return showSymbol && echartsSymbol
        ? { name: s.name, icon: echartsSymbol }
        : { name: s.name, icon: lineLegendIconPath() }
    }
    return echartsSymbol && echartsSymbol !== 'none'
      ? { name: s.name, icon: echartsSymbol }
      : s.name
  })

  const plotArea = chartNode.child('plotArea')
  const { xAxis: xAxisInfo, yAxis: yAxisInfo } = parseScatterAxes(plotArea, ctx)

  const gridTop = getGridTopPx(!!title, legendInfo)
  const legendTopPx = getLegendTopPx(!!title, legendInfo)
  const manualGrid = extractManualLayoutGrid(chartNode)
  const containLabel = !hasManualGrid(manualGrid)
  const scatterGridLeft = yAxisInfo.deleted ? 4 : 24
  const scatterGridTop = title ? gridTop + 12 : gridTop
  const scatterGridBottom = Math.max(getGridBottomPx(legendInfo), 20)

  // NOTE: generic arguments restored here; they were stripped by the source mangling.
  const xAxisDef: Record<string, unknown> = { type: 'value' }
  const yAxisDef: Record<string, unknown> = { type: 'value' }
  applyAxisInfo(xAxisDef, xAxisInfo, 'value')
  applyAxisInfo(yAxisDef, yAxisInfo, 'value')

  return {
    title: title
      ? {
          text: title,
          left: 'center',
          ...titleLayout,
          textStyle: { fontSize: 14, ...(titleStyle ?? {}) },
        }
      : undefined,
    tooltip: { trigger: 'item' },
    legend: buildLegendOption(legendOpt, legendInfo, legendTopPx, legendData, legendTextStyle),
    grid: {
      containLabel,
      left: scatterGridLeft,
      right: 10,
      top: scatterGridTop,
      bottom: scatterGridBottom,
      ...manualGrid,
    },
    xAxis: xAxisDef,
    yAxis: yAxisDef,
    series,
  }
}

// ---------------------------------------------------------------------------
// Bubble Chart
// ---------------------------------------------------------------------------

/**
 * Build the ECharts option for a bubble chart (scatter with value-scaled sizes).
 */
function buildBubbleChartOption(
  chartTypeNode: SafeXmlNode,
  chartNode: SafeXmlNode,
  seriesArr: SeriesData[],
  ctx: RenderContext
): echarts.EChartsOption {
  const title = extractChartTitle(chartNode, seriesArr)
  const titleStyle = extractTxPrStyle(chartNode.child('title'), ctx)
  const titleLayout = extractTitleManualLayout(chartNode)
  const legendInfo = extractLegendInfo(chartNode, ctx)
  const legendOpt = legendInfo?.option
  const legendTextStyle = { fontSize: 10, ...(legendInfo?.textStyle ??
{}) }
  const bubbleScale = Math.max(chartTypeNode.child('bubbleScale').numAttr('val') ?? 100, 0)
  const maxBubbleDiameter = 100 * (bubbleScale / 100 || 1)

  // Bubble charts scale bubble area by value. In screen space that means diameter
  // should follow sqrt(value / maxValue), not a linear min-max interpolation.
  let maxSize = Number.NEGATIVE_INFINITY
  for (const s of seriesArr) {
    if (s.bubbleSizes) {
      for (const sz of s.bubbleSizes) {
        if (sz > maxSize) maxSize = sz
      }
    }
  }
  // Guard against all-zero / missing bubble sizes (avoids divide-by-zero).
  const safeMaxBubbleSize = maxSize > 0 ? maxSize : 1

  const series: echarts.ScatterSeriesOption[] = seriesArr.map((s) => {
    const data = s.values.map((v, i) => {
      const x = s.xValues && i < s.xValues.length ? s.xValues[i] : i
      const bub = s.bubbleSizes && i < s.bubbleSizes.length ? s.bubbleSizes[i] : 0
      return [x, v, bub]
    })
    return {
      type: 'scatter' as const,
      name: s.name,
      data,
      symbolSize: (val: number[]) => {
        const bubbleValue = Math.max(Number(val[2]) || 0, 0)
        return Math.sqrt(bubbleValue / safeMaxBubbleSize) * maxBubbleDiameter
      },
      itemStyle: s.colorHex ? { color: s.colorHex } : undefined,
    }
  })

  const plotArea = chartNode.child('plotArea')
  const { xAxis: xAxisInfo, yAxis: yAxisInfo } = parseScatterAxes(plotArea, ctx)

  const gridTop = getGridTopPx(!!title, legendInfo)
  const legendTopPx = getLegendTopPx(!!title, legendInfo)
  const manualGrid = extractManualLayoutGrid(chartNode)
  const containLabel = !hasManualGrid(manualGrid)
  const scatterGridLeft = yAxisInfo.deleted ? 4 : 24
  const scatterGridTop = title ? gridTop + 12 : gridTop
  const scatterGridBottom = Math.max(getGridBottomPx(legendInfo), 20)

  // NOTE: generic arguments restored here; they were stripped by the source mangling.
  const xAxisDef: Record<string, unknown> = { type: 'value' }
  const yAxisDef: Record<string, unknown> = { type: 'value' }
  applyAxisInfo(xAxisDef, xAxisInfo, 'value')
  applyAxisInfo(yAxisDef, yAxisInfo, 'value')

  return {
    title: title
      ? {
          text: title,
          left: 'center',
          ...titleLayout,
          textStyle: { fontSize: 14, ...(titleStyle ?? {}) },
        }
      : undefined,
    tooltip: {
      trigger: 'item',
      formatter: (params: unknown) => {
        const p = params as { seriesName: string; value: number[] }
        // NOTE(review): the separator here reads as `<br/>` in the pre-mangled
        // source (angle-bracket spans were stripped) — confirm against the
        // original file. A bare newline would not render as a line break in
        // ECharts' default HTML tooltip.
        return `${p.seriesName}<br/>x: ${p.value[0]}, y: ${p.value[1]}, size: ${p.value[2]}`
      },
    },
    legend: buildLegendOption(
      legendOpt,
      legendInfo,
      legendTopPx,
      seriesArr.map((s) => s.name),
      legendTextStyle
    ),
    grid: {
      containLabel,
      left: scatterGridLeft,
      right: 10,
      top: scatterGridTop,
      bottom: scatterGridBottom,
      ...manualGrid,
    },
    xAxis: xAxisDef,
    yAxis: yAxisDef,
    series,
  }
}

// ---------------------------------------------------------------------------
// Stock Chart (Candlestick)
// ---------------------------------------------------------------------------

/**
 * Build the ECharts option for a stock chart. OHLC (4 series) renders as a
 * candlestick; HLC (3 series) renders as Office-style high-low stems with
 * close ticks; anything else falls back to collapsed candles.
 */
function buildStockChartOption(
  _chartTypeNode: SafeXmlNode,
  chartNode: SafeXmlNode,
  seriesArr: SeriesData[],
  ctx: RenderContext
): echarts.EChartsOption {
  const title = extractChartTitle(chartNode, seriesArr)
  const titleStyle = extractTxPrStyle(chartNode.child('title'), ctx)
  const titleLayout = extractTitleManualLayout(chartNode)
  const legendInfo = extractLegendInfo(chartNode, ctx)

  // Stock charts have 3 (HLC) or 4 (OHLC) series:
  // OHLC order: open, high, low, close
  // HLC order: high, low, close (open defaults to close → collapsed body)
  const categories = seriesArr.find((s) => s.categories.length > 0)?.categories || []

  // ECharts candlestick expects [open, close, low, high] per data point
  const dataLen = categories.length || Math.max(...seriesArr.map((s) => s.values.length), 0)
  const candleData: number[][] = []

  if (seriesArr.length >= 4) {
    // OHLC: series 0=open, 1=high, 2=low, 3=close
    for (let i = 0; i < dataLen; i++) {
      candleData.push([
        seriesArr[0].values[i] ?? 0, // open
        seriesArr[3].values[i] ?? 0, // close
        seriesArr[2].values[i] ?? 0, // low
        seriesArr[1].values[i] ?? 0, // high
      ])
    }
  } else if (seriesArr.length >= 3) {
    // HLC: series 0=high, 1=low, 2=close; open=close (collapsed body)
    for (let i = 0; i < dataLen; i++) {
      const close = seriesArr[2].values[i] ??
0 + candleData.push([ + close, // open = close + close, // close + seriesArr[1].values[i] ?? 0, // low + seriesArr[0].values[i] ?? 0, // high + ]) + } + } else { + // Fallback: single series treated as close values with zero open + for (let i = 0; i < dataLen; i++) { + const val = seriesArr[0]?.values[i] ?? 0 + candleData.push([0, val, 0, val]) + } + } + + const plotArea = chartNode.child('plotArea') + const { valueAxis, categoryAxis } = parseAxes(plotArea, ctx) + + const gridTop = getGridTopPx(!!title, legendInfo) + const manualGrid = extractManualLayoutGrid(chartNode) + const containLabel = !hasManualGrid(manualGrid) + + const xAxisDef: Record = { + type: 'category', + data: categories, + axisLabel: { interval: 0, rotate: categories.length > 4 ? 30 : 0, fontSize: 10 }, + splitLine: { show: false }, + } + applyAxisInfo(xAxisDef, categoryAxis, 'category') + + const yAxisDef: Record = { type: 'value' } + applyAxisInfo(yAxisDef, valueAxis, 'value') + + const stockValues = candleData.flatMap((d) => [d[2], d[3]]).filter((v) => Number.isFinite(v)) + if (stockValues.length > 0) { + const stockMin = Math.min(...stockValues) + const stockMax = Math.max(...stockValues) + if (yAxisDef.min === undefined && stockMin >= 0) { + yAxisDef.min = 0 + } + if (yAxisDef.interval === undefined) { + yAxisDef.interval = niceAxisInterval(stockMax, stockMin, 7) + } + if (yAxisDef.max === undefined) { + const interval = Number(yAxisDef.interval) || niceAxisInterval(stockMax, stockMin, 7) + yAxisDef.max = Math.ceil(stockMax / interval) * interval + interval + } + } + + const legendOpt = legendInfo?.option + const legendTextStyle = { fontSize: 10, ...(legendInfo?.textStyle ?? {}) } + const legendTopPx = getLegendTopPx(!!title, legendInfo) + const isHlc = seriesArr.length >= 3 && seriesArr.length < 4 + + const legendData = isHlc + ? seriesArr.slice(0, 3).map((s) => ({ name: s.name, icon: 'none' })) + : seriesArr.map((s) => s.name) + + const series: echarts.SeriesOption[] = isHlc + ? 
[ + { + type: 'custom', + name: seriesArr[2].name, + coordinateSystem: 'cartesian2d', + // data: [categoryIndex, high, low, close] + data: Array.from({ length: dataLen }, (_, i) => [ + i, + seriesArr[0].values[i] ?? 0, + seriesArr[1].values[i] ?? 0, + seriesArr[2].values[i] ?? 0, + ]), + // eslint-disable-next-line @typescript-eslint/no-explicit-any + renderItem: (params: any, api: any) => { + const xValue = api.value(0) + const high = api.value(1) + const low = api.value(2) + const close = api.value(3) + const highPoint = api.coord([xValue, high]) + const lowPoint = api.coord([xValue, low]) + const closePoint = api.coord([xValue, close]) + const bandWidth = Math.max(8, api.size([1, 0])[0] || 12) + // Office HLC close marks stay as short ticks; scaling them with the full + // category band makes them look like stray mid-plot marker lines. + const tickWidth = Math.min(4, Math.max(2, Math.round(bandWidth * 0.04))) + const stemColor = pickSeriesStringColor(seriesArr[0].colorHex, '#000000') + const closeColor = pickSeriesStringColor(seriesArr[2].colorHex, '#00B050') + return { + type: 'group', + children: [ + { + type: 'line', + shape: { + x1: highPoint[0], + y1: highPoint[1], + x2: lowPoint[0], + y2: lowPoint[1], + }, + style: { + stroke: stemColor, + lineWidth: 1, + }, + }, + { + type: 'line', + shape: { + x1: closePoint[0], + y1: closePoint[1], + x2: closePoint[0] + tickWidth, + y2: closePoint[1], + }, + style: { + stroke: closeColor, + lineWidth: 1, + }, + }, + ], + } + }, + silent: true, + } as echarts.SeriesOption, + ] + : [ + { + type: 'candlestick' as const, + name: seriesArr.length >= 3 ? seriesArr[2].name : seriesArr[0]?.name, + data: candleData, + itemStyle: { + // OOXML up/down colors from series spPr; fallback to standard financial convention + color: pickSeriesStringColor( + seriesArr[seriesArr.length >= 4 ? 
3 : 2]?.colorHex, + '#ec0000' + ), + color0: pickSeriesStringColor(seriesArr[0]?.colorHex, '#00da3c'), + borderColor: pickSeriesStringColor( + seriesArr[seriesArr.length >= 4 ? 3 : 2]?.colorHex, + '#ec0000' + ), + borderColor0: pickSeriesStringColor(seriesArr[0]?.colorHex, '#00da3c'), + }, + }, + ] + + return { + title: title + ? { + text: title, + left: 'center', + ...titleLayout, + textStyle: { fontSize: 14, ...(titleStyle ?? {}) }, + } + : undefined, + tooltip: { trigger: 'axis', axisPointer: { type: 'cross' } }, + legend: buildLegendOption(legendOpt, legendInfo, legendTopPx, legendData, legendTextStyle), + grid: { + containLabel, + // Stock charts with rotated date labels need extra left inset so the + // first category label is not clipped by the plot boundary. + left: 24, + right: 10, + top: gridTop, + bottom: getGridBottomPx(legendInfo), + ...manualGrid, + }, + xAxis: xAxisDef, + yAxis: yAxisDef, + series, + } +} + +// --------------------------------------------------------------------------- +// Data Table (c:dTable) +// --------------------------------------------------------------------------- + +/** Parsed c:dTable info for building the chart data table. */ +interface DataTableInfo { + seriesArr: SeriesData[] + showKeys: boolean + formatCode?: string +} + +/** + * Check if plotArea has c:dTable and parse showKeys. + */ +function parseDataTable(plotArea: SafeXmlNode): { showKeys: boolean } | undefined { + const dTable = plotArea.child('dTable') + if (!dTable.exists()) return undefined + const showKeys = dTable.child('showKeys').attr('val') !== '0' + return { showKeys } +} + +/** + * Build HTML table element from series data for chart data table (c:dTable). 
 */
function buildDataTableElement(info: DataTableInfo, seriesColors?: string[]): HTMLTableElement {
  // Table shell: compact styling so it reads as a chart annex, not page content.
  const table = document.createElement('table')
  table.style.width = '100%'
  table.style.borderCollapse = 'collapse'
  table.style.fontSize = '10px'
  table.style.marginTop = '8px'

  const { seriesArr, showKeys, formatCode } = info
  const categories = seriesArr.find((s) => s.categories.length > 0)?.categories || []
  // Number format: explicit table format wins, else first series format found.
  const fc = formatCode || seriesArr.find((s) => s.formatCode)?.formatCode

  // Header row: empty cell + category names (columns = categories, matching X-axis)
  const thead = document.createElement('thead')
  const headerRow = document.createElement('tr')
  const emptyTh = document.createElement('th')
  emptyTh.style.border = '1px solid #ccc'
  emptyTh.style.padding = '2px 6px'
  emptyTh.style.textAlign = 'left'
  emptyTh.style.fontWeight = 'bold'
  headerRow.appendChild(emptyTh)
  for (let i = 0; i < categories.length; i++) {
    const th = document.createElement('th')
    th.style.border = '1px solid #ccc'
    th.style.padding = '2px 6px'
    th.style.textAlign = 'right'
    th.style.fontWeight = 'bold'
    th.textContent = categories[i] ?? ''
    headerRow.appendChild(th)
  }
  thead.appendChild(headerRow)
  table.appendChild(thead)

  // Data rows: series name (with optional legend key) + values across categories
  const tbody = document.createElement('tbody')
  for (let si = 0; si < seriesArr.length; si++) {
    const s = seriesArr[si]
    const tr = document.createElement('tr')
    const nameTd = document.createElement('td')
    nameTd.style.border = '1px solid #ccc'
    nameTd.style.padding = '2px 6px'
    nameTd.style.textAlign = 'left'
    nameTd.style.fontWeight = 'bold'
    // Legend key swatch (c:showKeys): small colored square before the series name.
    if (showKeys && seriesColors && seriesColors[si]) {
      const key = document.createElement('span')
      key.style.display = 'inline-block'
      key.style.width = '8px'
      key.style.height = '8px'
      key.style.marginRight = '4px'
      key.style.verticalAlign = 'middle'
      key.style.backgroundColor = seriesColors[si]
      nameTd.appendChild(key)
    }
    nameTd.appendChild(document.createTextNode(s.name || ''))
    tr.appendChild(nameTd)
    for (let ci = 0; ci < categories.length; ci++) {
      const td = document.createElement('td')
      td.style.border = '1px solid #ccc'
      td.style.padding = '2px 6px'
      td.style.textAlign = 'right'
      const val = s.values[ci]
      // Missing values render as an empty cell rather than "0".
      td.textContent = val !== undefined ? formatValue(val, fc ?? s.formatCode) : ''
      tr.appendChild(td)
    }
    tbody.appendChild(tr)
  }
  table.appendChild(tbody)

  return table
}

// ---------------------------------------------------------------------------
// Main Chart XML Parser
// ---------------------------------------------------------------------------

/**
 * Extract background colors from chartSpace and plotArea.
 * Returns { chartBg, plotAreaBg } hex color strings or undefined.
 */
function extractBackgroundColors(
  chartXml: SafeXmlNode,
  chartNode: SafeXmlNode,
  ctx: RenderContext
): { chartBg?: string; plotAreaBg?: string } {
  let chartBg: string | undefined
  let plotAreaBg: string | undefined

  // chartSpace > spPr > solidFill (overall chart background)
  const chartSpaceSpPr = chartXml.child('spPr')
  if (chartSpaceSpPr.exists()) {
    const noFill = chartSpaceSpPr.child('noFill')
    if (noFill.exists()) {
      // Explicit noFill — leave chartBg undefined (transparent)
    } else {
      const fill = chartSpaceSpPr.child('solidFill')
      if (fill.exists()) {
        chartBg = resolveColorToHex(fill, ctx)
      } else {
        // No fill specified — use white so chart area is visible
        chartBg = '#ffffff'
      }
    }
  }

  // chart > plotArea > spPr > solidFill (plot area background)
  // Unlike the chart background, no-fill here just leaves plotAreaBg unset.
  const plotArea = chartNode.child('plotArea')
  if (plotArea.exists()) {
    const plotSpPr = plotArea.child('spPr')
    if (plotSpPr.exists()) {
      const noFill = plotSpPr.child('noFill')
      if (!noFill.exists()) {
        const fill = plotSpPr.child('solidFill')
        if (fill.exists()) {
          plotAreaBg = resolveColorToHex(fill, ctx)
        }
      }
    }
  }

  return { chartBg, plotAreaBg }
}

/**
 * Parse chartSpace-level clrMapOvr attributes into a color-map override.
+ * Example: + */ +function parseChartColorMapOverride(chartXml: SafeXmlNode): Map | undefined { + const clrMapOvr = chartXml.child('clrMapOvr') + if (!clrMapOvr.exists()) return undefined + + // Common forms: + // 1) + // 2) + // 3) (no override) + let sourceEl = clrMapOvr.element + const override = clrMapOvr.child('overrideClrMapping') + if (override.exists() && override.element) { + sourceEl = override.element + } else { + const master = clrMapOvr.child('masterClrMapping') + if (master.exists()) return undefined + } + if (!sourceEl) return undefined + + const attrs = sourceEl.attributes + const map = new Map() + for (let i = 0; i < attrs.length; i++) { + const attr = attrs[i] + map.set(attr.localName, attr.value) + } + return map.size > 0 ? map : undefined +} + +/** + * Create a chart-local render context that applies chartSpace clrMapOvr. + */ +function createChartRenderContext(chartXml: SafeXmlNode, ctx: RenderContext): RenderContext { + const colorMapOverride = parseChartColorMapOverride(chartXml) + if (!colorMapOverride) return ctx + return { + ...ctx, + layout: { ...ctx.layout, colorMapOverride }, + // color cache depends on color map; isolate chart-local cache. + colorCache: new Map(), + } +} + +function parseChartStyleId(chartXml: SafeXmlNode): number | undefined { + // c:chartSpace > c:style val="N" + const styleNode = chartXml.child('style') + const direct = styleNode.numAttr('val') + if (direct !== undefined) return direct + + // Some files use mc:AlternateContent > mc:Choice(c14) > c14:style + const alt = chartXml.child('AlternateContent') + if (!alt.exists()) return undefined + for (const branch of alt.allChildren()) { + const s = branch.child('style') + const v = s.numAttr('val') + if (v !== undefined) return v + } + return undefined +} + +function clamp01(v: number): number { + if (v < 0) return 0 + if (v > 1) return 1 + return v +} + +function tintHex(hex: string, amount: number): string { + const normalized = hex.startsWith('#') ? 
hex.slice(1) : hex + if (normalized.length !== 6) return hex.startsWith('#') ? hex : `#${hex}` + const r = Number.parseInt(normalized.slice(0, 2), 16) + const g = Number.parseInt(normalized.slice(2, 4), 16) + const b = Number.parseInt(normalized.slice(4, 6), 16) + if ([r, g, b].some((n) => Number.isNaN(n))) return hex.startsWith('#') ? hex : `#${hex}` + const a = clamp01(amount) + const mix = (c: number) => Math.round(c + (255 - c) * a) + return `#${[mix(r), mix(g), mix(b)].map((n) => n.toString(16).padStart(2, '0')).join('')}` +} + +/** + * Build a chart color palette from theme accents and chart style id. + * This improves parity with Office chart styles when series colors are implicit. + */ +function buildChartPalette(chartXml: SafeXmlNode, ctx: RenderContext): string[] | undefined { + const accents = ['accent1', 'accent2', 'accent3', 'accent4', 'accent5', 'accent6'] + .map((k) => ctx.theme.colorScheme.get(k)) + .filter((v): v is string => !!v) + .map((hex) => (hex.startsWith('#') ? hex : `#${hex}`)) + + if (accents.length === 0) return undefined + + const styleId = parseChartStyleId(chartXml) + if (styleId === undefined) return accents + + // Style ids 100+ use the same accent order as the base palette. + // No rotation needed — OOXML chart styles control visual appearance + // (e.g. 3D, transparency) but don't reorder series colors. + return accents +} + +// --------------------------------------------------------------------------- +// Chart-Space Default Font Size + Legend Grid Adjustment +// --------------------------------------------------------------------------- + +/** + * Apply chart-space default font size to all text elements in the ECharts option + * that still use hardcoded small defaults. Only overrides when no explicit OOXML + * font size was set on that element (i.e., value matches our hardcoded defaults). 
 */
function applyDefaultFontSizes(option: echarts.EChartsOption, defaultFs: number): void {
  // Mutates `option` in place; the `any` cast is needed to patch nested fields.
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const opt = option as any

  // Title: our defaults are 12 or 14 — replace with the chart-space default
  if (opt.title?.textStyle?.fontSize) {
    const cur = opt.title.textStyle.fontSize
    if (cur <= 14) {
      opt.title.textStyle.fontSize = defaultFs
    }
  }

  // Radar indicator font size
  if (opt.radar) {
    const radar = Array.isArray(opt.radar) ? opt.radar[0] : opt.radar
    if (radar?.name?.textStyle) {
      if (!radar.name.textStyle.fontSize || radar.name.textStyle.fontSize <= 10) {
        radar.name.textStyle.fontSize = defaultFs
      }
    }
  }

  // Series data label font sizes: apply default when no explicit OOXML font was set
  const seriesArr = Array.isArray(opt.series) ? opt.series : opt.series ? [opt.series] : []
  for (const s of seriesArr) {
    if (s?.label?.fontSize && (s.label.fontSize as number) <= 10) {
      s.label.fontSize = defaultFs
    }
  }

  // Axis labels: <= 10 is our hardcoded default, so it is safe to override.
  const applyAxisDefaultFontSize = (axis: any) => {
    if (!axis?.axisLabel) return
    const current = axis.axisLabel.fontSize
    if (current === undefined || current <= 10) {
      axis.axisLabel.fontSize = defaultFs
    }
  }

  const xAxes = Array.isArray(opt.xAxis) ? opt.xAxis : opt.xAxis ? [opt.xAxis] : []
  const yAxes = Array.isArray(opt.yAxis) ? opt.yAxis : opt.yAxis ? [opt.yAxis] : []
  for (const axis of [...xAxes, ...yAxes]) applyAxisDefaultFontSize(axis)

  if (opt.legend?.textStyle) {
    const current = opt.legend.textStyle.fontSize
    if (current === undefined || current <= 10) {
      opt.legend.textStyle.fontSize = defaultFs
    }
  }
}

/**
 * Apply a default font family (and bold title weight) to title, axis labels,
 * and legend text, without overriding any explicitly set fontFamily.
 * Mutates `option` in place.
 */
function applyDefaultFontFamily(option: echarts.EChartsOption, fontFamily: string): void {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const opt = option as any

  if (opt.title?.textStyle && !opt.title.textStyle.fontFamily) {
    opt.title.textStyle.fontFamily = fontFamily
  }
  if (opt.title?.textStyle && !opt.title.textStyle.fontWeight) {
    opt.title.textStyle.fontWeight = 'bold'
  }

  // Creates axisLabel on the fly so the family is applied even when absent.
  const applyAxisFontFamily = (axis: any) => {
    if (!axis) return
    const axisLabel = axis.axisLabel ?? (axis.axisLabel = {})
    if (!axisLabel.fontFamily) {
      axisLabel.fontFamily = fontFamily
    }
  }

  const xAxes = Array.isArray(opt.xAxis) ? opt.xAxis : opt.xAxis ? [opt.xAxis] : []
  const yAxes = Array.isArray(opt.yAxis) ? opt.yAxis : opt.yAxis ? [opt.yAxis] : []
  for (const axis of [...xAxes, ...yAxes]) applyAxisFontFamily(axis)

  if (opt.legend?.textStyle && !opt.legend.textStyle.fontFamily) {
    opt.legend.textStyle.fontFamily = fontFamily
  }
}

/**
 * Adjust grid margins to prevent legend/chart overlap.
 * When legend is at right or left with overlay=false, the grid needs a larger
 * margin so that chart bars/lines don't extend into the legend area.
 */
function applyLegendGridMargins(
  option: echarts.EChartsOption,
  chartNode: SafeXmlNode,
  defaultFs: number | undefined
): void {
  // Mutates option.grid in place when a side legend needs room.
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const opt = option as any
  if (!opt.grid || !opt.legend) return
  if (opt.legend.show === false) return

  const legend = chartNode.child('legend')
  if (!legend.exists()) return
  const overlay = legend.child('overlay').attr('val') === '1'
  if (overlay) return

  const posVal = legend.child('legendPos').attr('val') || 'r'

  // Only adjust for side-positioned legends
  if (posVal === 'r' || posVal === 'l') {
    // Estimate legend width based on legend names and font size
    const legendData = opt.legend.data as (string | { name: string })[] | undefined
    if (!legendData || legendData.length === 0) return

    const names = legendData.map((d: string | { name: string }) =>
      typeof d === 'string' ? d : d.name
    )
    const fs = opt.legend?.textStyle?.fontSize ?? defaultFs ?? 12
    const iconWidth = Number(opt.legend?.itemWidth) || fs
    // Estimate legend width: icon (~fontSize) + gap + text + padding
    // Measure max text width considering CJK (wider) vs Latin (narrower) chars
    let maxTextPx = 0
    for (const n of names) {
      let w = 0
      for (const ch of n) {
        // Heuristic: code points above U+2E80 (CJK and beyond) count as full-width.
        w += ch.charCodeAt(0) > 0x2e80 ? fs : fs * 0.55
      }
      if (w > maxTextPx) maxTextPx = w
    }
    // icon + gap + text + left/right padding
    const estimatedLegendPx = iconWidth + 8 + maxTextPx + 14
    const gridMarginPx = Math.max(84, Math.round(estimatedLegendPx + 18))

    // Check if manual grid layout was applied — don't override
    if (typeof opt.grid.left === 'string' && opt.grid.left.includes('%')) return
    if (typeof opt.grid.right === 'string' && opt.grid.right.includes('%')) return

    if (posVal === 'r') {
      opt.grid.right = gridMarginPx
    } else {
      opt.grid.left = gridMarginPx
    }
  }
}

/**
 * Compute a "nice" axis max/min that PowerPoint would auto-calculate.
+ * PowerPoint rounds the value axis range to tidy tick marks (e.g., data max 5 → axis max 6). + * This post-processes the ECharts option to set axis max when not explicitly provided. + */ +function applyNiceAxisRange(option: echarts.EChartsOption): void { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const opt = option as any + + // Only applies to cartesian charts with axes + if (!opt.xAxis && !opt.yAxis) return + + // Collect all numeric data values from series, accounting for stacking + const allValues: number[] = [] + const xValues: number[] = [] + const yValues: number[] = [] + const seriesArr = Array.isArray(opt.series) ? opt.series : opt.series ? [opt.series] : [] + + // Group series by stack key to compute stacked totals + const stackGroups = new Map() + const unstackedValues: number[] = [] + + for (const s of seriesArr) { + if (!s.data) continue + const vals: number[] = [] + for (const d of s.data) { + if (typeof d === 'number') { + vals.push(d) + } else if (d && typeof d === 'object' && 'value' in d && typeof d.value === 'number') { + vals.push(d.value) + } else if (Array.isArray(d)) { + if (d.length >= 2 && typeof d[0] === 'number' && typeof d[1] === 'number') { + xValues.push(d[0]) + yValues.push(d[1]) + } + for (const v of d) { + if (typeof v === 'number') vals.push(v) + } + } else { + vals.push(0) + } + } + if (s.stack) { + const key = String(s.stack) + if (!stackGroups.has(key)) stackGroups.set(key, []) + stackGroups.get(key)!.push(vals) + } else { + unstackedValues.push(...vals) + } + } + + // For stacked series, compute per-category sums + for (const group of stackGroups.values()) { + const maxLen = Math.max(...group.map((v) => v.length)) + for (let i = 0; i < maxLen; i++) { + let sum = 0 + for (const vals of group) { + sum += vals[i] ?? 
0 + } + allValues.push(sum) + } + } + allValues.push(...unstackedValues) + + if (allValues.length === 0) return + + const cartesianScatter = + xValues.length > 0 && + yValues.length > 0 && + (Array.isArray(opt.xAxis) ? opt.xAxis[0] : opt.xAxis)?.type === 'value' && + (Array.isArray(opt.yAxis) ? opt.yAxis[0] : opt.yAxis)?.type === 'value' + + const applyAxisExtent = (axis: any, values: number[], desiredTicks: number) => { + if (!axis || axis.type !== 'value' || values.length === 0) return + if (axis.min !== undefined && axis.max !== undefined) return + const dataMin = Math.min(...values) + const dataMax = Math.max(...values) + const interval = niceAxisInterval(dataMax, dataMin, desiredTicks) + if (axis.max === undefined) { + axis.max = niceAxisMax(dataMax, dataMin, desiredTicks) + } + if (axis.min === undefined && dataMin >= 0) { + axis.min = 0 + } + if (axis.interval === undefined) { + axis.interval = interval + } + } + + if (cartesianScatter) { + const xAxes = (Array.isArray(opt.xAxis) ? opt.xAxis : [opt.xAxis]) as Record[] + const yAxes = (Array.isArray(opt.yAxis) ? opt.yAxis : [opt.yAxis]) as Record[] + xAxes.forEach((ax) => applyAxisExtent(ax, xValues, 3)) + yAxes.forEach((ax) => applyAxisExtent(ax, yValues, 7)) + return + } + + // Find the value axes (could be xAxis or yAxis depending on bar direction) + const processAxis = (axis: unknown) => { + if (!axis) return + const axes = Array.isArray(axis) ? 
axis : [axis] + for (const ax of axes) { + if (!ax || ax.type !== 'value') continue + // Skip if explicit min/max already set + if (ax.min !== undefined && ax.max !== undefined) continue + + const dataMin = Math.min(...allValues) + const dataMax = Math.max(...allValues) + + // Only set max when not already specified + if (ax.max === undefined) { + ax.max = niceAxisMax(dataMax, dataMin) + } + // Set min to 0 when all values are non-negative and no explicit min + if (ax.min === undefined && dataMin >= 0) { + ax.min = 0 + } + } + } + + processAxis(opt.xAxis) + processAxis(opt.yAxis) +} + +/** + * Calculate a "nice" axis maximum, similar to PowerPoint's algorithm. + * Given data max, returns a rounded-up value that gives clean tick marks with headroom. + * PowerPoint always adds at least one tick interval above the data max. + */ +function niceAxisMax(dataMax: number, dataMin: number, desiredTicks = 5): number { + const niceInterval = niceAxisInterval(dataMax, dataMin, desiredTicks) + const niceMax = Math.ceil(dataMax / niceInterval) * niceInterval + return niceMax <= dataMax ? niceMax + niceInterval : niceMax +} + +function niceAxisInterval(dataMax: number, dataMin: number, desiredTicks = 5): number { + if (dataMax === 0 && dataMin === 0) return 1 + const range = dataMax - Math.min(0, dataMin) + if (range === 0) return dataMax > 0 ? dataMax * 1.2 : 1 + const rawInterval = range / desiredTicks + const magnitude = 10 ** Math.floor(Math.log10(rawInterval)) + const residual = rawInterval / magnitude + let niceInterval: number + if (residual <= 1) niceInterval = magnitude + else if (residual <= 2) niceInterval = 2 * magnitude + else if (residual <= 5) niceInterval = 5 * magnitude + else niceInterval = 10 * magnitude + return niceInterval +} + +/** + * Extract chart-space default font size from chartSpace > txPr > defRPr@sz. + * Returns size in pixels (OOXML sz is 1/100 pt; we convert to px at 96 DPI: 1pt = 1.333px). 
+ * PowerPoint uses this as the default text size for all chart text elements. + */ +function extractChartDefaultFontSize(chartSpaceNode: SafeXmlNode): number | undefined { + const txPr = chartSpaceNode.child('txPr') + if (!txPr.exists()) return undefined + for (const p of txPr.children('p')) { + const pPr = p.child('pPr') + if (!pPr.exists()) continue + const defRPr = pPr.child('defRPr') + if (!defRPr.exists()) continue + const sz = defRPr.numAttr('sz') + if (sz !== undefined && sz > 0) { + // sz is 1/100 pt → convert to px at 96 DPI (1pt = 96/72 px ≈ 1.333px) + return Math.round((sz / 100) * (96 / 72)) + } + } + return undefined +} + +/** + * Estimate legend width as a percentage of chart width based on legend text length and font size. + * Used to reserve grid space when legend is at right or left (non-overlay). + */ +function estimateLegendWidthPct( + legendInfo: LegendInfo | undefined, + legendNames: string[], + baseFontSize: number +): string { + if (!legendInfo || legendInfo.overlay) return '2%' + const opt = legendInfo.option as Record | undefined + if (!opt) return '2%' + const isRight = opt.right !== undefined && opt.top !== undefined && opt.bottom === undefined + const isLeft = opt.left !== undefined && opt.top !== undefined && opt.bottom === undefined + if (!isRight && !isLeft) return '2%' + // Estimate based on longest label + icon + padding + const maxLen = Math.max(1, ...legendNames.map((n) => n.length)) + // Approximate: each char ≈ 0.6 * fontSize, plus icon (≈ fontSize) and padding (≈ fontSize) + const estimatedPx = maxLen * baseFontSize * 0.6 + baseFontSize * 3 + // Convert to percentage of typical chart width (assume ~600px as base) + const pct = Math.min(40, Math.max(15, Math.round((estimatedPx / 600) * 100))) + return `${pct}%` +} + +function createLegendIcon( + icon: string | undefined, + color: string, + width: number, + height: number, + strokeWidth = 2 +): SVGSVGElement { + const ns = 'http://www.w3.org/2000/svg' + const svg = 
document.createElementNS(ns, 'svg') + svg.setAttribute('width', String(width)) + svg.setAttribute('height', String(height)) + svg.setAttribute('viewBox', `0 0 ${width} ${height}`) + svg.style.display = 'block' + const normalized = icon ?? 'rect' + + if (normalized.startsWith('path://')) { + const path = document.createElementNS(ns, 'path') + path.setAttribute('d', normalized.slice('path://'.length)) + path.setAttribute('fill', 'none') + path.setAttribute('stroke', color) + path.setAttribute('stroke-width', String(strokeWidth)) + path.setAttribute('stroke-linecap', 'round') + svg.appendChild(path) + return svg + } + + if (normalized === 'diamond') { + const path = document.createElementNS(ns, 'path') + path.setAttribute( + 'd', + `M${width / 2} 1 L${width - 1} ${height / 2} L${width / 2} ${height - 1} L1 ${height / 2} Z` + ) + path.setAttribute('fill', color) + svg.appendChild(path) + return svg + } + + if (normalized === 'circle') { + const circle = document.createElementNS(ns, 'circle') + circle.setAttribute('cx', String(width / 2)) + circle.setAttribute('cy', String(height / 2)) + circle.setAttribute('r', String(Math.max(2, Math.min(width, height) / 2 - 1))) + circle.setAttribute('fill', color) + svg.appendChild(circle) + return svg + } + + const rect = document.createElementNS(ns, 'rect') + rect.setAttribute('x', '1') + rect.setAttribute('y', '1') + rect.setAttribute('width', String(Math.max(2, width - 2))) + rect.setAttribute('height', String(Math.max(2, height - 2))) + rect.setAttribute('fill', color) + svg.appendChild(rect) + return svg +} + +function resolveInsetToPx(value: string | number, total: number): string { + if (typeof value === 'number') return `${value}px` + const trimmed = value.trim() + if (trimmed.endsWith('%')) { + const pct = Number.parseFloat(trimmed.slice(0, -1)) + if (!Number.isNaN(pct)) return `${(pct / 100) * total}px` + } + return trimmed +} + +function buildCustomLegendOverlay( + option: echarts.EChartsOption, + size: { w: number; h: 
number } +): HTMLElement | null { + const legend = getLegendOptionObject(option.legend) + if (!legend || legend.show === false || legend.orient !== 'vertical') return null + if (legend.left === undefined && legend.right === undefined) return null + + const palette = Array.isArray(option.color) + ? option.color.filter((entry): entry is string => typeof entry === 'string') + : [] + + const rawData = legend.data ?? [] + type LegendOverlayEntry = { + name: string + icon: string | undefined + color: string + lineWidth: number + } + const entries = rawData + .map((item, index) => { + const name = typeof item === 'string' ? item : item.name + const itemIcon = typeof item === 'string' ? undefined : item.icon + if (!name) return null + const series = ( + Array.isArray(option.series) ? option.series : option.series ? [option.series] : [] + )[index] as Record | undefined + const lineStyle = (series?.lineStyle as Record | undefined) ?? {} + const itemStyle = (series?.itemStyle as Record | undefined) ?? {} + const color = + (typeof lineStyle.color === 'string' ? lineStyle.color : undefined) ?? + (typeof itemStyle.color === 'string' ? itemStyle.color : undefined) ?? + palette[index] ?? + '#2f6f8f' + const lineWidth = + typeof lineStyle.width === 'number' && Number.isFinite(lineStyle.width) + ? Math.max(1, lineStyle.width) + : 2 + return { name, icon: itemIcon ?? 
legend.icon, color, lineWidth } + }) + .filter((entry): entry is LegendOverlayEntry => entry !== null) + if (entries.length === 0) return null + + const overlay = document.createElement('div') + overlay.className = 'pptx-chart-custom-legend' + overlay.style.position = 'absolute' + overlay.style.display = 'flex' + overlay.style.flexDirection = 'column' + overlay.style.gap = '6px' + overlay.style.pointerEvents = 'none' + overlay.style.zIndex = '1' + overlay.style.whiteSpace = 'nowrap' + if (legend.left !== undefined) overlay.style.left = resolveInsetToPx(legend.left, size.w) + if (legend.right !== undefined) overlay.style.right = resolveInsetToPx(legend.right, size.w) + const sideLegend = + legend.orient === 'vertical' && (legend.left !== undefined || legend.right !== undefined) + if (sideLegend) { + overlay.style.top = `${size.h / 2}px` + overlay.style.transform = 'translateY(-50%)' + } else if (legend.top !== undefined) { + overlay.style.top = resolveInsetToPx(legend.top, size.h) + } + if (legend.bottom !== undefined) overlay.style.bottom = resolveInsetToPx(legend.bottom, size.h) + + const fontSize = legend.textStyle?.fontSize ?? 10 + const itemWidth = legend.itemWidth ?? fontSize + const itemHeight = legend.itemHeight ?? fontSize + + for (const entry of entries) { + const row = document.createElement('div') + row.style.display = 'flex' + row.style.alignItems = 'center' + row.style.gap = '6px' + + row.appendChild( + createLegendIcon(entry.icon, entry.color, itemWidth, itemHeight, entry.lineWidth) + ) + + const label = document.createElement('span') + label.textContent = entry.name + label.style.color = legend.textStyle?.color ?? 
'#000000' + label.style.fontSize = `${fontSize}px` + if (legend.textStyle?.fontFamily) { + label.style.fontFamily = legend.textStyle.fontFamily + } + if (legend.textStyle?.fontWeight !== undefined) { + label.style.fontWeight = String(legend.textStyle.fontWeight) + } + row.appendChild(label) + overlay.appendChild(row) + } + + return overlay +} + +function numToPct(val: number): string { + const n = Math.round(val * 10000) / 100 + return `${Number.isInteger(n) ? n.toFixed(0) : n}%`.replace(/\.0%$/, '%') +} + +/** + * Parse plotArea/layout/manualLayout to ECharts grid override. + */ +function extractManualLayoutGrid( + chartNode: SafeXmlNode +): Partial> { + const manual = chartNode.child('plotArea').child('layout').child('manualLayout') + if (!manual.exists()) return {} + const out: Partial> = {} + const x = manual.child('x').numAttr('val') + const y = manual.child('y').numAttr('val') + const w = manual.child('w').numAttr('val') + const h = manual.child('h').numAttr('val') + if (x !== undefined) out.left = numToPct(x) + if (y !== undefined) out.top = numToPct(y) + if (w !== undefined) out.width = numToPct(w) + if (h !== undefined) out.height = numToPct(h) + return out +} + +/** Result of parsing chart XML: option for ECharts, optional data table info. 
*/ +export interface ParseChartResult { + option: echarts.EChartsOption + dataTable?: DataTableInfo +} + +function buildOptionForChartType( + typeName: OoxmlChartType, + chartTypeNode: SafeXmlNode, + chartNode: SafeXmlNode, + seriesArr: SeriesData[], + ctx: RenderContext +): echarts.EChartsOption | undefined { + switch (typeName) { + case 'barChart': + case 'bar3DChart': + return buildBarChartOption(chartTypeNode, chartNode, seriesArr, ctx) + case 'lineChart': + case 'line3DChart': + return buildLineChartOption(chartTypeNode, chartNode, seriesArr, ctx, false) + case 'areaChart': + case 'area3DChart': + case 'surface3DChart': + return buildLineChartOption(chartTypeNode, chartNode, seriesArr, ctx, true) + case 'pieChart': + case 'pie3DChart': + return buildPieChartOption(chartTypeNode, chartNode, seriesArr, false, ctx) + case 'doughnutChart': + return buildPieChartOption(chartTypeNode, chartNode, seriesArr, true, ctx) + case 'radarChart': + return buildRadarChartOption(chartTypeNode, chartNode, seriesArr, ctx) + case 'scatterChart': + return buildScatterChartOption(chartTypeNode, chartNode, seriesArr, ctx) + case 'bubbleChart': + return buildBubbleChartOption(chartTypeNode, chartNode, seriesArr, ctx) + case 'stockChart': + return buildStockChartOption(chartTypeNode, chartNode, seriesArr, ctx) + default: + return undefined + } +} + +function isCartesianComboCapable(typeName: OoxmlChartType): boolean { + return ( + typeName === 'barChart' || + typeName === 'bar3DChart' || + typeName === 'lineChart' || + typeName === 'line3DChart' || + typeName === 'areaChart' || + typeName === 'area3DChart' || + typeName === 'surface3DChart' + ) +} + +function mergeLegendData( + primaryLegend: echarts.EChartsOption['legend'], + secondaryLegend: echarts.EChartsOption['legend'] +): echarts.EChartsOption['legend'] { + const primary = getLegendOptionObject(primaryLegend) + const secondary = getLegendOptionObject(secondaryLegend) + if (!primary) return secondaryLegend + if (!secondary) 
return primaryLegend + + const mergedData = [...(primary.data ?? []), ...(secondary.data ?? [])] + const seen = new Set() + const deduped = mergedData.filter((entry) => { + const key = typeof entry === 'string' ? entry : entry.name + if (seen.has(key)) return false + seen.add(key) + return true + }) + + const merged: LegendOptionObject = { + ...primary, + data: deduped, + } + if (deduped.some((entry) => typeof entry === 'object' && entry.icon)) { + merged.icon = undefined + } + return merged +} + +function mergeCartesianComboOptions( + primary: echarts.EChartsOption, + secondary: echarts.EChartsOption +): echarts.EChartsOption { + const primarySeries = Array.isArray(primary.series) ? primary.series : [] + const secondarySeries = Array.isArray(secondary.series) ? secondary.series : [] + return { + ...primary, + legend: mergeLegendData(primary.legend, secondary.legend), + series: [...primarySeries, ...secondarySeries], + } +} + +/** + * Parse a chart XML (chartSpace root) into an ECharts option object and optional data table info. + * Exported for unit testing. 
 */
export function parseChartXml(chartXml: SafeXmlNode, ctx: RenderContext): ParseChartResult {
  const chartCtx = createChartRenderContext(chartXml, ctx)
  const chartPalette = buildChartPalette(chartXml, chartCtx)
  // Navigate: chartSpace > chart > plotArea
  const chart = chartXml.child('chart')
  const plotArea = chart.child('plotArea')

  // No plot area at all — render a placeholder title instead of a chart.
  if (!plotArea.exists()) {
    return { option: { title: { text: 'Unsupported chart', left: 'center' } } }
  }

  // Extract background colors
  const { chartBg, plotAreaBg } = extractBackgroundColors(chartXml, chart, chartCtx)

  // One entry per chart-type element present under plotArea that actually has series.
  const chartTypeEntries = CHART_TYPE_ELEMENTS.map((typeName) => {
    const chartTypeNode = plotArea.child(typeName)
    if (!chartTypeNode.exists()) return null
    const seriesArr = parseSeries(chartTypeNode, chartCtx)
    if (seriesArr.length === 0) return null
    return { typeName, chartTypeNode, seriesArr }
  }).filter(
    (
      entry
    ): entry is { typeName: OoxmlChartType; chartTypeNode: SafeXmlNode; seriesArr: SeriesData[] } =>
      entry !== null
  )

  // Note: this loop returns on the FIRST entry that yields a buildable option;
  // later iterations only run when earlier chart types are unsupported.
  for (const [index, entry] of chartTypeEntries.entries()) {
    let option = buildOptionForChartType(
      entry.typeName,
      entry.chartTypeNode,
      chart,
      entry.seriesArr,
      chartCtx
    )
    if (!option) continue

    // Combo chart: when the first type is cartesian and more types follow,
    // merge every other cartesian type's series/legend into the first option.
    if (index === 0 && chartTypeEntries.length > 1 && isCartesianComboCapable(entry.typeName)) {
      for (const comboEntry of chartTypeEntries.slice(1)) {
        if (!isCartesianComboCapable(comboEntry.typeName)) continue
        const comboOption = buildOptionForChartType(
          comboEntry.typeName,
          comboEntry.chartTypeNode,
          chart,
          comboEntry.seriesArr,
          chartCtx
        )
        if (!comboOption) continue
        option = mergeCartesianComboOptions(option, comboOption)
      }
    }

    // Apply chart-space default font sizes to text elements that use hardcoded defaults
    const defaultFs = extractChartDefaultFontSize(chartXml)
    if (defaultFs) {
      applyDefaultFontSizes(option, defaultFs)
    }
    const defaultFontFamily = getChartThemeFontFamily(chartCtx)
    if (defaultFontFamily) {
      applyDefaultFontFamily(option, defaultFontFamily)
    }

    // Adjust grid margins for legend placement (non-overlay)
    applyLegendGridMargins(option, chart, defaultFs)

    // Apply PowerPoint-like nice axis range (adds headroom beyond data max)
    applyNiceAxisRange(option)

    // Apply background colors
    if (chartBg) {
      option.backgroundColor = chartBg
    }
    if (chartPalette && chartPalette.length > 0) {
      option.color = chartPalette
    }
    if (plotAreaBg && option.grid) {
      // Apply plot area background via grid (for cartesian charts)
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      ;(option.grid as any).backgroundColor = plotAreaBg
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      ;(option.grid as any).show = true
    }

    // For combo charts the data table lists ALL cartesian series in plot order;
    // otherwise only the current entry's series.
    const dataTableSeries =
      index === 0 && chartTypeEntries.length > 1 && isCartesianComboCapable(entry.typeName)
        ? chartTypeEntries
            .filter((candidate) => isCartesianComboCapable(candidate.typeName))
            .flatMap((candidate) => candidate.seriesArr)
            .sort((a, b) => a.order - b.order)
        : entry.seriesArr

    // Build data table info when c:dTable exists
    const dTableMeta = parseDataTable(plotArea)
    const dataTable: DataTableInfo | undefined = dTableMeta
      ? {
          seriesArr: dataTableSeries,
          showKeys: dTableMeta.showKeys,
          // First series that declares a number format wins for the whole table.
          formatCode: dataTableSeries.find((s) => s.formatCode)?.formatCode,
        }
      : undefined

    return { option, dataTable }
  }

  // No chart-type element produced an option — placeholder title.
  return {
    option: {
      title: { text: 'Unsupported chart type', left: 'center', textStyle: { fontSize: 12 } },
    },
  }
}

// ---------------------------------------------------------------------------
// Public Render Function
// ---------------------------------------------------------------------------

/**
 * Render a chart node into an HTML element with an ECharts instance.
 */
export function renderChart(node: ChartNodeData, ctx: RenderContext): HTMLElement {
  const wrapper = document.createElement('div')
  wrapper.style.position = 'absolute'
  wrapper.style.left = `${node.position.x}px`
  wrapper.style.top = `${node.position.y}px`
  wrapper.style.width = `${node.size.w}px`
  wrapper.style.height = `${node.size.h}px`
  wrapper.style.overflow = 'hidden'
  wrapper.style.display = 'flex'
  wrapper.style.flexDirection = 'column'

  // Chart XML may be missing (e.g. the part was not in the package) — show a placeholder.
  const chartXml = ctx.presentation.charts?.get(node.chartPath)
  if (!chartXml) {
    wrapper.style.border = '1px dashed #ccc'
    wrapper.style.display = 'flex'
    wrapper.style.alignItems = 'center'
    wrapper.style.justifyContent = 'center'
    wrapper.style.color = '#999'
    wrapper.style.fontSize = '12px'
    wrapper.textContent = 'Chart not found'
    return wrapper
  }

  // Create chart container (clip content so legend/title stay inside)
  const chartDiv = document.createElement('div')
  chartDiv.style.width = '100%'
  chartDiv.style.flex = '1'
  chartDiv.style.minWidth = '0'
  chartDiv.style.minHeight = '0'
  chartDiv.style.overflow = 'hidden'
  wrapper.appendChild(chartDiv)

  // Parse chart data and create ECharts option
  const { option, dataTable } = parseChartXml(chartXml, ctx)
  // When a custom HTML side-legend overlay is built, hide ECharts' own legend
  // so the legend is not drawn twice.
  const customLegend = buildCustomLegendOverlay(option, node.size)
  const legendOption = getLegendOptionObject(option.legend)
  if (customLegend && legendOption) {
    legendOption.show = false
    wrapper.appendChild(customLegend)
  }

  // Append data table below chart when c:dTable exists
  if (dataTable) {
    const seriesColors = dataTable.seriesArr.map((s) => s.colorHex).filter(Boolean) as string[]
    const tableEl = buildDataTableElement(
      dataTable,
      seriesColors.length > 0 ? seriesColors : undefined
    )
    wrapper.appendChild(tableEl)
  }

  // Initialize ECharts after the element is attached to the DOM.
  // Use requestAnimationFrame to ensure the container has dimensions.
  const chartSet = ctx.chartInstances
  requestAnimationFrame(() => {
    if (!chartDiv.isConnected) return
    // Guard against 0-size containers (e.g. hidden tabs); defer until non-zero.
    if (chartDiv.offsetWidth === 0 || chartDiv.offsetHeight === 0) {
      const sizeObserver = new ResizeObserver((entries) => {
        const { width, height } = entries[0].contentRect
        if (width > 0 && height > 0) {
          sizeObserver.disconnect()
          initChart(chartDiv, option, chartSet)
        }
      })
      sizeObserver.observe(chartDiv)
      return
    }
    initChart(chartDiv, option, chartSet)
  })

  return wrapper
}

/** Actually create ECharts instance, set option, and wire up resize + dispose. */
function initChart(
  container: HTMLElement,
  option: echarts.EChartsOption,
  // NOTE(review): type arguments were lost in transit — presumably
  // Set<echarts.ECharts>; confirm against the RenderContext declaration.
  chartInstances?: Set
): void {
  try {
    const chart = echarts.init(container)
    chart.setOption(option)
    chartInstances?.add(chart)

    // Handle container resize
    const ro = new ResizeObserver(() => {
      if (container.isConnected) {
        chart.resize()
      } else {
        // Container removed from DOM — dispose to prevent leaks
        ro.disconnect()
        if (!chart.isDisposed()) {
          chart.dispose()
        }
        chartInstances?.delete(chart)
      }
    })
    ro.observe(container)
  } catch (error) {
    // Init/setOption failure: log and degrade to a visible error placeholder.
    logger.warn('Failed to initialize ECharts', { error })
    container.style.display = 'flex'
    container.style.alignItems = 'center'
    container.style.justifyContent = 'center'
    container.style.color = '#999'
    container.style.fontSize = '12px'
    container.textContent = 'Chart render error'
  }
}
diff --git a/apps/sim/lib/pptx-renderer/renderer/group-renderer.ts b/apps/sim/lib/pptx-renderer/renderer/group-renderer.ts
new file mode 100644
index 00000000000..c9352ee7aab
--- /dev/null
+++ b/apps/sim/lib/pptx-renderer/renderer/group-renderer.ts
@@ -0,0 +1,218 @@
/**
 * Group renderer — renders grouped shapes with coordinate space remapping.
 */

import type { BaseNodeData } from '../model/nodes/base-node'
import type { GroupNodeData } from '../model/nodes/group-node'
import type { ShapeNodeData } from '../model/nodes/shape-node'
import type { RenderContext } from './render-context'

// ---------------------------------------------------------------------------
// Group Rendering
// ---------------------------------------------------------------------------

/**
 * Render a group node into an absolutely-positioned HTML element.
 *
 * Groups define a child coordinate space (childOffset + childExtent) that must
 * be remapped to the group's actual position and size. Each child's position
 * and size are transformed accordingly before rendering.
 *
 * @param node The parsed group node data
 * @param ctx The render context
 * @param renderNode A callback to render individual child nodes (avoids circular deps)
 */
export function renderGroup(
  node: GroupNodeData,
  ctx: RenderContext,
  renderNode: (childNode: BaseNodeData, ctx: RenderContext) => HTMLElement
): HTMLElement {
  const wrapper = document.createElement('div')
  wrapper.style.position = 'absolute'
  wrapper.style.left = `${node.position.x}px`
  wrapper.style.top = `${node.position.y}px`
  wrapper.style.width = `${node.size.w}px`
  wrapper.style.height = `${node.size.h}px`

  // Apply rotation transform (plus optional horizontal/vertical flips)
  const transforms: string[] = []
  if (node.rotation !== 0) {
    transforms.push(`rotate(${node.rotation}deg)`)
  }
  if (node.flipH) {
    transforms.push('scaleX(-1)')
  }
  if (node.flipV) {
    transforms.push('scaleY(-1)')
  }
  if (transforms.length > 0) {
    wrapper.style.transform = transforms.join(' ')
    wrapper.style.transformOrigin = 'center center'
  }

  const chOff = node.childOffset
  const chExt = node.childExtent
  const groupW = node.size.w
  const groupH = node.size.h

  // Resolve group fill from grpSpPr for children that use a:grpFill
  const grpSpPr = node.source.child('grpSpPr')
  const childCtx: RenderContext = { ...ctx }
  if (grpSpPr.exists()) {
    // Check if the group itself has a fill (solidFill, gradFill, etc.)
    // that children can inherit via grpFill
    const FILL_TAGS = ['solidFill', 'gradFill', 'blipFill', 'pattFill']
    for (const tag of FILL_TAGS) {
      if (grpSpPr.child(tag).exists()) {
        childCtx.groupFillNode = grpSpPr
        break
      }
    }
    // If the group itself uses grpFill, propagate the parent's group fill
    if (!childCtx.groupFillNode && grpSpPr.child('grpFill').exists() && ctx.groupFillNode) {
      childCtx.groupFillNode = ctx.groupFillNode
    }
  }

  // Cycle diagram: 3 pie sectors + 3 circular arrows → one circle (3 equal 120° sectors) centered in the diagram.
  // NOTE(review): type arguments were lost in transit — presumably
  // Map<number, BaseNodeData | undefined>; confirm upstream.
  const parsedChildren = new Map()
  // Lazily parse children by index, memoizing results (parse each child at most once).
  const parseByIndex = (index: number): BaseNodeData | undefined => {
    if (!parsedChildren.has(index)) {
      parsedChildren.set(index, parseGroupChild(node.children[index], ctx))
    }
    return parsedChildren.get(index)
  }

  let pieCommon: { x: number; y: number; w: number; h: number } | null = null
  if (node.children.length === 6 && chExt.w > 0 && chExt.h > 0) {
    // Detect the specific cycle-diagram shape mix: children 0-2 are 'pie'
    // preset geometries, children 3-5 are 'circularArrow'.
    const prst = (c: (typeof node.children)[0]) => c.child('spPr').child('prstGeom').attr('prst')
    const firstPie = node.children.slice(0, 3).every((c) => prst(c) === 'pie')
    const nextArrow = node.children.slice(3, 6).every((c) => prst(c) === 'circularArrow')
    if (firstPie && nextArrow) {
      // Use diagram extent center and a single circle size so the circle is centered and fits.
      const pieNodes = [0, 1, 2].map((i) => parseByIndex(i)).filter(Boolean)
      if (pieNodes.length === 3) {
        const pieW = Math.max(...pieNodes.map((n) => n!.size.w))
        const pieH = Math.max(...pieNodes.map((n) => n!.size.h))
        const circleSize = Math.min(pieW, pieH, chExt.w, chExt.h)
        const centerX = chOff.x + chExt.w / 2
        const centerY = chOff.y + chExt.h / 2
        const left = centerX - circleSize / 2
        const top = centerY - circleSize / 2
        // Common bounds expressed in group (output) space.
        pieCommon = {
          x: ((left - chOff.x) / chExt.w) * groupW,
          y: ((top - chOff.y) / chExt.h) * groupH,
          w: (circleSize / chExt.w) * groupW,
          h: (circleSize / chExt.h) * groupH,
        }
      }
    }
  }

  // Cycle diagram: render arrows first (3,4,5) then pies (0,1,2) so blue sectors draw on top.
  const order = pieCommon ? [3, 4, 5, 0, 1, 2] : undefined
  const indices = order ?? node.children.map((_, i) => i)

  for (const index of indices) {
    try {
      const childNode = parseByIndex(index)
      if (!childNode) continue

      // Remap child coordinates from child space to group space
      if (chExt.w > 0 && chExt.h > 0) {
        childNode.position = {
          x: ((childNode.position.x - chOff.x) / chExt.w) * groupW,
          y: ((childNode.position.y - chOff.y) / chExt.h) * groupH,
        }
        childNode.size = {
          w: (childNode.size.w / chExt.w) * groupW,
          h: (childNode.size.h / chExt.h) * groupH,
        }
      }

      // Overlap the 3 pie sectors at the same center so they form one circle
      if (pieCommon && index < 3 && childNode.nodeType === 'shape') {
        const origW = childNode.size.w
        const origH = childNode.size.h
        childNode.position = { x: pieCommon.x, y: pieCommon.y }
        childNode.size = { w: pieCommon.w, h: pieCommon.h }
        // Scale text box so labels stay in the right sector (txXfrm was in original shape space)
        const shapeNode = childNode as ShapeNodeData
        if (origW > 0 && origH > 0 && shapeNode.textBoxBounds) {
          const tb = shapeNode.textBoxBounds
          shapeNode.textBoxBounds = {
            x: (tb.x / origW) * pieCommon.w,
            y: (tb.y / origH) * pieCommon.h,
            w: (tb.w / origW) * pieCommon.w,
            h: (tb.h / origH) * pieCommon.h,
          }
        }
      }

      const el = renderNode(childNode, childCtx)
      wrapper.appendChild(el)
    } catch {
      // Per-child error handling — create error placeholder
      const errDiv = document.createElement('div')
      errDiv.style.position = 'absolute'
      errDiv.style.border = '1px dashed #ff6b6b'
      errDiv.style.backgroundColor = 'rgba(255,107,107,0.1)'
      errDiv.style.fontSize = '10px'
      errDiv.style.color = '#cc0000'
      errDiv.style.display = 'flex'
      errDiv.style.alignItems = 'center'
      errDiv.style.justifyContent = 'center'
      errDiv.style.padding = '2px'
      errDiv.textContent = 'Group child error'
      wrapper.appendChild(errDiv)
    }
  }

  return wrapper
}

// ---------------------------------------------------------------------------
// Child Node Parsing
// ---------------------------------------------------------------------------

import { parseChartNode } from '../model/nodes/chart-node'
import { parseGroupNode } from '../model/nodes/group-node'
import { parsePicNode } from '../model/nodes/pic-node'
// Import parsers for child dispatch
import { parseShapeNode } from '../model/nodes/shape-node'
import { parseTableNode } from '../model/nodes/table-node'
import { parseOleFrameAsPicture } from '../model/slide'
import type { SafeXmlNode } from '../parser/xml-parser'

/**
 * Parse a raw XML child node from a group's spTree into a typed node object.
 * Returns undefined for unrecognized or unsupported elements.
 */
function parseGroupChild(childXml: SafeXmlNode, ctx: RenderContext): BaseNodeData | undefined {
  const tag = childXml.localName

  switch (tag) {
    case 'sp':
    case 'cxnSp':
      return parseShapeNode(childXml)
    case 'pic':
      return parsePicNode(childXml)
    case 'grpSp':
      // Nested groups are parsed recursively; the caller remaps their coordinates.
      return parseGroupNode(childXml)
    case 'graphicFrame': {
      // graphicFrame can wrap a table, a chart, or an OLE object preview.
      const graphic = childXml.child('graphic')
      const graphicData = graphic.child('graphicData')
      if (graphicData.child('tbl').exists()) {
        return parseTableNode(childXml)
      }
      if ((graphicData.attr('uri') || '').includes('chart')) {
        return parseChartNode(childXml, ctx.slide.rels, ctx.slide.slidePath)
      }
      const olePic = parseOleFrameAsPicture(childXml)
      if (olePic) return olePic
      return undefined
    }
    default:
      return undefined
  }
}
diff --git a/apps/sim/lib/pptx-renderer/renderer/image-renderer.ts b/apps/sim/lib/pptx-renderer/renderer/image-renderer.ts
new file mode 100644
index 00000000000..49dbb66f6ac
--- /dev/null
+++ b/apps/sim/lib/pptx-renderer/renderer/image-renderer.ts
@@ -0,0 +1,656 @@
/**
 * Image renderer — converts PicNodeData into positioned HTML image/video/audio elements.
 */

import type { PicNodeData } from '../model/nodes/pic-node'
import { hexToRgb } from '../utils/color'
import { parseEmfContent } from '../utils/emf-parser'
import { getOrCreateBlobUrl, resolveMediaPath } from '../utils/media'
import { renderPdfToImage } from '../utils/pdf-renderer'
import type { RenderContext } from './render-context'
import { resolveColor } from './style-resolver'

/**
 * Check if a file extension is an unsupported legacy format (WMF only now; EMF is handled).
 */
function isUnsupportedFormat(path: string): boolean {
  const ext = path.split('.').pop()?.toLowerCase() || ''
  return ext === 'wmf'
}

/**
 * Check if a file path is an EMF image.
+ */ +function isEmfFormat(path: string): boolean { + const ext = path.split('.').pop()?.toLowerCase() || '' + return ext === 'emf' +} + +// --------------------------------------------------------------------------- +// Image Rendering +// --------------------------------------------------------------------------- + +/** + * Render a picture node into an absolutely-positioned HTML element. + * + * Handles: + * - Standard images (png, jpg, gif, svg, bmp) + * - Unsupported formats (emf, wmf) with placeholder + * - Video elements with controls + * - Audio elements with controls + * - Crop via CSS clip-path + * - Rotation and flip transforms + */ +export function renderImage(node: PicNodeData, ctx: RenderContext): HTMLElement { + const wrapper = document.createElement('div') + wrapper.style.position = 'absolute' + wrapper.style.left = `${node.position.x}px` + wrapper.style.top = `${node.position.y}px` + wrapper.style.width = `${node.size.w}px` + wrapper.style.height = `${node.size.h}px` + wrapper.style.overflow = 'hidden' + + // Apply transforms + const transforms: string[] = [] + if (node.rotation !== 0) { + transforms.push(`rotate(${node.rotation}deg)`) + } + if (node.flipH) { + transforms.push('scaleX(-1)') + } + if (node.flipV) { + transforms.push('scaleY(-1)') + } + if (transforms.length > 0) { + wrapper.style.transform = transforms.join(' ') + } + + // ---- Handle video ---- + if (node.isVideo) { + renderVideo(node, ctx, wrapper) + return wrapper + } + + // ---- Handle audio ---- + if (node.isAudio) { + renderAudio(node, ctx, wrapper) + return wrapper + } + + // ---- Resolve image data ---- + const embedId = node.blipEmbed + if (!embedId) { + renderPlaceholder(wrapper, 'No image data') + return wrapper + } + + const rel = ctx.slide.rels.get(embedId) + if (!rel) { + renderPlaceholder(wrapper, 'Missing image reference') + return wrapper + } + + const mediaPath = resolveMediaPath(rel.target) + + // Check for unsupported formats (WMF) + if 
(isUnsupportedFormat(mediaPath)) { + renderUnsupportedPlaceholder(wrapper, mediaPath) + return wrapper + } + + const data = ctx.presentation.media.get(mediaPath) + if (!data) { + renderPlaceholder(wrapper, 'Image not found') + return wrapper + } + + // Handle EMF images — extract embedded PDF/bitmap content + if (isEmfFormat(mediaPath)) { + const emfData = data instanceof Uint8Array ? data : new Uint8Array(data) + renderEmf(emfData, node, ctx, wrapper, mediaPath) + return wrapper + } + + // Create blob URL (with caching) + const url = getOrCreateBlobUrl(mediaPath, data, ctx.mediaUrlCache) + + // Create image element + const img = document.createElement('img') + img.src = url + img.style.width = '100%' + img.style.height = '100%' + img.style.objectFit = 'fill' + img.style.display = 'block' + img.draggable = false + + // Apply crop if present. + // OOXML srcRect defines what portion of the source image is cropped away. + // The REMAINING visible region must stretch to fill the entire shape bounding box. + // We achieve this by scaling the larger than the wrapper and offsetting it, + // relying on the wrapper's overflow:hidden to clip. + if (node.crop) { + const { top, right, bottom, left } = node.crop + // Visible fraction of original image in each dimension + const visibleW = 1 - left - right + const visibleH = 1 - top - bottom + // Guard against degenerate crops (<=0 visible) + if (visibleW > 0.001 && visibleH > 0.001) { + // Scale image so the visible portion fills the wrapper exactly + const scaleX = 1 / visibleW // e.g. if 95.4% visible → scale to ~104.8% + const scaleY = 1 / visibleH + // Use pixel values for offset — CSS margin-top/margin-left percentages are + // both relative to the containing block's WIDTH (not height), which causes + // incorrect offsets for non-square wrappers with significant crops. 
+ const wrapperW = node.size.w + const wrapperH = node.size.h + img.style.width = `${(scaleX * wrapperW).toFixed(4)}px` + img.style.height = `${(scaleY * wrapperH).toFixed(4)}px` + img.style.marginLeft = `${(-left * scaleX * wrapperW).toFixed(4)}px` + img.style.marginTop = `${(-top * scaleY * wrapperH).toFixed(4)}px` + } + } + + // --- Blip effects --- + const blip = node.source.child('blipFill').child('blip') + const blipOpacity = resolveBlipOpacity(blip) + if (blipOpacity < 1) { + wrapper.style.opacity = `${Number(blipOpacity.toFixed(4))}` + } + + // Duotone: recolor image (dark→color1, light→color2) + const duotone = blip.child('duotone') + if (duotone.exists()) { + applyDuotoneFilter(duotone, ctx, img, wrapper) + } + + // Luminance: brightness/contrast adjustment + const lum = blip.child('lum') + if (lum.exists()) { + applyLumEffect(lum, img) + } + + // BiLevel: threshold to black/white + const biLevel = blip.child('biLevel') + if (biLevel.exists()) { + applyBiLevelEffect(biLevel, img) + } + + wrapper.appendChild(img) + return wrapper +} + +/** + * Resolve overall image opacity from OOXML blip alpha modifiers. + * + * Supported today: + * - alphaModFix amt="N" + * - alphaMod val="N" + * - alphaOff val="N" + */ +function resolveBlipOpacity(blip: SafeXmlNode): number { + let alpha = 1 + + const alphaModFix = blip.child('alphaModFix') + if (alphaModFix.exists()) { + alpha *= (alphaModFix.numAttr('amt') ?? 100000) / 100000 + } + + const alphaMod = blip.child('alphaMod') + if (alphaMod.exists()) { + alpha *= (alphaMod.numAttr('val') ?? 100000) / 100000 + } + + const alphaOff = blip.child('alphaOff') + if (alphaOff.exists()) { + alpha += (alphaOff.numAttr('val') ?? 0) / 100000 + } + + return Math.max(0, Math.min(1, alpha)) +} + +/** + * Render a video element inside the wrapper. 
+ */ +function renderVideo(node: PicNodeData, ctx: RenderContext, wrapper: HTMLElement): void { + // Try to get video URL from mediaRId + const videoUrl = resolveMediaUrl(node.mediaRId, ctx) + + // Also try to show poster image from blipEmbed + let posterUrl: string | undefined + if (node.blipEmbed) { + const rel = ctx.slide.rels.get(node.blipEmbed) + if (rel) { + const mediaPath = resolveMediaPath(rel.target) + const data = ctx.presentation.media.get(mediaPath) + if (data && !isUnsupportedFormat(mediaPath)) { + posterUrl = getOrCreateBlobUrl(mediaPath, data, ctx.mediaUrlCache) + } + } + } + + if (videoUrl) { + const video = document.createElement('video') + video.src = videoUrl + video.controls = true + video.style.width = '100%' + video.style.height = '100%' + video.style.objectFit = 'contain' + video.style.backgroundColor = '#000' + if (posterUrl) { + video.poster = posterUrl + } + wrapper.appendChild(video) + } else if (posterUrl) { + // No video data available — show poster with play overlay + const img = document.createElement('img') + img.src = posterUrl + img.style.width = '100%' + img.style.height = '100%' + img.style.objectFit = 'fill' + wrapper.appendChild(img) + + const overlay = document.createElement('div') + overlay.style.position = 'absolute' + overlay.style.inset = '0' + overlay.style.display = 'flex' + overlay.style.alignItems = 'center' + overlay.style.justifyContent = 'center' + overlay.style.backgroundColor = 'rgba(0,0,0,0.3)' + overlay.style.color = '#fff' + overlay.style.fontSize = '24px' + overlay.textContent = '\u25B6' // play symbol + wrapper.appendChild(overlay) + } else { + renderPlaceholder(wrapper, 'Video') + } +} + +/** + * Render an audio element inside the wrapper. 
+ */ +function renderAudio(node: PicNodeData, ctx: RenderContext, wrapper: HTMLElement): void { + const audioUrl = resolveMediaUrl(node.mediaRId, ctx) + + if (audioUrl) { + // Show poster image if available + if (node.blipEmbed) { + const rel = ctx.slide.rels.get(node.blipEmbed) + if (rel) { + const mediaPath = resolveMediaPath(rel.target) + const data = ctx.presentation.media.get(mediaPath) + if (data && !isUnsupportedFormat(mediaPath)) { + const cached = getOrCreateBlobUrl(mediaPath, data, ctx.mediaUrlCache) + const img = document.createElement('img') + img.src = cached + img.style.width = '100%' + img.style.height = 'calc(100% - 32px)' + img.style.objectFit = 'contain' + wrapper.appendChild(img) + } + } + } + + const audio = document.createElement('audio') + audio.src = audioUrl + audio.controls = true + audio.style.width = '100%' + audio.style.position = 'absolute' + audio.style.bottom = '0' + audio.style.left = '0' + wrapper.appendChild(audio) + } else { + renderPlaceholder(wrapper, 'Audio') + } +} + +/** + * Resolve a media URL from a relationship ID. + */ +function resolveMediaUrl(rId: string | undefined, ctx: RenderContext): string | undefined { + if (!rId) return undefined + + const rel = ctx.slide.rels.get(rId) + if (!rel) return undefined + + // Check if target is an external URL + if (rel.target.startsWith('http://') || rel.target.startsWith('https://')) { + return rel.target + } + + // Resolve from embedded media + const mediaPath = resolveMediaPath(rel.target) + const data = ctx.presentation.media.get(mediaPath) + if (!data) return undefined + + return getOrCreateBlobUrl(mediaPath, data, ctx.mediaUrlCache) +} + +/** + * Render a placeholder div for missing or error content. 
+ */ +function renderPlaceholder(wrapper: HTMLElement, message: string): void { + const placeholder = document.createElement('div') + placeholder.style.width = '100%' + placeholder.style.height = '100%' + placeholder.style.display = 'flex' + placeholder.style.alignItems = 'center' + placeholder.style.justifyContent = 'center' + placeholder.style.backgroundColor = '#f0f0f0' + placeholder.style.color = '#888' + placeholder.style.fontSize = '12px' + placeholder.style.border = '1px dashed #ccc' + placeholder.textContent = message + wrapper.appendChild(placeholder) +} + +/** + * Render a placeholder for unsupported image formats (WMF). + */ +function renderUnsupportedPlaceholder(wrapper: HTMLElement, path: string): void { + const ext = path.split('.').pop()?.toUpperCase() || 'Unknown' + const placeholder = document.createElement('div') + placeholder.style.width = '100%' + placeholder.style.height = '100%' + placeholder.style.display = 'flex' + placeholder.style.flexDirection = 'column' + placeholder.style.alignItems = 'center' + placeholder.style.justifyContent = 'center' + placeholder.style.backgroundColor = '#f5f5f5' + placeholder.style.color = '#999' + placeholder.style.fontSize = '11px' + placeholder.style.border = '1px dashed #ddd' + + const icon = document.createElement('div') + icon.style.fontSize = '24px' + icon.style.marginBottom = '4px' + icon.textContent = '\uD83D\uDDBC' // framed picture emoji + + const label = document.createElement('div') + label.textContent = `Unsupported format: ${ext}` + + placeholder.appendChild(icon) + placeholder.appendChild(label) + wrapper.appendChild(placeholder) +} + +// --------------------------------------------------------------------------- +// EMF Rendering +// --------------------------------------------------------------------------- + +/** + * Render EMF content by extracting embedded PDF or bitmap data. 
+ */ +function renderEmf( + data: Uint8Array, + node: PicNodeData, + ctx: RenderContext, + wrapper: HTMLElement, + mediaPath: string +): void { + const content = parseEmfContent(data) + + switch (content.type) { + case 'pdf': + renderEmfPdf(content.data, wrapper, node, ctx, mediaPath) + break + case 'bitmap': + renderEmfBitmap(content.imageData, wrapper, ctx, mediaPath) + break + case 'empty': + // Render nothing — transparent placeholder + break + case 'unsupported': + renderUnsupportedPlaceholder(wrapper, mediaPath) + break + } +} + +/** + * Render an embedded PDF from EMF using pdfjs-dist. + * Populates the wrapper asynchronously — the wrapper is returned immediately. + */ +function renderEmfPdf( + pdfData: Uint8Array, + wrapper: HTMLElement, + node: PicNodeData, + ctx: RenderContext, + mediaPath: string +): void { + const cacheKey = `${mediaPath}:emf-pdf` + const cached = ctx.mediaUrlCache.get(cacheKey) + if (cached) { + wrapper.appendChild(createFillImage(cached)) + return + } + + renderPdfToImage(pdfData, node.size.w, node.size.h) + .then((url) => { + if (url) { + ctx.mediaUrlCache.set(cacheKey, url) + wrapper.appendChild(createFillImage(url)) + } + }) + .catch(() => { + // PDF rendering failed — leave wrapper empty (transparent) + }) +} + +/** + * Render an embedded DIB bitmap from EMF. 
+ */ +function renderEmfBitmap( + imageData: ImageData, + wrapper: HTMLElement, + ctx: RenderContext, + mediaPath: string +): void { + const cacheKey = `${mediaPath}:emf-bitmap` + const cached = ctx.mediaUrlCache.get(cacheKey) + if (cached) { + wrapper.appendChild(createFillImage(cached)) + return + } + + const canvas = document.createElement('canvas') + canvas.width = imageData.width + canvas.height = imageData.height + const canvasCtx = canvas.getContext('2d') + if (!canvasCtx) return + + canvasCtx.putImageData(imageData, 0, 0) + canvas.toBlob((blob) => { + if (!blob) return + const url = URL.createObjectURL(blob) + ctx.mediaUrlCache.set(cacheKey, url) + wrapper.appendChild(createFillImage(url)) + }, 'image/png') +} + +/** + * Create an element that fills its container. + */ +function createFillImage(url: string): HTMLImageElement { + const img = document.createElement('img') + img.src = url + img.style.width = '100%' + img.style.height = '100%' + img.style.objectFit = 'fill' + img.style.display = 'block' + img.draggable = false + return img +} + +// --------------------------------------------------------------------------- +// Duotone Effect +// --------------------------------------------------------------------------- + +import type { SafeXmlNode } from '../parser/xml-parser' + +/** + * Apply a duotone effect to an image via canvas pixel manipulation. + * + * OOXML `` contains two color children (dark and light). + * The image is converted to grayscale, then black→color1, white→color2. + */ +function applyDuotoneFilter( + duotone: SafeXmlNode, + ctx: RenderContext, + img: HTMLImageElement, + _wrapper: HTMLElement +): void { + // Extract the two colors (first = dark, second = light) + const colorChildren = duotone.allChildren() + if (colorChildren.length < 2) return + + const { color: c1 } = resolveColor(colorChildren[0], ctx) + const { color: c2 } = resolveColor(colorChildren[1], ctx) + if (!c1 || !c2) return + + const hex1 = c1.startsWith('#') ? 
c1 : `#${c1}` + const hex2 = c2.startsWith('#') ? c2 : `#${c2}` + const rgb1 = hexToRgb(hex1) + const rgb2 = hexToRgb(hex2) + + // After the image loads, redraw it through a canvas with duotone applied + const apply = () => { + const w = img.naturalWidth + const h = img.naturalHeight + if (!w || !h) return + + const canvas = document.createElement('canvas') + canvas.width = w + canvas.height = h + const c = canvas.getContext('2d') + if (!c) return + + c.drawImage(img, 0, 0) + const imageData = c.getImageData(0, 0, w, h) + const data = imageData.data + + for (let i = 0; i < data.length; i += 4) { + // Convert to grayscale using luminance weights + const gray = (0.2126 * data[i] + 0.7152 * data[i + 1] + 0.0722 * data[i + 2]) / 255 + // Linearly interpolate between color1 (dark) and color2 (light) + data[i] = Math.round(rgb1.r + (rgb2.r - rgb1.r) * gray) + data[i + 1] = Math.round(rgb1.g + (rgb2.g - rgb1.g) * gray) + data[i + 2] = Math.round(rgb1.b + (rgb2.b - rgb1.b) * gray) + // Alpha channel (data[i+3]) is preserved + } + + c.putImageData(imageData, 0, 0) + img.src = canvas.toDataURL() + } + + if (img.complete && img.naturalWidth) { + apply() + } else { + img.addEventListener('load', apply, { once: true }) + } +} + +// --------------------------------------------------------------------------- +// Luminance Effect +// --------------------------------------------------------------------------- + +/** + * Apply a luminance (brightness/contrast) effect to an image. + * + * OOXML `` supports `bright` (additive brightness offset, 0–100000 = 0–100%) + * and `contrast` (multiplicative contrast, -100000 to 100000). + * e.g. bright="100000" makes the entire image white (preserving alpha). + */ +function applyLumEffect(lum: SafeXmlNode, img: HTMLImageElement): void { + const bright = (lum.numAttr('bright') ?? 0) / 100000 // 0–1 + const contrast = (lum.numAttr('contrast') ?? 
0) / 100000 // -1 to 1 + + if (bright === 0 && contrast === 0) return + + const apply = () => { + const w = img.naturalWidth + const h = img.naturalHeight + if (!w || !h) return + + const canvas = document.createElement('canvas') + canvas.width = w + canvas.height = h + const c = canvas.getContext('2d') + if (!c) return + + c.drawImage(img, 0, 0) + const imageData = c.getImageData(0, 0, w, h) + const data = imageData.data + + for (let i = 0; i < data.length; i += 4) { + for (let ch = 0; ch < 3; ch++) { + // Normalize to 0–1 + let v = data[i + ch] / 255 + // Apply contrast (expand/compress around 0.5) + if (contrast !== 0) { + v = 0.5 + (v - 0.5) * (1 + contrast) + } + // Apply additive brightness offset + v += bright + data[i + ch] = Math.round(Math.max(0, Math.min(255, v * 255))) + } + // Alpha preserved + } + + c.putImageData(imageData, 0, 0) + img.src = canvas.toDataURL() + } + + if (img.complete && img.naturalWidth) { + apply() + } else { + img.addEventListener('load', apply, { once: true }) + } +} + +// --------------------------------------------------------------------------- +// BiLevel Effect +// --------------------------------------------------------------------------- + +/** + * Apply a bi-level (threshold) effect to an image. + * + * OOXML `` converts the image to black and white. + * Each pixel's luminance is compared to the threshold (0–100000 = 0–100%). + * Pixels above become white, pixels below become black. Alpha is preserved. + */ +function applyBiLevelEffect(biLevel: SafeXmlNode, img: HTMLImageElement): void { + const thresh = (biLevel.numAttr('thresh') ?? 
50000) / 100000 // 0–1 + + const apply = () => { + const w = img.naturalWidth + const h = img.naturalHeight + if (!w || !h) return + + const canvas = document.createElement('canvas') + canvas.width = w + canvas.height = h + const c = canvas.getContext('2d') + if (!c) return + + c.drawImage(img, 0, 0) + const imageData = c.getImageData(0, 0, w, h) + const data = imageData.data + + for (let i = 0; i < data.length; i += 4) { + const gray = (0.2126 * data[i] + 0.7152 * data[i + 1] + 0.0722 * data[i + 2]) / 255 + const val = gray >= thresh ? 255 : 0 + data[i] = val + data[i + 1] = val + data[i + 2] = val + // Alpha preserved + } + + c.putImageData(imageData, 0, 0) + img.src = canvas.toDataURL() + } + + if (img.complete && img.naturalWidth) { + apply() + } else { + img.addEventListener('load', apply, { once: true }) + } +} diff --git a/apps/sim/lib/pptx-renderer/renderer/predefined-table-styles.ts b/apps/sim/lib/pptx-renderer/renderer/predefined-table-styles.ts new file mode 100644 index 00000000000..d02eeef85eb --- /dev/null +++ b/apps/sim/lib/pptx-renderer/renderer/predefined-table-styles.ts @@ -0,0 +1,805 @@ +/** + * Predefined (built-in) Office table styles. + * + * PowerPoint has 74 predefined table styles that exist natively but are NOT + * embedded in the PPTX's ppt/tableStyles.xml. Any PPTX can reference them by + * UUID. This module generates synthetic XML matching the schema + * so they flow through the existing rendering pipeline unchanged. + * + * Derived from LibreOffice's predefined-table-styles.cxx (MPL-2.0) and + * cross-verified against the Microsoft OOXML predefined style map. 
+ */ + +import { parseXml, type SafeXmlNode } from '../parser/xml-parser' + +// --------------------------------------------------------------------------- +// UUID → (styleName, accent) map — 74 entries across 11 style groups +// --------------------------------------------------------------------------- + +const styleIdMap = new Map([ + // Themed-Style-1 + ['{2D5ABB26-0587-4C30-8999-92F81FD0307C}', ['Themed-Style-1', '']], + ['{3C2FFA5D-87B4-456A-9821-1D502468CF0F}', ['Themed-Style-1', 'accent1']], + ['{284E427A-3D55-4303-BF80-6455036E1DE7}', ['Themed-Style-1', 'accent2']], + ['{69C7853C-536D-4A76-A0AE-DD22124D55A5}', ['Themed-Style-1', 'accent3']], + ['{775DCB02-9BB8-47FD-8907-85C794F793BA}', ['Themed-Style-1', 'accent4']], + ['{35758FB7-9AC5-4552-8A53-C91805E547FA}', ['Themed-Style-1', 'accent5']], + ['{08FB837D-C827-4EFA-A057-4D05807E0F7C}', ['Themed-Style-1', 'accent6']], + + // Themed-Style-2 + ['{5940675A-B579-460E-94D1-54222C63F5DA}', ['Themed-Style-2', '']], + ['{D113A9D2-9D6B-4929-AA2D-F23B5EE8CBE7}', ['Themed-Style-2', 'accent1']], + ['{18603FDC-E32A-4AB5-989C-0864C3EAD2B8}', ['Themed-Style-2', 'accent2']], + ['{306799F8-075E-4A3A-A7F6-7FBC6576F1A4}', ['Themed-Style-2', 'accent3']], + ['{E269D01E-BC32-4049-B463-5C60D7B0CCD2}', ['Themed-Style-2', 'accent4']], + ['{327F97BB-C833-4FB7-BDE5-3F7075034690}', ['Themed-Style-2', 'accent5']], + ['{638B1855-1B75-4FBE-930C-398BA8C253C6}', ['Themed-Style-2', 'accent6']], + + // Light-Style-1 + ['{9D7B26C5-4107-4FEC-AEDC-1716B250A1EF}', ['Light-Style-1', '']], + ['{3B4B98B0-60AC-42C2-AFA5-B58CD77FA1E5}', ['Light-Style-1', 'accent1']], + ['{0E3FDE45-AF77-4B5C-9715-49D594BDF05E}', ['Light-Style-1', 'accent2']], + ['{C083E6E3-FA7D-4D7B-A595-EF9225AFEA82}', ['Light-Style-1', 'accent3']], + ['{D27102A9-8310-4765-A935-A1911B00CA55}', ['Light-Style-1', 'accent4']], + ['{5FD0F851-EC5A-4D38-B0AD-8093EC10F338}', ['Light-Style-1', 'accent5']], + ['{68D230F3-CF80-4859-8CE7-A43EE81993B5}', ['Light-Style-1', 'accent6']], + + // 
Light-Style-2 + ['{7E9639D4-E3E2-4D34-9284-5A2195B3D0D7}', ['Light-Style-2', '']], + ['{69012ECD-51FC-41F1-AA8D-1B2483CD663E}', ['Light-Style-2', 'accent1']], + ['{72833802-FEF1-4C79-8D5D-14CF1EAF98D9}', ['Light-Style-2', 'accent2']], + ['{F2DE63D5-997A-4646-A377-4702673A728D}', ['Light-Style-2', 'accent3']], + ['{17292A2E-F333-43FB-9621-5CBBE7FDCDCB}', ['Light-Style-2', 'accent4']], + ['{5A111915-BE36-4E01-A7E5-04B1672EAD32}', ['Light-Style-2', 'accent5']], + ['{912C8C85-51F0-491E-9774-3900AFEF0FD7}', ['Light-Style-2', 'accent6']], + + // Light-Style-3 + ['{616DA210-FB5B-4158-B5E0-FEB733F419BA}', ['Light-Style-3', '']], + ['{BC89EF96-8CEA-46FF-86C4-4CE0E7609802}', ['Light-Style-3', 'accent1']], + ['{5DA37D80-6434-44D0-A028-1B22A696006F}', ['Light-Style-3', 'accent2']], + ['{8799B23B-EC83-4686-B30A-512413B5E67A}', ['Light-Style-3', 'accent3']], + ['{ED083AE6-46FA-4A59-8FB0-9F97EB10719F}', ['Light-Style-3', 'accent4']], + ['{BDBED569-4797-4DF1-A0F4-6AAB3CD982D8}', ['Light-Style-3', 'accent5']], + ['{E8B1032C-EA38-4F05-BA0D-38AFFFC7BED3}', ['Light-Style-3', 'accent6']], + + // Medium-Style-1 + ['{793D81CF-94F2-401A-BA57-92F5A7B2D0C5}', ['Medium-Style-1', '']], + ['{B301B821-A1FF-4177-AEE7-76D212191A09}', ['Medium-Style-1', 'accent1']], + ['{9DCAF9ED-07DC-4A11-8D7F-57B35C25682E}', ['Medium-Style-1', 'accent2']], + ['{1FECB4D8-DB02-4DC6-A0A2-4F2EBAE1DC90}', ['Medium-Style-1', 'accent3']], + ['{1E171933-4619-4E11-9A3F-F7608DF75F80}', ['Medium-Style-1', 'accent4']], + ['{FABFCF23-3B69-468F-B69F-88F6DE6A72F2}', ['Medium-Style-1', 'accent5']], + ['{10A1B5D5-9B99-4C35-A422-299274C87663}', ['Medium-Style-1', 'accent6']], + + // Medium-Style-2 + ['{073A0DAA-6AF3-43AB-8588-CEC1D06C72B9}', ['Medium-Style-2', '']], + ['{5C22544A-7EE6-4342-B048-85BDC9FD1C3A}', ['Medium-Style-2', 'accent1']], + ['{21E4AEA4-8DFA-4A89-87EB-49C32662AFE0}', ['Medium-Style-2', 'accent2']], + ['{F5AB1C69-6EDB-4FF4-983F-18BD219EF322}', ['Medium-Style-2', 'accent3']], + 
['{00A15C55-8517-42AA-B614-E9B94910E393}', ['Medium-Style-2', 'accent4']], + ['{7DF18680-E054-41AD-8BC1-D1AEF772440D}', ['Medium-Style-2', 'accent5']], + ['{93296810-A885-4BE3-A3E7-6D5BEEA58F35}', ['Medium-Style-2', 'accent6']], + + // Medium-Style-3 + ['{8EC20E35-A176-4012-BC5E-935CFFF8708E}', ['Medium-Style-3', '']], + ['{6E25E649-3F16-4E02-A733-19D2CDBF48F0}', ['Medium-Style-3', 'accent1']], + ['{85BE263C-DBD7-4A20-BB59-AAB30ACAA65A}', ['Medium-Style-3', 'accent2']], + ['{EB344D84-9AFB-497E-A393-DC336BA19D2E}', ['Medium-Style-3', 'accent3']], + ['{EB9631B5-78F2-41C9-869B-9F39066F8104}', ['Medium-Style-3', 'accent4']], + ['{74C1A8A3-306A-4EB7-A6B1-4F7E0EB9C5D6}', ['Medium-Style-3', 'accent5']], + ['{2A488322-F2BA-4B5B-9748-0D474271808F}', ['Medium-Style-3', 'accent6']], + + // Medium-Style-4 + ['{D7AC3CCA-C797-4891-BE02-D94E43425B78}', ['Medium-Style-4', '']], + ['{69CF1AB2-1976-4502-BF36-3FF5EA218861}', ['Medium-Style-4', 'accent1']], + ['{8A107856-5554-42FB-B03E-39F5DBC370BA}', ['Medium-Style-4', 'accent2']], + ['{0505E3EF-67EA-436B-97B2-0124C06EBD24}', ['Medium-Style-4', 'accent3']], + ['{C4B1156A-380E-4F78-BDF5-A606A8083BF9}', ['Medium-Style-4', 'accent4']], + ['{22838BEF-8BB2-4498-84A7-C5851F593DF1}', ['Medium-Style-4', 'accent5']], + ['{16D9F66E-5EB9-4882-86FB-DCBF35E3C3E4}', ['Medium-Style-4', 'accent6']], + + // Dark-Style-1 + ['{E8034E78-7F5D-4C2E-B375-FC64B27BC917}', ['Dark-Style-1', '']], + ['{125E5076-3810-47DD-B79F-674D7AD40C01}', ['Dark-Style-1', 'accent1']], + ['{37CE84F3-28C3-443E-9E96-99CF82512B78}', ['Dark-Style-1', 'accent2']], + ['{D03447BB-5D67-496B-8E87-E561075AD55C}', ['Dark-Style-1', 'accent3']], + ['{E929F9F4-4A8F-4326-A1B4-22849713DDAB}', ['Dark-Style-1', 'accent4']], + ['{8FD4443E-F989-4FC4-A0C8-D5A2AF1F390B}', ['Dark-Style-1', 'accent5']], + ['{AF606853-7671-496A-8E4F-DF71F8EC918B}', ['Dark-Style-1', 'accent6']], + + // Dark-Style-2 (only 4 variants) + ['{5202B0CA-FC54-4496-8BCA-5EF66A818D29}', ['Dark-Style-2', '']], + 
['{0660B408-B3CF-4A94-85FC-2B1E0A45F4A2}', ['Dark-Style-2', 'accent1']], + ['{91EBBBCC-DAD2-459C-BE2E-F6DE35CF9A28}', ['Dark-Style-2', 'accent3']], + ['{46F890A9-2807-4EBB-B81D-B2AA78EC7F39}', ['Dark-Style-2', 'accent5']], +]) + +// --------------------------------------------------------------------------- +// XML helpers — reduce boilerplate in style generators +// --------------------------------------------------------------------------- + +const NS = 'xmlns:a="http://schemas.openxmlformats.org/drawingml/2006/main"' + +/** Solid fill with a scheme color and optional transform */ +function fillSolid(scheme: string, transform?: string): string { + const mod = transform ? `` : '' + return `${mod}` +} + +/** A border element with scheme color */ +function borderLn(scheme: string, transform?: string): string { + const mod = transform ? `` : '' + return `${mod}` +} + +/** Text color element within tcTxStyle */ +function tcTxStyle(scheme: string, bold?: boolean): string { + const bAttr = bold ? ' b="on"' : '' + const colorEl = scheme ? `` : '' + return `${colorEl}` +} + +/** Style part with optional fill, borders, and text style */ +function stylePart( + tag: string, + opts: { + textColor?: string + bold?: boolean + fill?: string + borders?: Record + } +): string { + if (!opts.textColor && !opts.bold && !opts.fill && !opts.borders) return '' + const parts: string[] = [``] + if (opts.textColor || opts.bold) parts.push(tcTxStyle(opts.textColor ?? 
'', opts.bold)) + parts.push('') + if (opts.fill) parts.push(opts.fill) + if (opts.borders) { + parts.push('') + for (const [side, ln] of Object.entries(opts.borders)) { + parts.push(`${ln}`) + } + parts.push('') + } + parts.push('') + parts.push(``) + return parts.join('') +} + +// --------------------------------------------------------------------------- +// Style group XML generators +// --------------------------------------------------------------------------- + +function themedStyle1(accent: string, styleId: string): string { + const hasAccent = accent !== '' + const accentVal = hasAccent ? accent : 'tx1' + const parts: string[] = [] + + if (hasAccent) { + // wholeTbl: text=dk1, borders=accent on all sides + const allBorders: Record = { + left: borderLn(accentVal), + right: borderLn(accentVal), + top: borderLn(accentVal), + bottom: borderLn(accentVal), + insideH: borderLn(accentVal), + insideV: borderLn(accentVal), + } + parts.push(stylePart('wholeTbl', { textColor: 'dk1', borders: allBorders })) + + // band1H/V: accent + alpha(40000) + const bandFill = fillSolid(accentVal, `alpha val="40000"`) + parts.push(stylePart('band1H', { fill: bandFill })) + parts.push(stylePart('band1V', { fill: bandFill })) + + // firstRow: text=lt1, bold, fill=accent, borders=accent (+ bottom=lt1) + parts.push( + stylePart('firstRow', { + textColor: 'lt1', + bold: true, + fill: fillSolid(accentVal), + borders: { + left: borderLn(accentVal), + right: borderLn(accentVal), + top: borderLn(accentVal), + bottom: borderLn('lt1'), + }, + }) + ) + + // lastRow: bold, borders=accent + parts.push( + stylePart('lastRow', { + bold: true, + borders: { + left: borderLn(accentVal), + right: borderLn(accentVal), + top: borderLn(accentVal), + bottom: borderLn(accentVal), + }, + }) + ) + + // firstCol/lastCol: bold, borders=accent (+ insideH) + const colBorders: Record = { + left: borderLn(accentVal), + right: borderLn(accentVal), + top: borderLn(accentVal), + bottom: borderLn(accentVal), + 
insideH: borderLn(accentVal), + } + parts.push(stylePart('firstCol', { bold: true, borders: colBorders })) + parts.push(stylePart('lastCol', { bold: true, borders: colBorders })) + } else { + // No accent: text=tx1, band with alpha + parts.push(stylePart('wholeTbl', { textColor: 'tx1' })) + const bandFill = fillSolid('tx1', `alpha val="40000"`) + parts.push(stylePart('band1H', { fill: bandFill })) + parts.push(stylePart('band1V', { fill: bandFill })) + } + + return wrapTblStyle(styleId, 'Themed-Style-1', parts.join('')) +} + +function themedStyle2(accent: string, styleId: string): string { + const hasAccent = accent !== '' + const parts: string[] = [] + + if (hasAccent) { + const accentVal = accent + // tblBg: accent fill + const tblBg = `` + + // wholeTbl: text=lt1, outer borders=accent+tint(50000) + const outerBorders: Record = { + left: borderLn(accentVal, `tint val="50000"`), + right: borderLn(accentVal, `tint val="50000"`), + top: borderLn(accentVal, `tint val="50000"`), + bottom: borderLn(accentVal, `tint val="50000"`), + } + parts.push(stylePart('wholeTbl', { textColor: 'lt1', borders: outerBorders })) + + // band1H/V: lt1 + alpha(20000) + const bandFill = fillSolid('lt1', `alpha val="20000"`) + parts.push(stylePart('band1H', { fill: bandFill })) + parts.push(stylePart('band1V', { fill: bandFill })) + + // firstRow: text=lt1, bold, bottom border=lt1 + parts.push( + stylePart('firstRow', { textColor: 'lt1', bold: true, borders: { bottom: borderLn('lt1') } }) + ) + // lastRow: bold, top border=lt1 + parts.push(stylePart('lastRow', { bold: true, borders: { top: borderLn('lt1') } })) + // firstCol: bold, right border=lt1 + parts.push(stylePart('firstCol', { bold: true, borders: { right: borderLn('lt1') } })) + // lastCol: bold, left border=lt1 + parts.push(stylePart('lastCol', { bold: true, borders: { left: borderLn('lt1') } })) + + return wrapTblStyle(styleId, 'Themed-Style-2', tblBg + parts.join('')) + } + // No accent: text=tx1 (implicit), outer 
borders=tx1+tint(50000), inside borders=tx1 + const outerBorders: Record = { + left: borderLn('tx1', `tint val="50000"`), + right: borderLn('tx1', `tint val="50000"`), + top: borderLn('tx1', `tint val="50000"`), + bottom: borderLn('tx1', `tint val="50000"`), + insideH: borderLn('tx1'), + insideV: borderLn('tx1'), + } + parts.push(stylePart('wholeTbl', { borders: outerBorders })) + + const bandFill = fillSolid('tx1', `alpha val="20000"`) + parts.push(stylePart('band1H', { fill: bandFill })) + parts.push(stylePart('band1V', { fill: bandFill })) + + return wrapTblStyle(styleId, 'Themed-Style-2', parts.join('')) +} + +function lightStyle1(accent: string, styleId: string): string { + const accentVal = accent || 'tx1' + const parts: string[] = [] + + // wholeTbl: text=tx1, top/bottom borders + parts.push( + stylePart('wholeTbl', { + textColor: 'tx1', + borders: { + top: borderLn(accentVal), + bottom: borderLn(accentVal), + }, + }) + ) + + // band1H/V: accent + alpha(20000) + const bandFill = fillSolid(accentVal, `alpha val="20000"`) + parts.push(stylePart('band1H', { fill: bandFill })) + parts.push(stylePart('band1V', { fill: bandFill })) + + // firstRow: text=tx1, bold, bottom border + parts.push( + stylePart('firstRow', { + textColor: 'tx1', + bold: true, + borders: { bottom: borderLn(accentVal) }, + }) + ) + + // lastRow: bold, top border + parts.push(stylePart('lastRow', { bold: true, borders: { top: borderLn(accentVal) } })) + + // firstCol: bold text + parts.push(stylePart('firstCol', { textColor: 'tx1', bold: true })) + // lastCol: bold text + parts.push(stylePart('lastCol', { textColor: 'tx1', bold: true })) + + return wrapTblStyle(styleId, 'Light-Style-1', parts.join('')) +} + +function lightStyle2(accent: string, styleId: string): string { + const accentVal = accent || 'tx1' + const parts: string[] = [] + + // wholeTbl: text=tx1, all 4 outer borders + parts.push( + stylePart('wholeTbl', { + textColor: 'tx1', + borders: { + left: borderLn(accentVal), + right: 
borderLn(accentVal), + top: borderLn(accentVal), + bottom: borderLn(accentVal), + }, + }) + ) + + // band1H: top+bottom borders + parts.push( + stylePart('band1H', { + borders: { + top: borderLn(accentVal), + bottom: borderLn(accentVal), + }, + }) + ) + + // band1V/band2V: left+right borders + parts.push( + stylePart('band1V', { + borders: { left: borderLn(accentVal), right: borderLn(accentVal) }, + }) + ) + parts.push( + stylePart('band2V', { + borders: { left: borderLn(accentVal), right: borderLn(accentVal) }, + }) + ) + + // firstRow: text=bg1, bold, fill=accent + parts.push(stylePart('firstRow', { textColor: 'bg1', bold: true, fill: fillSolid(accentVal) })) + + // lastRow: bold, top border + parts.push(stylePart('lastRow', { bold: true, borders: { top: borderLn(accentVal) } })) + + // firstCol: bold + parts.push(stylePart('firstCol', { bold: true })) + // lastCol: bold + parts.push(stylePart('lastCol', { bold: true })) + + return wrapTblStyle(styleId, 'Light-Style-2', parts.join('')) +} + +function lightStyle3(accent: string, styleId: string): string { + const accentVal = accent || 'tx1' + const parts: string[] = [] + + // wholeTbl: text=tx1, all 6 borders + parts.push( + stylePart('wholeTbl', { + textColor: 'tx1', + borders: { + left: borderLn(accentVal), + right: borderLn(accentVal), + top: borderLn(accentVal), + bottom: borderLn(accentVal), + insideH: borderLn(accentVal), + insideV: borderLn(accentVal), + }, + }) + ) + + // band1H/V: accent + alpha(20000) + const bandFill = fillSolid(accentVal, `alpha val="20000"`) + parts.push(stylePart('band1H', { fill: bandFill })) + parts.push(stylePart('band1V', { fill: bandFill })) + + // firstRow: text=accent, bold, bottom border + parts.push( + stylePart('firstRow', { + textColor: accentVal, + bold: true, + borders: { bottom: borderLn(accentVal) }, + }) + ) + + // lastRow: bold, top border + parts.push(stylePart('lastRow', { bold: true, borders: { top: borderLn(accentVal) } })) + + // firstCol: bold + 
parts.push(stylePart('firstCol', { bold: true })) + // lastCol: bold + parts.push(stylePart('lastCol', { bold: true })) + + return wrapTblStyle(styleId, 'Light-Style-3', parts.join('')) +} + +function mediumStyle1(accent: string, styleId: string): string { + const accentVal = accent || 'dk1' + const parts: string[] = [] + + // wholeTbl: text=dk1, fill=lt1, borders (left/right/top/bottom/insideH) + parts.push( + stylePart('wholeTbl', { + textColor: 'dk1', + fill: fillSolid('lt1'), + borders: { + left: borderLn(accentVal), + right: borderLn(accentVal), + top: borderLn(accentVal), + bottom: borderLn(accentVal), + insideH: borderLn(accentVal), + }, + }) + ) + + // band1H/V: accent + tint(20000) + const bandFill = fillSolid(accentVal, `tint val="20000"`) + parts.push(stylePart('band1H', { fill: bandFill })) + parts.push(stylePart('band1V', { fill: bandFill })) + + // firstRow: text=lt1, bold, fill=accent + parts.push(stylePart('firstRow', { textColor: 'lt1', bold: true, fill: fillSolid(accentVal) })) + + // lastRow: bold, fill=lt1, top border + parts.push( + stylePart('lastRow', { + bold: true, + fill: fillSolid('lt1'), + borders: { top: borderLn(accentVal) }, + }) + ) + + // firstCol: bold + parts.push(stylePart('firstCol', { bold: true })) + // lastCol: bold + parts.push(stylePart('lastCol', { bold: true })) + + return wrapTblStyle(styleId, 'Medium-Style-1', parts.join('')) +} + +function mediumStyle2(accent: string, styleId: string): string { + const accentVal = accent || 'dk1' + const parts: string[] = [] + + // wholeTbl: text=dk1, fill=accent+tint(20000), all borders=lt1 + parts.push( + stylePart('wholeTbl', { + textColor: 'dk1', + fill: fillSolid(accentVal, `tint val="20000"`), + borders: { + left: borderLn('lt1'), + right: borderLn('lt1'), + top: borderLn('lt1'), + bottom: borderLn('lt1'), + insideH: borderLn('lt1'), + insideV: borderLn('lt1'), + }, + }) + ) + + // band1H/V: accent + tint(40000) + const bandFill = fillSolid(accentVal, `tint val="40000"`) + 
parts.push(stylePart('band1H', { fill: bandFill })) + parts.push(stylePart('band1V', { fill: bandFill })) + + // firstRow: text=lt1, bold, fill=accent, bottom border=lt1 + parts.push( + stylePart('firstRow', { + textColor: 'lt1', + bold: true, + fill: fillSolid(accentVal), + borders: { bottom: borderLn('lt1') }, + }) + ) + + // lastRow: text=lt1, bold, fill=accent, top border=lt1 + parts.push( + stylePart('lastRow', { + textColor: 'lt1', + bold: true, + fill: fillSolid(accentVal), + borders: { top: borderLn('lt1') }, + }) + ) + + // firstCol: text=lt1, bold, fill=accent + parts.push(stylePart('firstCol', { textColor: 'lt1', bold: true, fill: fillSolid(accentVal) })) + + // lastCol: text=lt1, bold, fill=accent + parts.push(stylePart('lastCol', { textColor: 'lt1', bold: true, fill: fillSolid(accentVal) })) + + return wrapTblStyle(styleId, 'Medium-Style-2', parts.join('')) +} + +function mediumStyle3(accent: string, styleId: string): string { + const accentVal = accent || 'dk1' + const parts: string[] = [] + + // wholeTbl: text=dk1, fill=lt1, top/bottom borders=dk1 + parts.push( + stylePart('wholeTbl', { + textColor: 'dk1', + fill: fillSolid('lt1'), + borders: { + top: borderLn('dk1'), + bottom: borderLn('dk1'), + }, + }) + ) + + // band1H/V: dk1 + tint(20000) + const bandFill = fillSolid('dk1', `tint val="20000"`) + parts.push(stylePart('band1H', { fill: bandFill })) + parts.push(stylePart('band1V', { fill: bandFill })) + + // firstRow: text=lt1, bold, fill=accent, bottom border=dk1 + parts.push( + stylePart('firstRow', { + textColor: 'lt1', + bold: true, + fill: fillSolid(accentVal), + borders: { bottom: borderLn('dk1') }, + }) + ) + + // lastRow: bold, fill=lt1, top border=dk1 + parts.push( + stylePart('lastRow', { + bold: true, + fill: fillSolid('lt1'), + borders: { top: borderLn('dk1') }, + }) + ) + + // firstCol: text=lt1, bold, fill=accent + parts.push(stylePart('firstCol', { textColor: 'lt1', bold: true, fill: fillSolid(accentVal) })) + + // lastCol: text=lt1, 
bold, fill=accent + parts.push(stylePart('lastCol', { textColor: 'lt1', bold: true, fill: fillSolid(accentVal) })) + + return wrapTblStyle(styleId, 'Medium-Style-3', parts.join('')) +} + +function mediumStyle4(accent: string, styleId: string): string { + const accentVal = accent || 'dk1' + const parts: string[] = [] + + // wholeTbl: text=dk1, fill=accent+tint(20000), all 6 borders=accent + parts.push( + stylePart('wholeTbl', { + textColor: 'dk1', + fill: fillSolid(accentVal, `tint val="20000"`), + borders: { + left: borderLn(accentVal), + right: borderLn(accentVal), + top: borderLn(accentVal), + bottom: borderLn(accentVal), + insideH: borderLn(accentVal), + insideV: borderLn(accentVal), + }, + }) + ) + + // band1H/V: accent + tint(40000) + const bandFill = fillSolid(accentVal, `tint val="40000"`) + parts.push(stylePart('band1H', { fill: bandFill })) + parts.push(stylePart('band1V', { fill: bandFill })) + + // firstRow: text=accent, bold, fill=accent+tint(20000) + parts.push( + stylePart('firstRow', { + textColor: accentVal, + bold: true, + fill: fillSolid(accentVal, `tint val="20000"`), + }) + ) + + // lastRow: bold, fill=dk1+tint(20000), top border=dk1 + parts.push( + stylePart('lastRow', { + bold: true, + fill: fillSolid('dk1', `tint val="20000"`), + borders: { top: borderLn('dk1') }, + }) + ) + + // firstCol: bold + parts.push(stylePart('firstCol', { bold: true })) + // lastCol: bold + parts.push(stylePart('lastCol', { bold: true })) + + return wrapTblStyle(styleId, 'Medium-Style-4', parts.join('')) +} + +function darkStyle1(accent: string, styleId: string): string { + const hasAccent = accent !== '' + const accentVal = hasAccent ? accent : 'dk1' + const transformType = hasAccent ? 
'shade' : 'tint' + const parts: string[] = [] + + // wholeTbl: text=dk1, fill=accent+shade/tint(20000) + parts.push( + stylePart('wholeTbl', { + textColor: 'dk1', + fill: fillSolid(accentVal, `${transformType} val="20000"`), + }) + ) + + // band1H/V: accent + shade/tint(40000) + const bandFill = fillSolid(accentVal, `${transformType} val="40000"`) + parts.push(stylePart('band1H', { fill: bandFill })) + parts.push(stylePart('band1V', { fill: bandFill })) + + // firstRow: text=lt1, bold, fill=dk1, bottom border=lt1 + parts.push( + stylePart('firstRow', { + textColor: 'lt1', + bold: true, + fill: fillSolid('dk1'), + borders: { bottom: borderLn('lt1') }, + }) + ) + + // lastRow: bold, fill=accent+shade/tint(20000), top border=lt1 + parts.push( + stylePart('lastRow', { + bold: true, + fill: fillSolid(accentVal), + borders: { top: borderLn('lt1') }, + }) + ) + + // firstCol: bold, fill=accent+shade/tint(60000), right border=lt1 + parts.push( + stylePart('firstCol', { + bold: true, + fill: fillSolid(accentVal, `${transformType} val="60000"`), + borders: { right: borderLn('lt1') }, + }) + ) + + // lastCol: bold, fill=accent+shade/tint(60000), left border=lt1 + parts.push( + stylePart('lastCol', { + bold: true, + fill: fillSolid(accentVal, `${transformType} val="60000"`), + borders: { left: borderLn('lt1') }, + }) + ) + + return wrapTblStyle(styleId, 'Dark-Style-1', parts.join('')) +} + +function darkStyle2(accent: string, styleId: string): string { + const accentVal = accent || 'dk1' + const parts: string[] = [] + + // Determine firstRow fill: accent-shift logic + let firstRowFillColor: string + if (accent === '') firstRowFillColor = 'dk1' + else if (accent === 'accent1') firstRowFillColor = 'accent2' + else if (accent === 'accent3') firstRowFillColor = 'accent4' + else if (accent === 'accent5') firstRowFillColor = 'accent6' + else firstRowFillColor = accentVal + + // wholeTbl: text=dk1, fill=accent+tint(20000) + parts.push( + stylePart('wholeTbl', { + textColor: 'dk1', + 
fill: fillSolid(accentVal, `tint val="20000"`), + }) + ) + + // band1H/V: accent + tint(40000) + const bandFill = fillSolid(accentVal, `tint val="40000"`) + parts.push(stylePart('band1H', { fill: bandFill })) + parts.push(stylePart('band1V', { fill: bandFill })) + + // firstRow: text=lt1, bold, fill=firstRowFillColor + parts.push( + stylePart('firstRow', { + textColor: 'lt1', + bold: true, + fill: fillSolid(firstRowFillColor), + }) + ) + + // lastRow: bold, fill=accent+tint(20000), top border=dk1 + parts.push( + stylePart('lastRow', { + bold: true, + fill: fillSolid(accentVal, `tint val="20000"`), + borders: { top: borderLn('dk1') }, + }) + ) + + // firstCol: bold + parts.push(stylePart('firstCol', { bold: true })) + // lastCol: bold + parts.push(stylePart('lastCol', { bold: true })) + + return wrapTblStyle(styleId, 'Dark-Style-2', parts.join('')) +} + +// --------------------------------------------------------------------------- +// XML wrapper +// --------------------------------------------------------------------------- + +function wrapTblStyle(styleId: string, styleName: string, innerXml: string): string { + return `${innerXml}` +} + +// --------------------------------------------------------------------------- +// Style generator dispatch +// --------------------------------------------------------------------------- + +const styleGenerators: Record string> = { + 'Themed-Style-1': themedStyle1, + 'Themed-Style-2': themedStyle2, + 'Light-Style-1': lightStyle1, + 'Light-Style-2': lightStyle2, + 'Light-Style-3': lightStyle3, + 'Medium-Style-1': mediumStyle1, + 'Medium-Style-2': mediumStyle2, + 'Medium-Style-3': mediumStyle3, + 'Medium-Style-4': mediumStyle4, + 'Dark-Style-1': darkStyle1, + 'Dark-Style-2': darkStyle2, +} + +// --------------------------------------------------------------------------- +// Module-level cache & public API +// --------------------------------------------------------------------------- + +const cache = new Map() + +/** + * Get a 
predefined table style by its UUID. + * Returns the parsed SafeXmlNode (a:tblStyle element) or undefined if not a known predefined style. + * Results are cached — same UUID always returns the same instance. + */ +export function getPredefinedTableStyle(styleId: string): SafeXmlNode | undefined { + const cached = cache.get(styleId) + if (cached) return cached + + const entry = styleIdMap.get(styleId) + if (!entry) return undefined + + const [styleName, accent] = entry + const generator = styleGenerators[styleName] + if (!generator) return undefined + + const xml = generator(accent, styleId) + const node = parseXml(xml) + if (!node.exists()) return undefined + + cache.set(styleId, node) + return node +} + +/** Exported for testing: number of known predefined style UUIDs. */ +export const PREDEFINED_STYLE_COUNT = styleIdMap.size + +/** Exported for testing: all known style IDs. */ +export function getAllPredefinedStyleIds(): string[] { + return Array.from(styleIdMap.keys()) +} diff --git a/apps/sim/lib/pptx-renderer/renderer/render-context.ts b/apps/sim/lib/pptx-renderer/renderer/render-context.ts new file mode 100644 index 00000000000..16ec2131e64 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/renderer/render-context.ts @@ -0,0 +1,80 @@ +/** + * Render context — provides resolved theme/master/layout chain for a given slide. + */ + +import type { ECharts } from 'echarts' +import type { LayoutData } from '../model/layout' +import type { MasterData } from '../model/master' +import type { PresentationData } from '../model/presentation' +import type { SlideData } from '../model/slide' +import type { ThemeData } from '../model/theme' +import type { SafeXmlNode } from '../parser/xml-parser' + +export interface RenderContext { + presentation: PresentationData + slide: SlideData + theme: ThemeData + master: MasterData + layout: LayoutData + mediaUrlCache: Map // path -> blob URL + colorCache: Map + /** Shared set of live ECharts instances for explicit disposal. 
*/ + chartInstances?: Set + /** Fill node from parent group's grpSpPr, used to resolve `a:grpFill` in children. */ + groupFillNode?: SafeXmlNode + /** + * Navigation callback for shape-level hyperlink actions (action buttons, clickable shapes). + * Called with target slide index (0-based) for `ppaction://hlinksldjump`, + * or with a URL string for external links. + */ + onNavigate?: (target: { slideIndex?: number; url?: string }) => void +} + +export function createRenderContext( + presentation: PresentationData, + slide: SlideData, + mediaUrlCache?: Map, + chartInstances?: Set +): RenderContext { + // Resolve the chain: slide -> layout -> master -> theme + const layoutPath = presentation.slideToLayout.get(slide.index) || '' + const masterPath = presentation.layoutToMaster.get(layoutPath) || '' + const themePath = presentation.masterToTheme.get(masterPath) || '' + + const layout: LayoutData = presentation.layouts.get(layoutPath) || { + placeholders: [], + // eslint-disable-next-line @typescript-eslint/no-explicit-any + spTree: {} as any, + rels: new Map(), + showMasterSp: true, + } + + const master: MasterData = presentation.masters.get(masterPath) || { + colorMap: new Map(), + textStyles: {}, + placeholders: [], + // eslint-disable-next-line @typescript-eslint/no-explicit-any + spTree: {} as any, + rels: new Map(), + } + + const theme: ThemeData = presentation.themes.get(themePath) || { + colorScheme: new Map(), + majorFont: { latin: 'Calibri', ea: '', cs: '' }, + minorFont: { latin: 'Calibri', ea: '', cs: '' }, + fillStyles: [], + lineStyles: [], + effectStyles: [], + } + + return { + presentation, + slide, + theme, + master, + layout, + mediaUrlCache: mediaUrlCache ?? 
new Map(), + colorCache: new Map(), + chartInstances, + } +} diff --git a/apps/sim/lib/pptx-renderer/renderer/shape-renderer.ts b/apps/sim/lib/pptx-renderer/renderer/shape-renderer.ts new file mode 100644 index 00000000000..58ed7f9c70d --- /dev/null +++ b/apps/sim/lib/pptx-renderer/renderer/shape-renderer.ts @@ -0,0 +1,1522 @@ +/** + * Shape renderer — converts ShapeNodeData into positioned HTML/SVG elements. + */ + +import type { LineEndInfo, ShapeNodeData, TextBody } from '../model/nodes/shape-node' +import type { RenderContext } from './render-context' + +/** True if the text body has at least one non-empty run (avoids covering shapes with empty placeholder text). */ +function hasVisibleText(textBody: TextBody): boolean { + for (const p of textBody.paragraphs) { + for (const r of p.runs) { + if (r.text != null && r.text.trim().length > 0) return true + } + } + return false +} + +import { emuToPx } from '../parser/units' +import type { SafeXmlNode } from '../parser/xml-parser' +import { renderCustomGeometry } from '../shapes/custom-geometry' +import { + getActionButtonIconPath, + getMultiPathPreset, + getPresetShapePath, + type PresetSubPath, +} from '../shapes/presets' +import { applyTint, hexToRgb, rgbToHex } from '../utils/color' +import { getOrCreateBlobUrl, resolveMediaPath } from '../utils/media' +import { isAllowedExternalUrl } from '../utils/url-safety' +import { + resolveColor, + resolveColorToCss, + resolveFill, + resolveGradientFill, + resolveGradientStroke, + resolveLineStyle, + resolveThemeFillReference, +} from './style-resolver' +import { renderTextBody } from './text-renderer' + +// --------------------------------------------------------------------------- +// Shape blipFill (image fill) — resolve to blob URL for reuse (e.g. SVG/PNG in process diagrams) +// --------------------------------------------------------------------------- + +/** Resolve shape blipFill to a blob URL so we can render it (e.g. slide 23 process graphic). 
*/ +function resolveShapeBlipUrl(blipFill: SafeXmlNode, ctx: RenderContext): string | null { + const blip = blipFill.child('blip') + const embedId = blip.attr('embed') ?? blip.attr('r:embed') + if (!embedId) return null + const rel = ctx.slide.rels.get(embedId) + if (!rel) return null + const mediaPath = resolveMediaPath(rel.target) + const data = ctx.presentation.media.get(mediaPath) + if (!data) return null + return getOrCreateBlobUrl(mediaPath, data, ctx.mediaUrlCache) +} + +// --------------------------------------------------------------------------- +// Line End Marker (Arrowhead) Helpers +// --------------------------------------------------------------------------- + +let markerIdCounter = 0 +let gradientIdCounter = 0 + +function svgDashArrayForKind(dashKind: string, strokeWidth: number): string | null { + const w = Math.max(strokeWidth, 1) + switch (dashKind) { + case 'dot': + case 'sysDot': + return `${w},${w * 2}` + case 'dash': + case 'sysDash': + return `${w * 4},${w * 2}` + case 'lgDash': + return `${w * 8},${w * 3}` + case 'dashDot': + case 'sysDashDot': + return `${w * 4},${w * 2},${w},${w * 2}` + case 'lgDashDot': + return `${w * 8},${w * 3},${w},${w * 3}` + case 'lgDashDotDot': + case 'sysDashDotDot': + return `${w * 8},${w * 3},${w},${w * 2},${w},${w * 2}` + default: + return null + } +} + +function parseCssColorToRgb(color: string): { r: number; g: number; b: number } | null { + if (!color) return null + const hex = color.trim() + if (hex.startsWith('#')) { + return hexToRgb(hex) + } + const m = hex.match(/rgba?\(([^)]+)\)/i) + if (!m) return null + const parts = m[1].split(',').map((s) => Number.parseFloat(s.trim())) + if (parts.length < 3 || parts.some((v) => Number.isNaN(v))) return null + return { + r: Math.max(0, Math.min(255, parts[0])), + g: Math.max(0, Math.min(255, parts[1])), + b: Math.max(0, Math.min(255, parts[2])), + } +} + +function mixRgb( + base: { r: number; g: number; b: number }, + target: { r: number; g: number; b: number }, 
+ t: number +): string { + const k = Math.max(0, Math.min(1, t)) + return rgbToHex( + base.r + (target.r - base.r) * k, + base.g + (target.g - base.g) * k, + base.b + (target.b - base.b) * k + ) +} + +/** + * Convert an OOXML gradient angle (in degrees, where 0 = right-to-left in OOXML coords) + * to SVG linearGradient x1/y1/x2/y2 coordinates (as percentages). + */ +function angleToSvgGradientCoords(angleDeg: number): { + x1: string + y1: string + x2: string + y2: string +} { + // OOXML: 0° = left-to-right, 90° = top-to-bottom (clockwise) + // Convert to radians for trig + const rad = (angleDeg * Math.PI) / 180 + // Calculate direction vector + const x2 = Math.round(50 + 50 * Math.cos(rad)) + const y2 = Math.round(50 + 50 * Math.sin(rad)) + const x1 = Math.round(50 - 50 * Math.cos(rad)) + const y1 = Math.round(50 - 50 * Math.sin(rad)) + return { + x1: `${x1}%`, + y1: `${y1}%`, + x2: `${x2}%`, + y2: `${y2}%`, + } +} + +/** + * Get the marker size multiplier based on OOXML size string. + */ +function getMarkerSize(size: string | undefined): number { + switch (size) { + case 'sm': + return 0.5 + case 'lg': + return 1.5 + default: + return 1.0 // 'med' or undefined + } +} + +/** + * Create an SVG marker element for a line end (arrowhead). + */ +function createArrowMarker( + svgNs: string, + info: LineEndInfo, + strokeColor: string, + strokeWidth: number, + isHead: boolean +): SVGMarkerElement | null { + const marker = document.createElementNS(svgNs, 'marker') as SVGMarkerElement + const id = `arrow-marker-${++markerIdCounter}` + marker.setAttribute('id', id) + // Use userSpaceOnUse so markerWidth/Height are in SVG pixels directly. + // This avoids the quadratic blow-up from markerUnits="strokeWidth" combined + // with a base size that already factors in stroke width. 
+ marker.setAttribute('markerUnits', 'userSpaceOnUse') + marker.setAttribute('orient', 'auto') + + const wMul = getMarkerSize(info.w) + const lenMul = getMarkerSize(info.len) + // Arrow size proportional to stroke width with balanced floor: + // avoid tiny markers, but do not overgrow relative to line length. + const baseLen = Math.max(strokeWidth * 4, 6.5) + const baseW = Math.max(strokeWidth * 3.2, 5) + const markerW = baseLen * lenMul + const markerH = baseW * wMul + + switch (info.type) { + case 'triangle': + case 'arrow': { + marker.setAttribute('viewBox', '0 0 10 10') + // Anchor the arrow tip on the path endpoint so it does not intrude into target shapes. + marker.setAttribute('refX', '10') + marker.setAttribute('refY', '5') + marker.setAttribute('markerWidth', String(markerW)) + marker.setAttribute('markerHeight', String(markerH)) + + const polygon = document.createElementNS(svgNs, 'polygon') + if (isHead) { + // headEnd at marker-start: arrow points backward (-x / left) + polygon.setAttribute('points', '0,5 10,0 10,10') + } else { + // tailEnd at marker-end: arrow points forward (+x / right) + polygon.setAttribute('points', '10,5 0,0 0,10') + } + polygon.setAttribute('fill', strokeColor) + marker.appendChild(polygon) + break + } + case 'stealth': { + marker.setAttribute('viewBox', '0 0 10 10') + marker.setAttribute('refX', '10') + marker.setAttribute('refY', '5') + marker.setAttribute('markerWidth', String(markerW)) + marker.setAttribute('markerHeight', String(markerH)) + + const path = document.createElementNS(svgNs, 'path') + if (isHead) { + // headEnd at marker-start: arrow points backward (-x / left) + path.setAttribute('d', 'M0,5 L10,0 L7,5 L10,10 Z') + } else { + // tailEnd at marker-end: arrow points forward (+x / right) + path.setAttribute('d', 'M10,5 L0,0 L3,5 L0,10 Z') + } + path.setAttribute('fill', strokeColor) + marker.appendChild(path) + break + } + case 'diamond': { + marker.setAttribute('viewBox', '0 0 10 10') + marker.setAttribute('refX', 
'5') + marker.setAttribute('refY', '5') + marker.setAttribute('markerWidth', String(markerW)) + marker.setAttribute('markerHeight', String(markerH)) + + const diamond = document.createElementNS(svgNs, 'polygon') + diamond.setAttribute('points', '5,0 10,5 5,10 0,5') + diamond.setAttribute('fill', strokeColor) + marker.appendChild(diamond) + break + } + case 'oval': { + marker.setAttribute('viewBox', '0 0 10 10') + marker.setAttribute('refX', '5') + marker.setAttribute('refY', '5') + marker.setAttribute('markerWidth', String(markerW)) + marker.setAttribute('markerHeight', String(markerH)) + + const circle = document.createElementNS(svgNs, 'circle') + circle.setAttribute('cx', '5') + circle.setAttribute('cy', '5') + circle.setAttribute('r', '4') + circle.setAttribute('fill', strokeColor) + marker.appendChild(circle) + break + } + default: + return null + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + + ;(marker as any)._markerId = id + return marker +} + +/** Read headEnd/tailEnd from an OOXML a:ln node (e.g. theme line style). */ +function getLineEndsFromLn(ln: SafeXmlNode): { headEnd?: LineEndInfo; tailEnd?: LineEndInfo } { + const out: { headEnd?: LineEndInfo; tailEnd?: LineEndInfo } = {} + const he = ln.child('headEnd') + if (he.exists()) { + const t = he.attr('type') + if (t && t !== 'none') out.headEnd = { type: t, w: he.attr('w'), len: he.attr('len') } + } + const te = ln.child('tailEnd') + if (te.exists()) { + const t = te.attr('type') + if (t && t !== 'none') out.tailEnd = { type: t, w: te.attr('w'), len: te.attr('len') } + } + return out +} + +// --------------------------------------------------------------------------- +// Shape Rendering +// --------------------------------------------------------------------------- + +/** + * Render a shape node into an absolutely-positioned HTML element with SVG geometry. 
+ */ +export function renderShape(node: ShapeNodeData, ctx: RenderContext): HTMLElement { + const wrapper = document.createElement('div') + wrapper.style.position = 'absolute' + wrapper.style.left = `${node.position.x}px` + wrapper.style.top = `${node.position.y}px` + wrapper.style.width = `${node.size.w}px` + // Line-like: preset line/connector, or cxnSp (connection shape), or flat extent (one dimension 0) + const presetKey = node.presetGeometry?.toLowerCase() ?? '' + const outlineOnlyPresets = new Set([ + 'arc', + 'leftbracket', + 'rightbracket', + 'leftbrace', + 'rightbrace', + 'bracketpair', + 'bracepair', + ]) + const presetIsLine = + !!presetKey && + (presetKey === 'line' || + presetKey === 'lineinv' || + presetKey.includes('connector') || + outlineOnlyPresets.has(presetKey)) + const isConnectorShape = node.source.localName === 'cxnSp' + const flatExtent = + (node.size.w > 0 && node.size.h === 0) || (node.size.w === 0 && node.size.h > 0) + const isLineLike = presetIsLine || isConnectorShape || flatExtent + const minH = isLineLike && node.size.h === 0 ? 1 : node.size.h + const minW = isLineLike && node.size.w === 0 ? 1 : node.size.w + wrapper.style.height = `${minH}px` + if (node.size.w === 0) wrapper.style.width = `${minW}px` + wrapper.style.overflow = 'visible' + // Apply transforms (rotation + flip) + const transforms: string[] = [] + if (node.rotation !== 0) { + transforms.push(`rotate(${node.rotation}deg)`) + } + if (node.flipH) { + transforms.push('scaleX(-1)') + } + if (node.flipV) { + transforms.push('scaleY(-1)') + } + if (transforms.length > 0) { + wrapper.style.transform = transforms.join(' ') + } + + const w = node.size.w + const h = node.size.h + // For path generation, pass original w/h so preset functions can detect zero-extent + // directions (e.g. line preset draws vertical when w=0, horizontal when h=0). + // For SVG viewport, use minW/minH to guarantee a visible container. 
+ const pathW = w + const pathH = h + + // Style references (needed for path fallback and line resolution) + const styleNode = node.source.child('style') + const lnRef = styleNode.exists() ? styleNode.child('lnRef') : undefined + const fillRef = styleNode.exists() ? styleNode.child('fillRef') : undefined + + // ---- Generate SVG path ---- + let pathD = '' + let multiPaths: PresetSubPath[] | null = null + if (node.presetGeometry) { + // For connector shapes (cxnSp), the 'line' preset should draw from start to end + // point (0,0)→(w,h), not a horizontal midline. Use 'straightConnector1' instead, + // which correctly handles diagonal/near-vertical connectors (e.g. cx≈0 but non-zero). + let effectivePreset = node.presetGeometry + if (isConnectorShape && effectivePreset === 'line') { + effectivePreset = 'straightConnector1' + } + // Try multi-path preset first (complex shapes like scrolls with darkenLess paths) + multiPaths = getMultiPathPreset(effectivePreset, pathW, pathH, node.adjustments) + if (multiPaths) { + // Use the first (main fill) path as pathD for backwards-compatible code paths + pathD = multiPaths[0]?.d ?? '' + } else { + pathD = getPresetShapePath(effectivePreset, pathW, pathH, node.adjustments) + } + } else if (node.customGeometry) { + const extNode = node.source.child('spPr').child('xfrm').child('ext') + const sourceExtentEmu = { + w: extNode.numAttr('cx') ?? 0, + h: extNode.numAttr('cy') ?? 0, + } + pathD = renderCustomGeometry(node.customGeometry, pathW, pathH, sourceExtentEmu) + } + // Connectors (cxnSp) or flat-extent shapes with line style but no geometry: draw as line + if ( + !pathD && + isLineLike && + (node.line?.exists() || + (lnRef?.exists() && + (lnRef.numAttr('idx') ?? 0) > 0 && + (ctx.theme.lineStyles?.length ?? 0) >= (lnRef.numAttr('idx') ?? 0))) + ) { + pathD = getPresetShapePath( + isConnectorShape ? 
'straightConnector1' : 'line', + pathW, + pathH, + undefined + ) + } + + // ---- Resolve fill and line styles ---- + const spPr = node.source.child('spPr') + let fillCss = '' + // Resolve structured gradient fill data (for SVG gradient elements) + let gradientFillData = node.fill ? resolveGradientFill(spPr, ctx) : null + if (node.fill?.exists()) { + if (node.fill.localName === 'solidFill') { + const colorChild = node.fill.child('srgbClr').exists() + ? node.fill.child('srgbClr') + : node.fill.child('schemeClr').exists() + ? node.fill.child('schemeClr') + : node.fill.child('scrgbClr').exists() + ? node.fill.child('scrgbClr') + : node.fill.child('sysClr').exists() + ? node.fill.child('sysClr') + : undefined + if (colorChild?.exists()) fillCss = resolveColorToCss(colorChild, ctx) + } + if (!fillCss) fillCss = resolveFill(spPr, ctx) + } + // Diagram/SmartArt: read fill directly from source when still missing (spPr > solidFill > color) + if (!fillCss) { + const solidFill = spPr.child('solidFill') + if (solidFill.exists()) { + const colorChild = solidFill.child('srgbClr').exists() + ? solidFill.child('srgbClr') + : solidFill.child('schemeClr').exists() + ? solidFill.child('schemeClr') + : solidFill.child('scrgbClr').exists() + ? solidFill.child('scrgbClr') + : solidFill.child('sysClr').exists() + ? solidFill.child('sysClr') + : undefined + if (colorChild?.exists()) fillCss = resolveColorToCss(colorChild, ctx) + } + } + // fillRef fallback: when no explicit fill but fillRef idx > 0, use fillRef color + if (!fillCss && fillRef && fillRef.exists()) { + const resolvedThemeFill = resolveThemeFillReference(fillRef, ctx) + fillCss = resolvedThemeFill.fillCss + if (!gradientFillData) gradientFillData = resolvedThemeFill.gradientFillData + } + // Connectors and other line-like presets are stroke-only in OOXML. They may still + // carry style fillRefs, but those must not become filled ribbons in SVG. 
+ if (isLineLike) { + fillCss = '' + gradientFillData = null + } + + let strokeColor = 'none' + let strokeWidth = 0 + let strokeDash = '' + let strokeDashKind = 'solid' + let strokeLinecap = '' + let strokeLinejoin = '' + let gradientStroke: ReturnType = null + + // Resolve effective line: explicit on shape, or use theme line from lnRef. + // When line is explicitly , do not use lnRef — diagram arrows (e.g. circularArrow) must have no stroke. + const lineIsNoFill = node.line?.child('noFill').exists() + const hasExplicitLine = node.line && !lineIsNoFill + const themeLineFromLnRef = + !hasExplicitLine && + !lineIsNoFill && + lnRef?.exists() && + (lnRef.numAttr('idx') ?? 0) > 0 && + (ctx.theme.lineStyles?.length ?? 0) >= (lnRef.numAttr('idx') ?? 0) + ? ctx.theme.lineStyles![(lnRef.numAttr('idx') ?? 1) - 1] + : undefined + let effectiveLine = hasExplicitLine ? node.line! : themeLineFromLnRef + if (lineIsNoFill) effectiveLine = undefined + + if (effectiveLine?.exists()) { + gradientStroke = resolveGradientStroke(effectiveLine, ctx) + if (!gradientStroke) { + const lineStyle = resolveLineStyle(effectiveLine, ctx, lnRef) + strokeColor = lineStyle.color + strokeWidth = lineStyle.width + strokeDash = lineStyle.dash + strokeDashKind = lineStyle.dashKind + } + + // Line cap: a:ln@cap → SVG stroke-linecap + const capAttr = effectiveLine.attr('cap') + if (capAttr === 'rnd') strokeLinecap = 'round' + else if (capAttr === 'sq') strokeLinecap = 'square' + else if (capAttr === 'flat') strokeLinecap = 'butt' + + // Line join: from child elements + if (effectiveLine.child('round').exists()) strokeLinejoin = 'round' + else if (effectiveLine.child('bevel').exists()) strokeLinejoin = 'bevel' + else if (effectiveLine.child('miter').exists()) strokeLinejoin = 'miter' + } + if (lineIsNoFill) { + strokeColor = 'none' + strokeWidth = 0 + gradientStroke = null + } + // SmartArt circularArrow must be fill-only (no stroke); preset-based override so diagram XML is not relied on. 
+ const isCircularArrow = node.presetGeometry?.toLowerCase() === 'circulararrow' + if (isCircularArrow) { + strokeColor = 'none' + strokeWidth = 0 + gradientStroke = null + if (!fillCss) { + const solid = spPr.child('solidFill') + if (solid.exists()) { + const color = solid.child('srgbClr').exists() + ? solid.child('srgbClr') + : solid.child('schemeClr').exists() + ? solid.child('schemeClr') + : solid.child('scrgbClr').exists() + ? solid.child('scrgbClr') + : solid.child('sysClr').exists() + ? solid.child('sysClr') + : undefined + if (color?.exists()) fillCss = resolveColorToCss(color, ctx) + } + } + } + + // ---- Create SVG element ---- + if (pathD) { + const svgNs = 'http://www.w3.org/2000/svg' + const svg = document.createElementNS(svgNs, 'svg') + const svgW = isLineLike ? minW : w + const svgH = isLineLike ? minH : h + svg.setAttribute('viewBox', `0 0 ${svgW} ${svgH}`) + svg.setAttribute('width', String(svgW)) + svg.setAttribute('height', String(svgH)) + svg.style.position = 'absolute' + svg.style.left = '0' + svg.style.top = '0' + svg.style.overflow = 'visible' + + const blipFill = spPr.child('blipFill') + const blipUrl = blipFill.exists() ? resolveShapeBlipUrl(blipFill, ctx) : null + + // When shape has image fill (blipFill), render image clipped to path so complex graphics (e.g. 
slide 23 process) show + if (blipUrl) { + const defs = document.createElementNS(svgNs, 'defs') + const clipId = `shape-clip-${++gradientIdCounter}` + const clipPath = document.createElementNS(svgNs, 'clipPath') + clipPath.setAttribute('id', clipId) + const clipPathPath = document.createElementNS(svgNs, 'path') + clipPathPath.setAttribute('d', pathD) + clipPath.appendChild(clipPathPath) + defs.appendChild(clipPath) + const image = document.createElementNS(svgNs, 'image') + image.setAttributeNS('http://www.w3.org/1999/xlink', 'href', blipUrl) + image.setAttribute('x', '0') + image.setAttribute('y', '0') + image.setAttribute('width', String(svgW)) + image.setAttribute('height', String(svgH)) + image.setAttribute('clip-path', `url(#${clipId})`) + image.setAttribute('preserveAspectRatio', 'xMidYMid slice') + svg.appendChild(defs) + svg.appendChild(image) + wrapper.appendChild(svg) + // Skip path fill/stroke/markers — image replaces fill + } else { + // Create for gradients and markers + const defs = document.createElementNS(svgNs, 'defs') + + const path = document.createElementNS(svgNs, 'path') + path.setAttribute('d', pathD) + const presetLower = node.presetGeometry?.toLowerCase() + if (presetLower === 'curveduparrow' || presetLower === 'curveddownarrow') { + // Curved arrows can contain overlapping sub-contours near arrowhead roots. + // evenodd avoids tiny anti-alias seams that appear with nonzero winding. + path.setAttribute('fill-rule', 'evenodd') + path.setAttribute('stroke-linejoin', 'round') + } else if (presetLower === 'funnel') { + // Funnel has an inset ellipse sub-path that creates a "hole" (even-odd fill). 
+ path.setAttribute('fill-rule', 'evenodd') + } + + // Fill + if (fillCss) { + if (gradientFillData && gradientFillData.stops.length > 0) { + // Create SVG gradient definition for proper shape-clipped gradient fills + const fillGradId = `grad-fill-${++gradientIdCounter}` + + if (gradientFillData.type === 'radial' && gradientFillData.pathType === 'rect') { + // OOXML path="rect" gradient: Chebyshev distance (L∞ norm) creates + // rectangular contour lines (the characteristic cross/X pattern). + // SVG/CSS radial-gradient only supports elliptical contours. + // Approximation: two linear gradients (H + V) blended with "lighten" + // (per-channel max). max(dx, dy) = L∞ norm = rectangular contours. + const gcx = gradientFillData.cx ?? 0.5 + const gcy = gradientFillData.cy ?? 0.5 + const stops = gradientFillData.stops + + // Mirror stops for center-out: original stop at N% → two stops at + // (center - N%*distToEdge) and (center + N%*distToEdge) in gradient coords. + const mirrorStops = (centerFrac: number) => { + const mirrored: Array<{ offset: number; color: string }> = [] + for (const s of stops) { + const t = s.position / 100 // 0..1 from center to edge + const below = centerFrac - t * centerFrac + const above = centerFrac + t * (1 - centerFrac) + mirrored.push({ offset: below, color: s.color }) + mirrored.push({ offset: above, color: s.color }) + } + mirrored.sort((a, b) => a.offset - b.offset) + return mirrored + } + + // Horizontal linear gradient (left → right, center at gcx) + const hGradId = `${fillGradId}-h` + const hGrad = document.createElementNS(svgNs, 'linearGradient') + hGrad.setAttribute('id', hGradId) + hGrad.setAttribute( + 'color-interpolation', + gradientFillData.colorInterpolation ?? 
'linearRGB' + ) + hGrad.setAttribute('x1', '0%') + hGrad.setAttribute('y1', '0%') + hGrad.setAttribute('x2', '100%') + hGrad.setAttribute('y2', '0%') + for (const ms of mirrorStops(gcx)) { + const svgStop = document.createElementNS(svgNs, 'stop') + svgStop.setAttribute('offset', `${(ms.offset * 100).toFixed(2)}%`) + svgStop.setAttribute('stop-color', ms.color) + hGrad.appendChild(svgStop) + } + defs.appendChild(hGrad) + + // Vertical linear gradient (top → bottom, center at gcy) + const vGradId = `${fillGradId}-v` + const vGrad = document.createElementNS(svgNs, 'linearGradient') + vGrad.setAttribute('id', vGradId) + vGrad.setAttribute( + 'color-interpolation', + gradientFillData.colorInterpolation ?? 'linearRGB' + ) + vGrad.setAttribute('x1', '0%') + vGrad.setAttribute('y1', '0%') + vGrad.setAttribute('x2', '0%') + vGrad.setAttribute('y2', '100%') + for (const ms of mirrorStops(gcy)) { + const svgStop = document.createElementNS(svgNs, 'stop') + svgStop.setAttribute('offset', `${(ms.offset * 100).toFixed(2)}%`) + svgStop.setAttribute('stop-color', ms.color) + vGrad.appendChild(svgStop) + } + defs.appendChild(vGrad) + + // Use clipPath to constrain the blend group to the shape + const clipId = `${fillGradId}-clip` + const clipPath = document.createElementNS(svgNs, 'clipPath') + clipPath.setAttribute('id', clipId) + const clipUsePath = document.createElementNS(svgNs, 'path') + clipUsePath.setAttribute('d', pathD) + clipPath.appendChild(clipUsePath) + defs.appendChild(clipPath) + + // Isolated group: black backdrop + two gradient layers with lighten blend. + // lighten = per-channel max. Against black (0,0,0), first layer is identity. + // Second layer's lighten against first = max(H, V) per channel. 
+ const blendGroup = document.createElementNS(svgNs, 'g') + blendGroup.setAttribute('clip-path', `url(#${clipId})`) + blendGroup.setAttribute('style', 'isolation: isolate') + + const bgRect = document.createElementNS(svgNs, 'rect') + bgRect.setAttribute('width', '100%') + bgRect.setAttribute('height', '100%') + bgRect.setAttribute('fill', 'black') + blendGroup.appendChild(bgRect) + + const hPath = document.createElementNS(svgNs, 'path') + hPath.setAttribute('d', pathD) + hPath.setAttribute('fill', `url(#${hGradId})`) + hPath.setAttribute('style', 'mix-blend-mode: lighten') + blendGroup.appendChild(hPath) + + const vPath = document.createElementNS(svgNs, 'path') + vPath.setAttribute('d', pathD) + vPath.setAttribute('fill', `url(#${vGradId})`) + vPath.setAttribute('style', 'mix-blend-mode: lighten') + blendGroup.appendChild(vPath) + + // Mark path as no-fill; the blend group handles it. + // Tag the blend group so we can insert it before the main path later. + path.setAttribute('fill', 'none') + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ;(path as any).__rectBlendGroup = blendGroup + } else if (gradientFillData.type === 'radial') { + const radialGrad = document.createElementNS(svgNs, 'radialGradient') + radialGrad.setAttribute('id', fillGradId) + radialGrad.setAttribute( + 'color-interpolation', + gradientFillData.colorInterpolation ?? 'linearRGB' + ) + radialGrad.setAttribute('gradientUnits', 'userSpaceOnUse') + const gcx = gradientFillData.cx ?? 0.5 + const gcy = gradientFillData.cy ?? 
0.5 + radialGrad.setAttribute('cx', String(gcx * svgW)) + radialGrad.setAttribute('cy', String(gcy * svgH)) + // path="circle"/"shape": gradient reaches farthest corner + const maxDx = Math.max(gcx, 1 - gcx) + const maxDy = Math.max(gcy, 1 - gcy) + const r = Math.sqrt(maxDx * maxDx + maxDy * maxDy) + radialGrad.setAttribute('r', String(r * Math.max(svgW, svgH))) + for (const stop of gradientFillData.stops) { + const svgStop = document.createElementNS(svgNs, 'stop') + svgStop.setAttribute('offset', `${stop.position}%`) + svgStop.setAttribute('stop-color', stop.color) + radialGrad.appendChild(svgStop) + } + defs.appendChild(radialGrad) + } else { + // Linear gradient + const linearGrad = document.createElementNS(svgNs, 'linearGradient') + linearGrad.setAttribute('id', fillGradId) + linearGrad.setAttribute( + 'color-interpolation', + gradientFillData.colorInterpolation ?? 'linearRGB' + ) + linearGrad.setAttribute('gradientUnits', 'userSpaceOnUse') + const coords = angleToSvgGradientCoords(gradientFillData.angle) + linearGrad.setAttribute('x1', String((Number.parseFloat(coords.x1) / 100) * svgW)) + linearGrad.setAttribute('y1', String((Number.parseFloat(coords.y1) / 100) * svgH)) + linearGrad.setAttribute('x2', String((Number.parseFloat(coords.x2) / 100) * svgW)) + linearGrad.setAttribute('y2', String((Number.parseFloat(coords.y2) / 100) * svgH)) + for (const stop of gradientFillData.stops) { + const svgStop = document.createElementNS(svgNs, 'stop') + svgStop.setAttribute('offset', `${stop.position}%`) + svgStop.setAttribute('stop-color', stop.color) + linearGrad.appendChild(svgStop) + } + defs.appendChild(linearGrad) + } + + // For rect blend group, fill was already handled (path set to 'none', blend group added). 
+ if (!(gradientFillData.type === 'radial' && gradientFillData.pathType === 'rect')) { + path.setAttribute('fill', `url(#${fillGradId})`) + } + } else if (fillCss === 'transparent') { + path.setAttribute('fill', 'none') + } else if (fillCss.includes('gradient')) { + // Fallback for gradients without structured data (shouldn't normally happen) + // Apply to wrapper as before + wrapper.style.background = fillCss + path.setAttribute('fill', 'transparent') + } else { + path.setAttribute('fill', fillCss) + } + } else { + path.setAttribute('fill', 'none') + } + // SmartArt circularArrow: force no stroke; fill already resolved via fillRef/solidFill above + if (isCircularArrow) { + // fillCss was already resolved (including fillRef fallback). Only override if still empty. + if (!fillCss || fillCss === 'none' || fillCss === 'transparent') { + // Try spPr > solidFill > color child as last resort + const colorTags = ['srgbClr', 'schemeClr', 'scrgbClr', 'sysClr', 'hslClr', 'prstClr'] + let fallbackFill = '' + const solid = spPr.child('solidFill') + if (solid.exists()) { + for (const child of solid.allChildren()) { + if (colorTags.includes(child.localName)) { + fallbackFill = resolveColorToCss(child, ctx) + break + } + } + } + if (!fallbackFill && node.fill?.exists()) { + for (const child of node.fill.allChildren()) { + if (colorTags.includes(child.localName)) { + fallbackFill = resolveColorToCss(child, ctx) + break + } + } + } + if (fallbackFill) path.setAttribute('fill', fallbackFill) + } + path.setAttribute('stroke', 'none') + } + + // Resolve arrow ends and effective stroke width before applying stroke (so we can enforce min width for connectors) + let effectiveHeadEnd = node.headEnd + let effectiveTailEnd = node.tailEnd + if ((!effectiveHeadEnd || !effectiveTailEnd) && effectiveLine?.exists()) { + const fromLn = getLineEndsFromLn(effectiveLine) + if (!effectiveHeadEnd && fromLn.headEnd) effectiveHeadEnd = fromLn.headEnd + if (!effectiveTailEnd && fromLn.tailEnd) 
effectiveTailEnd = fromLn.tailEnd + } + // For gradient strokes, use first stop for marker-start and last stop for marker-end + // so arrowhead colours match the visible gradient end rather than always using the lightest stop. + const gradStartColor = gradientStroke + ? gradientStroke.stops[0]?.color || 'black' + : strokeColor + const gradEndColor = gradientStroke + ? gradientStroke.stops[gradientStroke.stops.length - 1]?.color || gradStartColor + : strokeColor + let effectiveStrokeWidth = gradientStroke ? gradientStroke.width : strokeWidth + if (isLineLike && (effectiveHeadEnd || effectiveTailEnd) && effectiveStrokeWidth <= 0) { + effectiveStrokeWidth = 1 // so connector line and arrows both show (e.g. slide 24) + } + + // Stroke — gradient stroke or solid stroke (skip for circularArrow; already set stroke=none above) + // For multi-path presets where the first sub-path specifies stroke:false (e.g. callout1/2/3, + // accentCallout1/2/3), suppress stroke on the main path element — the leader line and accent + // bar are rendered as separate sub-path elements with their own stroke settings. + const mainPathStrokeSuppressed = multiPaths && multiPaths[0]?.stroke === false + if ( + !isCircularArrow && + !mainPathStrokeSuppressed && + gradientStroke && + gradientStroke.stops.length > 0 + ) { + // Create SVG linearGradient for the gradient stroke. + // Use userSpaceOnUse so the gradient is defined in SVG coordinate space rather + // than objectBoundingBox. This is critical for straight line paths (zero-width or + // zero-height bounding box) where objectBoundingBox produces degenerate coordinates + // and the gradient becomes invisible. + const gradId = `grad-stroke-${++gradientIdCounter}` + const linearGrad = document.createElementNS(svgNs, 'linearGradient') + linearGrad.setAttribute('id', gradId) + linearGrad.setAttribute( + 'color-interpolation', + gradientStroke.colorInterpolation ?? 
'linearRGB' + ) + linearGrad.setAttribute('gradientUnits', 'userSpaceOnUse') + + // Convert gradient angle to absolute coordinates in SVG user space + const rad = (gradientStroke.angle * Math.PI) / 180 + const cos = Math.cos(rad) + const sin = Math.sin(rad) + // Centre of the SVG viewBox + const cx = svgW / 2 + const cy = svgH / 2 + // Half-extent along each axis (use max of both dimensions so the gradient covers the path) + const halfLen = Math.max(svgW, svgH) / 2 + linearGrad.setAttribute('x1', String(cx - halfLen * cos)) + linearGrad.setAttribute('y1', String(cy - halfLen * sin)) + linearGrad.setAttribute('x2', String(cx + halfLen * cos)) + linearGrad.setAttribute('y2', String(cy + halfLen * sin)) + + for (const stop of gradientStroke.stops) { + const svgStop = document.createElementNS(svgNs, 'stop') + svgStop.setAttribute('offset', `${stop.position}%`) + svgStop.setAttribute('stop-color', stop.color) + linearGrad.appendChild(svgStop) + } + + defs.appendChild(linearGrad) + + const strokeW = Math.max(gradientStroke.width, 1) + path.setAttribute('stroke', `url(#${gradId})`) + path.setAttribute('stroke-width', String(strokeW)) + if (strokeLinecap) path.setAttribute('stroke-linecap', strokeLinecap) + if (strokeLinejoin) path.setAttribute('stroke-linejoin', strokeLinejoin) + } else if ( + !isCircularArrow && + !mainPathStrokeSuppressed && + effectiveStrokeWidth > 0 && + strokeColor !== 'transparent' + ) { + path.setAttribute('stroke', strokeColor) + path.setAttribute('stroke-width', String(effectiveStrokeWidth)) + if (strokeLinecap) path.setAttribute('stroke-linecap', strokeLinecap) + if (strokeLinejoin) path.setAttribute('stroke-linejoin', strokeLinejoin) + const svgDashArray = svgDashArrayForKind(strokeDashKind, effectiveStrokeWidth) + if (svgDashArray) { + path.setAttribute('stroke-dasharray', svgDashArray) + } else if (strokeDash === 'dashed') { + path.setAttribute( + 'stroke-dasharray', + `${effectiveStrokeWidth * 4},${effectiveStrokeWidth * 2}` + ) + } else if 
(strokeDash === 'dotted') { + path.setAttribute( + 'stroke-dasharray', + `${effectiveStrokeWidth},${effectiveStrokeWidth * 2}` + ) + } + } else { + path.setAttribute('stroke', 'none') + } + + // Line end markers (arrowheads) + // Use gradient start colour for head (marker-start) and end colour for tail (marker-end) + if (effectiveStrokeWidth > 0 && (effectiveHeadEnd || effectiveTailEnd)) { + if (effectiveHeadEnd) { + const marker = createArrowMarker( + svgNs, + effectiveHeadEnd, + gradStartColor, + effectiveStrokeWidth, + true + ) + if (marker) { + defs.appendChild(marker) + // eslint-disable-next-line @typescript-eslint/no-explicit-any + path.setAttribute('marker-start', `url(#${(marker as any)._markerId})`) + } + } + + if (effectiveTailEnd) { + const marker = createArrowMarker( + svgNs, + effectiveTailEnd, + gradEndColor, + effectiveStrokeWidth, + false + ) + if (marker) { + defs.appendChild(marker) + // eslint-disable-next-line @typescript-eslint/no-explicit-any + path.setAttribute('marker-end', `url(#${(marker as any)._markerId})`) + } + } + } + + // Insert rect blend group (two linear gradients + lighten) before the main path + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if ((path as any).__rectBlendGroup) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + svg.appendChild((path as any).__rectBlendGroup)( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + path as any + ).__rectBlendGroup = undefined + } + + svg.appendChild(path) + + // --- Multi-path preset rendering --- + // For complex shapes (scrolls, etc.) that have multiple sub-paths with different + // fill modifiers (darkenLess for shadow areas, none for stroke-only detail lines). + if (multiPaths && multiPaths.length > 1) { + const mainPathFill = path.getAttribute('fill') ?? '' + const presetLower = node.presetGeometry?.toLowerCase() ?? '' + const shadingBaseFill = + mainPathFill && !mainPathFill.startsWith('url(') + ? 
mainPathFill + : fillRef?.exists() + ? resolveColorToCss(fillRef, ctx) + : (gradientFillData?.stops[0]?.color ?? fillCss) + const baseRgb = parseCssColorToRgb(shadingBaseFill) + const appendTintedGradientFill = ( + amount: number, + target: { r: number; g: number; b: number } + ): string | undefined => { + if (gradientFillData?.type !== 'linear' || gradientFillData.stops.length === 0) + return undefined + const gradId = `grad-fill-detail-${++gradientIdCounter}` + const linearGrad = document.createElementNS(svgNs, 'linearGradient') + linearGrad.setAttribute('id', gradId) + linearGrad.setAttribute('gradientUnits', 'userSpaceOnUse') + linearGrad.setAttribute( + 'color-interpolation', + gradientFillData.colorInterpolation ?? 'sRGB' + ) + const coords = angleToSvgGradientCoords(gradientFillData.angle) + linearGrad.setAttribute('x1', String((Number.parseFloat(coords.x1) / 100) * svgW)) + linearGrad.setAttribute('y1', String((Number.parseFloat(coords.y1) / 100) * svgH)) + linearGrad.setAttribute('x2', String((Number.parseFloat(coords.x2) / 100) * svgW)) + linearGrad.setAttribute('y2', String((Number.parseFloat(coords.y2) / 100) * svgH)) + for (const stop of gradientFillData.stops) { + const svgStop = document.createElementNS(svgNs, 'stop') + svgStop.setAttribute('offset', `${stop.position}%`) + const stopRgb = parseCssColorToRgb(stop.color) + svgStop.setAttribute( + 'stop-color', + stopRgb ? mixRgb(stopRgb, target, amount) : stop.color + ) + linearGrad.appendChild(svgStop) + } + defs.appendChild(linearGrad) + return `url(#${gradId})` + } + // The first path was already rendered above as the main path. + // Render additional sub-paths (darkenLess shadow, stroke-only detail lines). 
+ for (let pi = 1; pi < multiPaths.length; pi++) { + const sp = multiPaths[pi] + const extraPath = document.createElementNS(svgNs, 'path') + extraPath.setAttribute('d', sp.d) + if (sp.fill === 'none') { + extraPath.setAttribute('fill', 'none') + } else if (sp.fill === 'darkenLess') { + extraPath.setAttribute( + 'fill', + appendTintedGradientFill(0.15, { r: 0, g: 0, b: 0 }) || + (baseRgb ? mixRgb(baseRgb, { r: 0, g: 0, b: 0 }, 0.15) : 'rgba(0,0,0,0.15)') + ) + } else if (sp.fill === 'darken') { + extraPath.setAttribute( + 'fill', + appendTintedGradientFill(0.3, { r: 0, g: 0, b: 0 }) || + (baseRgb ? mixRgb(baseRgb, { r: 0, g: 0, b: 0 }, 0.3) : 'rgba(0,0,0,0.3)') + ) + } else if (sp.fill === 'lightenLess') { + extraPath.setAttribute( + 'fill', + appendTintedGradientFill(0.18, { r: 255, g: 255, b: 255 }) || + (baseRgb + ? mixRgb(baseRgb, { r: 255, g: 255, b: 255 }, 0.18) + : 'rgba(255,255,255,0.15)') + ) + } else if (sp.fill === 'lighten') { + let canHighlight: string | undefined + if ( + presetLower === 'can' && + gradientFillData?.type === 'linear' && + gradientFillData.stops.length > 0 + ) { + const faceGradId = `grad-fill-face-${++gradientIdCounter}` + const faceGrad = document.createElementNS(svgNs, 'linearGradient') + faceGrad.setAttribute('id', faceGradId) + faceGrad.setAttribute('gradientUnits', 'userSpaceOnUse') + faceGrad.setAttribute('color-interpolation', 'sRGB') + const coords = angleToSvgGradientCoords(gradientFillData.angle) + faceGrad.setAttribute('x1', String((Number.parseFloat(coords.x1) / 100) * svgW)) + faceGrad.setAttribute('y1', String((Number.parseFloat(coords.y1) / 100) * svgH)) + faceGrad.setAttribute('x2', String((Number.parseFloat(coords.x2) / 100) * svgW)) + faceGrad.setAttribute('y2', String((Number.parseFloat(coords.y2) / 100) * svgH)) + for (const stop of gradientFillData.stops) { + const svgStop = document.createElementNS(svgNs, 'stop') + svgStop.setAttribute('offset', `${stop.position}%`) + svgStop.setAttribute('stop-color', 
applyTint(stop.color, 65000)) + faceGrad.appendChild(svgStop) + } + defs.appendChild(faceGrad) + canHighlight = `url(#${faceGradId})` + } else if (presetLower === 'can' && mainPathFill.startsWith('url(')) { + canHighlight = mainPathFill + } + extraPath.setAttribute( + 'fill', + canHighlight || + (baseRgb + ? mixRgb(baseRgb, { r: 255, g: 255, b: 255 }, 0.3) + : 'rgba(255,255,255,0.3)') + ) + } else { + // 'norm' — same fill as main path + extraPath.setAttribute('fill', mainPathFill || 'none') + } + if (sp.stroke && effectiveStrokeWidth > 0 && strokeColor !== 'transparent') { + extraPath.setAttribute('stroke', strokeColor) + const isBorderCalloutLeader = + node.presetGeometry?.toLowerCase() === 'bordercallout1' && sp.fill === 'none' + const scaledStrokeWidth = + sp.strokeWidthScale && Number.isFinite(sp.strokeWidthScale) && sp.strokeWidthScale > 0 + ? effectiveStrokeWidth * sp.strokeWidthScale + : effectiveStrokeWidth + const extraStrokeWidth = isBorderCalloutLeader + ? Math.max(scaledStrokeWidth, 2.4) + : scaledStrokeWidth + extraPath.setAttribute('stroke-width', String(extraStrokeWidth)) + if (isBorderCalloutLeader) extraPath.setAttribute('stroke-linecap', 'round') + if ( + sp.maskToMainOutlineBandScale && + sp.maskToMainOutlineBandScale > 0 && + sp.maskToMainOutlineBandScale < 1 + ) { + const maskId = `shape-detail-band-mask-${++gradientIdCounter}` + const mask = document.createElementNS(svgNs, 'mask') + mask.setAttribute('id', maskId) + mask.setAttribute('maskUnits', 'userSpaceOnUse') + mask.setAttribute('maskContentUnits', 'userSpaceOnUse') + const maskBg = document.createElementNS(svgNs, 'rect') + maskBg.setAttribute('x', '0') + maskBg.setAttribute('y', '0') + maskBg.setAttribute('width', String(svgW)) + maskBg.setAttribute('height', String(svgH)) + maskBg.setAttribute('fill', 'black') + mask.appendChild(maskBg) + + const outerPath = document.createElementNS(svgNs, 'path') + outerPath.setAttribute('d', pathD) + outerPath.setAttribute('fill', 'white') + 
outerPath.setAttribute('stroke', 'none') + mask.appendChild(outerPath) + + const insetScale = sp.maskToMainOutlineBandScale + const insetPath = document.createElementNS(svgNs, 'path') + insetPath.setAttribute('d', pathD) + insetPath.setAttribute('fill', 'black') + insetPath.setAttribute('stroke', 'none') + const tx = (svgW * (1 - insetScale)) / 2 + const ty = (svgH * (1 - insetScale)) / 2 + insetPath.setAttribute('transform', `translate(${tx} ${ty}) scale(${insetScale})`) + mask.appendChild(insetPath) + + defs.appendChild(mask) + extraPath.setAttribute('mask', `url(#${maskId})`) + } else if (sp.maskToMainOutline) { + const maskId = `shape-detail-mask-${++gradientIdCounter}` + const mask = document.createElementNS(svgNs, 'mask') + mask.setAttribute('id', maskId) + mask.setAttribute('maskUnits', 'userSpaceOnUse') + mask.setAttribute('maskContentUnits', 'userSpaceOnUse') + const maskBg = document.createElementNS(svgNs, 'rect') + maskBg.setAttribute('x', '0') + maskBg.setAttribute('y', '0') + maskBg.setAttribute('width', String(svgW)) + maskBg.setAttribute('height', String(svgH)) + maskBg.setAttribute('fill', 'black') + mask.appendChild(maskBg) + const maskPath = document.createElementNS(svgNs, 'path') + maskPath.setAttribute('d', pathD) + maskPath.setAttribute('fill', 'none') + maskPath.setAttribute('stroke', 'white') + const maskStrokeWidth = Math.max( + extraStrokeWidth * + (sp.maskStrokeScale && sp.maskStrokeScale > 0 ? sp.maskStrokeScale : 3), + extraStrokeWidth + ) + maskPath.setAttribute('stroke-width', String(maskStrokeWidth)) + maskPath.setAttribute('stroke-linecap', 'round') + maskPath.setAttribute('stroke-linejoin', 'round') + mask.appendChild(maskPath) + defs.appendChild(mask) + extraPath.setAttribute('mask', `url(#${maskId})`) + } + } else if (sp.stroke) { + // Detail lines without explicit line style: avoid using identical fill color, + // otherwise guide lines (e.g. chartX diagonals) become visually invisible. + const detailStroke = baseRgb ? 
mixRgb(baseRgb, { r: 0, g: 0, b: 0 }, 0.55) : '#666666' + extraPath.setAttribute('stroke', detailStroke) + extraPath.setAttribute('stroke-width', '1') + } else { + extraPath.setAttribute('stroke', 'none') + } + svg.appendChild(extraPath) + } + } + + // Some multi-path detail rendering adds masks/gradients after the initial defs population. + if (defs.children.length > 0 && !defs.parentNode) { + svg.insertBefore(defs, svg.firstChild) + } + + // circularArrow: ensure no stroke and remove markers + if (isCircularArrow) { + path.setAttribute('stroke', 'none') + path.removeAttribute('stroke-width') + path.removeAttribute('marker-start') + path.removeAttribute('marker-end') + } + + // --- Action button icon overlay (legacy fallback) --- + // Only used for action buttons that don't have multiPathPresets entries. + // Shapes with multiPathPresets already include the icon in their darken sub-paths. + if (node.presetGeometry && !multiPaths) { + const iconD = getActionButtonIconPath(node.presetGeometry, pathW, pathH) + if (iconD) { + const iconPath = document.createElementNS(svgNs, 'path') + iconPath.setAttribute('d', iconD) + // PowerPoint uses a darkened shade (~50%) of the fill colour for action button icons. 
+ let iconFill = '#333333' + if (fillCss && fillCss !== 'transparent' && fillCss !== 'none') { + const m = fillCss.match(/^#([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})/i) + if (m) { + const r = Number.parseInt(m[1], 16) + const g = Number.parseInt(m[2], 16) + const b = Number.parseInt(m[3], 16) + // Shade at 50%: darken each channel by half + iconFill = rgbToHex(Math.round(r * 0.5), Math.round(g * 0.5), Math.round(b * 0.5)) + } + } + iconPath.setAttribute('fill', iconFill) + iconPath.setAttribute('stroke', 'none') + svg.appendChild(iconPath) + } + } + + // (Can top ellipse overlay removed — now handled by multiPathPresets 'can' lighten sub-path) + + wrapper.appendChild(svg) + } + } else if (fillCss && fillCss !== 'transparent') { + // No geometry but has fill — apply as background color + if (fillCss.includes('gradient')) { + wrapper.style.background = fillCss + } else { + wrapper.style.backgroundColor = fillCss + } + } + + // ---- Render text overlay (only when there is visible text; skip for decorative shapes with empty txBody) ---- + if (node.textBody && node.textBody.paragraphs.length > 0 && hasVisibleText(node.textBody)) { + const textContainer = document.createElement('div') + textContainer.style.position = 'absolute' + if (node.textBoxBounds) { + textContainer.style.left = `${node.textBoxBounds.x}px` + textContainer.style.top = `${node.textBoxBounds.y}px` + textContainer.style.width = `${node.textBoxBounds.w}px` + textContainer.style.height = `${node.textBoxBounds.h}px` + } else { + textContainer.style.left = '0' + textContainer.style.top = '0' + textContainer.style.width = '100%' + textContainer.style.height = '100%' + } + textContainer.style.display = 'flex' + textContainer.style.flexDirection = 'column' + textContainer.style.boxSizing = 'border-box' + // Overflow handling based on bodyPr auto-fit mode: + // - spAutoFit: shape resizes to fit text → overflow visible + // - normAutofit: text shrinks to fit shape → apply fontScale, overflow hidden + // - 
noAutofit: text clips → overflow hidden + // - (default, no child): PowerPoint implicitly auto-shrinks → overflow visible + const bodyPrForFit = node.textBody?.bodyProperties + const hasSpAutoFit = bodyPrForFit?.child('spAutoFit').exists() + const normAutofit = bodyPrForFit?.child('normAutofit') + const hasNormAutofit = normAutofit?.exists() + textContainer.style.overflowX = 'visible' + // noAutofit means "don't auto-fit" — NOT "clip text". PowerPoint allows text to + // overflow the shape boundary visibly. + textContainer.style.overflowY = 'visible' + + // normAutofit: PowerPoint stores the computed fontScale (1000ths of percent). + // Apply it as a CSS transform to shrink text so it fits the shape. + let needsDynamicAutofit = false + if (hasNormAutofit && normAutofit) { + textContainer.style.overflowY = 'hidden' + const fontScale = normAutofit.numAttr('fontScale') + const lnSpcReduction = normAutofit.numAttr('lnSpcReduction') ?? 0 + if (fontScale != null && fontScale < 100000) { + const scale = fontScale / 100000 + textContainer.style.transformOrigin = 'top left' + textContainer.style.transform = `scale(${scale})` + // Expand container dimensions so the scaled content fills the original space + textContainer.style.width = `${100 / scale}%` + textContainer.style.height = `${100 / scale}%` + } else if (fontScale == null) { + // fontScale not stored in XML — PowerPoint computes it at runtime. + // We'll measure after DOM insertion and apply dynamic scaling. + needsDynamicAutofit = true + } + if (lnSpcReduction > 0) { + const lnFactor = 1 - lnSpcReduction / 100000 + textContainer.style.lineHeight = `${lnFactor}` + } + } + // spAutoFit requests in-shape text fitting. In browser rendering we cannot + // resize the absolutely positioned shape like PowerPoint editor behavior, + // so use bounded dynamic scaling to prevent bleed across neighboring nodes. 
+ if (hasSpAutoFit && !hasNormAutofit) { + textContainer.style.overflowY = 'hidden' + needsDynamicAutofit = true + } + + // Apply bodyPr (text body properties) + // Use layout/master bodyPr as fallback for missing attributes + { + const bodyPr = node.textBody.bodyProperties + const fallbackBp = node.textBody.layoutBodyProperties + + if (bodyPr) { + // Text wrap: only wrap="none" should force single-line. + // Title placeholders without explicit wrap should still be allowed to wrap. + const wrap = bodyPr.attr('wrap') || (fallbackBp ? fallbackBp.attr('wrap') : null) + if (wrap === 'none') { + textContainer.style.whiteSpace = 'nowrap' + } + } + + // Vertical alignment (anchor): prefer shape's own, then layout placeholder + const anchor = + (bodyPr ? bodyPr.attr('anchor') : null) || (fallbackBp ? fallbackBp.attr('anchor') : null) + if (anchor === 't') { + textContainer.style.justifyContent = 'flex-start' + } else if (anchor === 'ctr') { + textContainer.style.justifyContent = 'center' + } else if (anchor === 'b') { + textContainer.style.justifyContent = 'flex-end' + } + + // Internal margins (insets): prefer shape's own, then layout, then OOXML defaults + const lIns = + (bodyPr ? bodyPr.numAttr('lIns') : undefined) ?? + (fallbackBp ? fallbackBp.numAttr('lIns') : undefined) + const tIns = + (bodyPr ? bodyPr.numAttr('tIns') : undefined) ?? + (fallbackBp ? fallbackBp.numAttr('tIns') : undefined) + const rIns = + (bodyPr ? bodyPr.numAttr('rIns') : undefined) ?? + (fallbackBp ? fallbackBp.numAttr('rIns') : undefined) + const bIns = + (bodyPr ? bodyPr.numAttr('bIns') : undefined) ?? + (fallbackBp ? fallbackBp.numAttr('bIns') : undefined) + + // Default insets are 91440 EMU (0.1 inch) for L/R, 45720 EMU (0.05 inch) for T/B + const leftPad = lIns !== undefined ? emuToPx(lIns) : emuToPx(91440) + const topPad = tIns !== undefined ? emuToPx(tIns) : emuToPx(45720) + const rightPad = rIns !== undefined ? emuToPx(rIns) : emuToPx(91440) + const bottomPad = bIns !== undefined ? 
emuToPx(bIns) : emuToPx(45720) + + textContainer.style.paddingLeft = `${leftPad}px` + textContainer.style.paddingTop = `${topPad}px` + textContainer.style.paddingRight = `${rightPad}px` + textContainer.style.paddingBottom = `${bottomPad}px` + + // Vertical text support (bodyPr@vert) + const vert = + (bodyPr ? bodyPr.attr('vert') : null) || (fallbackBp ? fallbackBp.attr('vert') : null) + if (vert === 'eaVert') { + textContainer.style.writingMode = 'vertical-rl' + } else if (vert === 'vert' || vert === 'wordArtVert') { + textContainer.style.writingMode = 'vertical-rl' + } else if (vert === 'vert270') { + textContainer.style.writingMode = 'vertical-rl' + textContainer.style.transform = `${textContainer.style.transform || ''} rotate(180deg)` + } + } + + // Diagram text can carry its own txXfrm rotation; apply it inside the shape wrapper. + if (node.textBoxBounds?.rotation && node.textBoxBounds.rotation !== 0) { + const existing = textContainer.style.transform || '' + textContainer.style.transform = `${existing} rotate(${node.textBoxBounds.rotation}deg)`.trim() + textContainer.style.transformOrigin = 'center center' + } + + // If text was flipped, un-flip the text so it reads correctly + // Append to existing transforms (don't overwrite vert270 rotation) + if (node.flipH || node.flipV) { + const existing = textContainer.style.transform || '' + const flipParts: string[] = [] + if (node.flipH) flipParts.push('scaleX(-1)') + if (node.flipV) flipParts.push('scaleY(-1)') + textContainer.style.transform = `${existing} ${flipParts.join(' ')}`.trim() + } + + // Resolve fontRef color from shape style element (used by SmartArt diagram shapes + // where text color is specified via dsp:style > a:fontRef > a:schemeClr). 
+ let fontRefColor: string | undefined + const shapeStyle = node.source.child('style') + if (shapeStyle.exists()) { + const fontRef = shapeStyle.child('fontRef') + if (fontRef.exists() && fontRef.allChildren().length > 0) { + fontRefColor = resolveColorToCss(fontRef, ctx) + } + } + + renderTextBody( + node.textBody, + node.placeholder, + ctx, + textContainer, + fontRefColor ? { fontRefColor } : undefined + ) + wrapper.appendChild(textContainer) + + // Dynamic normAutofit: when fontScale is not stored in the XML, measure the + // rendered text and compute the needed scale so all text fits the container. + if (needsDynamicAutofit) { + // The wrapper isn't in the DOM yet, so temporarily attach it offscreen to measure. + wrapper.style.visibility = 'hidden' + document.body.appendChild(wrapper) + // Temporarily neutralise vertical alignment so content overflows downward + // (flex-end would push content upward, making scrollHeight == clientHeight). + const savedJC = textContainer.style.justifyContent + textContainer.style.justifyContent = 'flex-start' + const containerH = textContainer.clientHeight + const contentH = textContainer.scrollHeight + textContainer.style.justifyContent = savedJC + document.body.removeChild(wrapper) + wrapper.style.visibility = '' + if (contentH > containerH && containerH > 0) { + const scale = containerH / contentH + textContainer.style.transformOrigin = 'top left' + textContainer.style.transform = `scale(${scale})` + textContainer.style.width = `${100 / scale}%` + textContainer.style.height = `${100 / scale}%` + } + } + } + + // ---- Effects (explicit effectLst or theme effectRef fallback) ---- + let effectiveEffectLst = spPr.child('effectLst') + if (!effectiveEffectLst.exists()) { + const effectRef = node.source.child('style').child('effectRef') + const idx = effectRef.numAttr('idx') ?? 0 + if (idx > 0 && (ctx.theme.effectStyles?.length ?? 
0) >= idx) { + const themeEffect = ctx.theme.effectStyles[idx - 1] + if (themeEffect.exists()) { + const lst = themeEffect.child('effectLst') + if (lst.exists()) effectiveEffectLst = lst + } + } + } + + if (effectiveEffectLst.exists()) { + const outerShdw = effectiveEffectLst.child('outerShdw') + if (outerShdw.exists()) { + const dir = outerShdw.numAttr('dir') ?? 0 // direction in 60000ths of degree + const dist = outerShdw.numAttr('dist') ?? 0 // distance in EMU + const blurRad = outerShdw.numAttr('blurRad') ?? 0 // blur radius in EMU + const sx = outerShdw.numAttr('sx') // horizontal scale (100000 = 100%) + const sy = outerShdw.numAttr('sy') // vertical scale (100000 = 100%) + const algn = outerShdw.attr('algn') // alignment anchor (t, b, tl, tr, etc.) + + const dirDeg = dir / 60000 + const distPx = emuToPx(dist) + const blurPx = emuToPx(blurRad) + const offsetX = distPx * Math.cos((dirDeg * Math.PI) / 180) + const offsetY = distPx * Math.sin((dirDeg * Math.PI) / 180) + + // Resolve shadow color + let shadowColor = 'rgba(0,0,0,0.4)' + const { color: shdColor, alpha: shdAlpha } = resolveColor(outerShdw, ctx) + if (shdColor) { + const hex = shdColor.startsWith('#') ? shdColor : `#${shdColor}` + const { r: sr, g: sg, b: sb } = hexToRgb(hex) + shadowColor = `rgba(${sr},${sg},${sb},${shdAlpha.toFixed(3)})` + } + + // PowerPoint outerShdw with sx/sy creates a scaled shadow copy, then draws the + // shape on top. When dist=0 and scale ≈ 100%, only the thin edge overhang is + // visible – far subtler than a CSS drop-shadow with the full blur radius. + // Approximate with box-shadow using spread derived from scale and reduced blur. + if (sx != null && sy != null && sx > 0 && sy > 0) { + const scaleX = sx / 100000 + const scaleY = sy / 100000 + const shapeW = node.size?.w ?? 100 + const shapeH = node.size?.h ?? 100 + + // For line-like shapes, sx/sy should scale line thickness, not full line length. 
+ // Using shape width here can explode spread on long connectors (slide 68 regression). + let spreadBasisW = shapeW + let spreadBasisH = shapeH + if (isLineLike || shapeW <= 1 || shapeH <= 1) { + const lineWEmu = node.line?.numAttr('w') ?? 12700 + const lineThickness = Math.max(1, emuToPx(lineWEmu)) + spreadBasisW = lineThickness + spreadBasisH = lineThickness + } + + // Spread = how far the shadow extends beyond the shape on each side + const spreadX = (spreadBasisW * (scaleX - 1)) / 2 + const spreadY = (spreadBasisH * (scaleY - 1)) / 2 + const spread = Math.max(0, (spreadX + spreadY) / 2) + + // Alignment shifts the shadow anchor point; compute extra offset + let alignOffX = 0 + let alignOffY = 0 + if (algn) { + // OOXML algn is an enum (t, b, l, r, tl, tr, bl, br, ctr), not a substring bag. + // Exact matching avoids misinterpreting "ctr" as containing both "t" and "r". + const a = algn.toLowerCase() + if (a === 't' || a === 'tl' || a === 'tr') alignOffY = (spreadBasisH * (scaleY - 1)) / 2 + if (a === 'b' || a === 'bl' || a === 'br') alignOffY = (-spreadBasisH * (scaleY - 1)) / 2 + if (a === 'l' || a === 'tl' || a === 'bl') alignOffX = (spreadBasisW * (scaleX - 1)) / 2 + if (a === 'r' || a === 'tr' || a === 'br') alignOffX = (-spreadBasisW * (scaleX - 1)) / 2 + } + + // When spread is tiny relative to blurPx, PowerPoint's Gaussian blur + // distributes energy across the full blur area. The visible edge (only + // `spread` wide) receives only a fraction of the original alpha. + // Attenuate alpha accordingly so thin-edge shadows are nearly invisible. 
+ const effectiveBlur = Math.min(blurPx, spread * 3) + let effectiveAlpha = shdAlpha + if (blurPx > 0 && spread < blurPx) { + effectiveAlpha = shdAlpha * (spread / blurPx) + } + + // Skip shadow entirely if effective alpha is negligible + if (effectiveAlpha >= 0.01) { + const bsX = (offsetX + alignOffX).toFixed(1) + const bsY = (offsetY + alignOffY).toFixed(1) + // Recompute shadow color with attenuated alpha + let attenuatedColor = shadowColor + if (shdColor) { + const hex2 = shdColor.startsWith('#') ? shdColor : `#${shdColor}` + const { r: sr2, g: sg2, b: sb2 } = hexToRgb(hex2) + attenuatedColor = `rgba(${sr2},${sg2},${sb2},${effectiveAlpha.toFixed(4)})` + } + wrapper.style.boxShadow = `${bsX}px ${bsY}px ${effectiveBlur.toFixed(1)}px ${spread.toFixed(1)}px ${attenuatedColor}` + } + } else { + wrapper.style.filter = `drop-shadow(${offsetX.toFixed(1)}px ${offsetY.toFixed(1)}px ${blurPx.toFixed(1)}px ${shadowColor})` + } + } + + // Reflection is not directly representable in standard CSS across browsers. + // Approximate via -webkit-box-reflect when available (Chromium/WebKit). + const reflection = effectiveEffectLst.child('reflection') + if (reflection.exists()) { + const dist = emuToPx(reflection.numAttr('dist') ?? 0) + const stA = (reflection.numAttr('stA') ?? 50000) / 100000 + const endA = (reflection.numAttr('endA') ?? 0) / 100000 + const stPos = Math.max(0, Math.min(100, (reflection.numAttr('stPos') ?? 0) / 1000)) + const endPos = Math.max(0, Math.min(100, (reflection.numAttr('endPos') ?? 
100000) / 1000)) + const mask = `linear-gradient(to bottom, rgba(255,255,255,${stA.toFixed(3)}) ${stPos.toFixed(1)}%, rgba(255,255,255,${endA.toFixed(3)}) ${endPos.toFixed(1)}%)` + const reflectValue = `below ${dist.toFixed(1)}px ${mask}` + wrapper.style.setProperty('-webkit-box-reflect', reflectValue) + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ;(wrapper.style as any).webkitBoxReflect = reflectValue + } + } + + // ---- Shape-level hyperlink / action button navigation ---- + if (node.hlinkClick && ctx.onNavigate) { + const { action, rId } = node.hlinkClick + if (action === 'ppaction://hlinksldjump' && rId) { + // Resolve slide target from relationship + const rel = ctx.slide.rels.get(rId) + if (rel) { + // Target is like "slide28.xml" → slide index 27 (0-based) + const match = rel.target.match(/slide(\d+)\.xml/) + if (match) { + const slideIndex = Number.parseInt(match[1], 10) - 1 + wrapper.style.cursor = 'pointer' + wrapper.title = node.hlinkClick.tooltip || `Go to slide ${slideIndex + 1}` + wrapper.addEventListener('click', (e) => { + e.stopPropagation() + ctx.onNavigate!({ slideIndex }) + }) + } + } + } else if (rId) { + // External URL link + const rel = ctx.slide.rels.get(rId) + if (rel && rel.targetMode === 'External' && isAllowedExternalUrl(rel.target)) { + wrapper.style.cursor = 'pointer' + wrapper.title = node.hlinkClick.tooltip || rel.target + wrapper.addEventListener('click', (e) => { + e.stopPropagation() + ctx.onNavigate!({ url: rel.target }) + }) + } + } + } + + return wrapper +} diff --git a/apps/sim/lib/pptx-renderer/renderer/slide-renderer.ts b/apps/sim/lib/pptx-renderer/renderer/slide-renderer.ts new file mode 100644 index 00000000000..66aa0a3f21b --- /dev/null +++ b/apps/sim/lib/pptx-renderer/renderer/slide-renderer.ts @@ -0,0 +1,315 @@ +/** + * Slide renderer — orchestrates rendering of a complete slide with all its nodes. 
+ */ + +import type { ECharts } from 'echarts' +import type { BaseNodeData } from '../model/nodes/base-node' +import type { ChartNodeData } from '../model/nodes/chart-node' +import { type GroupNodeData, parseGroupNode } from '../model/nodes/group-node' +import { type PicNodeData, parsePicNode } from '../model/nodes/pic-node' +import { parseShapeNode, type ShapeNodeData } from '../model/nodes/shape-node' +import { parseTableNode, type TableNodeData } from '../model/nodes/table-node' +import type { PresentationData } from '../model/presentation' +import type { SlideData } from '../model/slide' +import type { SafeXmlNode } from '../parser/xml-parser' +import { renderBackground } from './background-renderer' +import { renderChart } from './chart-renderer' +import { renderGroup } from './group-renderer' +import { renderImage } from './image-renderer' +import { createRenderContext, type RenderContext } from './render-context' +import { renderShape } from './shape-renderer' +import { renderTable } from './table-renderer' + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +export interface SlideRendererOptions { + /** Called when a single node fails to render. */ + onNodeError?: (nodeId: string, error: unknown) => void + /** + * Navigation callback for shape-level hyperlink actions (action buttons, etc.). + * Called with target slide index (0-based) for slide jumps, + * or with a URL string for external links. + */ + onNavigate?: (target: { slideIndex?: number; url?: string }) => void + /** Shared media URL cache for blob URL reuse across slides. */ + mediaUrlCache?: Map + /** Shared set of live ECharts instances for explicit disposal. */ + chartInstances?: Set +} + +/** + * Per-slide resource handle returned by `renderSlide()`. 
+ * Allows the caller to dispose of slide-specific resources (chart instances, + * blob URLs in standalone mode) without tearing down the whole viewer. + */ +export interface SlideHandle { + /** The rendered slide DOM element. */ + readonly element: HTMLElement + /** Dispose slide-specific resources (charts inside this slide, blob URLs if standalone). */ + dispose(): void + /** Support `using` declarations (TC39 Explicit Resource Management). */ + [Symbol.dispose](): void +} + +// --------------------------------------------------------------------------- +// Node Dispatch +// --------------------------------------------------------------------------- + +/** + * Dispatch a typed node to its appropriate renderer. + * This function is also passed into GroupRenderer for recursive child rendering. + */ +function renderNode(node: BaseNodeData, ctx: RenderContext): HTMLElement { + switch (node.nodeType) { + case 'shape': + return renderShape(node as ShapeNodeData, ctx) + case 'picture': + return renderImage(node as PicNodeData, ctx) + case 'table': + return renderTable(node as TableNodeData, ctx) + case 'group': + return renderGroup(node as GroupNodeData, ctx, renderNode) + case 'chart': + return renderChart(node as ChartNodeData, ctx) + default: { + // Unknown node type — render as empty positioned div + const el = document.createElement('div') + el.style.position = 'absolute' + el.style.left = `${node.position.x}px` + el.style.top = `${node.position.y}px` + el.style.width = `${node.size.w}px` + el.style.height = `${node.size.h}px` + return el + } + } +} + +// --------------------------------------------------------------------------- +// Error Placeholder +// --------------------------------------------------------------------------- + +/** + * Create a visual error placeholder at the node's position. 
+ */ +function createErrorPlaceholder(node: BaseNodeData): HTMLElement { + const el = document.createElement('div') + el.style.position = 'absolute' + el.style.left = `${node.position.x}px` + el.style.top = `${node.position.y}px` + el.style.width = `${node.size.w}px` + el.style.height = `${node.size.h}px` + el.style.border = '2px dashed #ff4444' + el.style.backgroundColor = 'rgba(255,68,68,0.08)' + el.style.display = 'flex' + el.style.alignItems = 'center' + el.style.justifyContent = 'center' + el.style.color = '#cc0000' + el.style.fontSize = '11px' + el.style.fontFamily = 'monospace' + el.style.overflow = 'hidden' + el.style.boxSizing = 'border-box' + el.style.padding = '4px' + el.textContent = `Render Error` + el.title = `Failed to render node: ${node.id} (${node.name})` + return el +} + +// --------------------------------------------------------------------------- +// Master/Layout Shape Parsing +// --------------------------------------------------------------------------- + +/** + * Check whether a shape node is a placeholder (has p:ph in nvPr). + */ +function isPlaceholderNode(node: SafeXmlNode): boolean { + for (const wrapper of ['nvSpPr', 'nvPicPr', 'nvGrpSpPr', 'nvGraphicFramePr', 'nvCxnSpPr']) { + const nv = node.child(wrapper) + if (nv.exists()) { + const nvPr = nv.child('nvPr') + if (nvPr.child('ph').exists()) return true + } + } + return false +} + +/** + * Parse and collect renderable shapes from a master or layout spTree. + * Only includes NON-placeholder shapes (decorative elements, logos, footers). + * Placeholder shapes are never rendered from master/layout — they only serve + * as position/size inheritance templates. 
+ */ +function parseTemplateShapes(spTree: SafeXmlNode, _slideNodes: BaseNodeData[]): BaseNodeData[] { + const nodes: BaseNodeData[] = [] + if (!spTree || !spTree.exists || !spTree.exists()) return nodes + + for (const child of spTree.allChildren()) { + const tag = child.localName + + // Skip ALL placeholder shapes — they're templates, not renderable content + if (isPlaceholderNode(child)) continue + + try { + let node: BaseNodeData | undefined + switch (tag) { + case 'sp': + case 'cxnSp': + node = parseShapeNode(child) + break + case 'pic': + node = parsePicNode(child) + break + case 'grpSp': + node = parseGroupNode(child) + break + case 'graphicFrame': { + const graphic = child.child('graphic') + const graphicData = graphic.child('graphicData') + if (graphicData.child('tbl').exists()) { + node = parseTableNode(child) + } + break + } + } + // Skip empty/invisible nodes (0x0 size and no text) + if (node && (node.size.w > 0 || node.size.h > 0)) { + nodes.push(node) + } + } catch { + // Skip unparseable template shapes silently + } + } + return nodes +} + +// --------------------------------------------------------------------------- +// Main Slide Render Function +// --------------------------------------------------------------------------- + +/** + * Render a complete slide into an HTML element. + * + * Rendering order: + * 1. Background (slide → layout → master inheritance) + * 2. Master non-placeholder shapes (behind everything) + * 3. Layout non-placeholder shapes + * 4. 
Slide shapes (on top) + */ +export function renderSlide( + presentation: PresentationData, + slide: SlideData, + options?: SlideRendererOptions +): SlideHandle { + const isSharedCache = !!options?.mediaUrlCache + + // Create render context (resolves slide -> layout -> master -> theme chain) + const ctx = createRenderContext( + presentation, + slide, + options?.mediaUrlCache, + options?.chartInstances + ) + if (options?.onNavigate) { + ctx.onNavigate = options.onNavigate + } + + // Create slide container + const container = document.createElement('div') + container.style.position = 'relative' + container.style.width = `${presentation.width}px` + container.style.height = `${presentation.height}px` + container.style.overflow = 'hidden' + container.style.backgroundColor = '#FFFFFF' + + // Render background + try { + renderBackground(ctx, container) + } catch (e) { + options?.onNodeError?.('__background__', e) + } + + // --- Render master template shapes (behind layout and slide) --- + // Respect showMasterSp flags: + // - layout.showMasterSp === false → skip master shapes + // - slide.showMasterSp === false → skip both master AND layout shapes + if (slide.showMasterSp && ctx.layout.showMasterSp) { + const masterCtx: RenderContext = { + ...ctx, + slide: { ...ctx.slide, rels: ctx.master.rels }, + } + const masterShapes = parseTemplateShapes(ctx.master.spTree, slide.nodes) + for (const node of masterShapes) { + try { + const el = renderNode(node, masterCtx) + container.appendChild(el) + } catch { + // Master shape errors are non-fatal + } + } + } + + // --- Render layout template shapes --- + if (slide.showMasterSp) { + const layoutCtx: RenderContext = { + ...ctx, + slide: { ...ctx.slide, rels: ctx.layout.rels }, + } + const layoutShapes = parseTemplateShapes(ctx.layout.spTree, slide.nodes) + for (const node of layoutShapes) { + try { + const el = renderNode(node, layoutCtx) + container.appendChild(el) + } catch { + // Layout shape errors are non-fatal + } + } + } + + // 
--- Render slide shapes (on top) --- + for (const node of slide.nodes) { + try { + const el = renderNode(node, ctx) + container.appendChild(el) + } catch (e) { + options?.onNodeError?.(node.id, e) + container.appendChild(createErrorPlaceholder(node)) + } + } + + // Build SlideHandle + let disposed = false + const chartInstances = options?.chartInstances + const mediaUrlCache = ctx.mediaUrlCache + + const dispose = (): void => { + if (disposed) return + disposed = true + + // Dispose chart instances whose DOM is inside this slide container + if (chartInstances) { + for (const chart of chartInstances) { + if (!chart.isDisposed() && container.contains(chart.getDom())) { + chart.dispose() + chartInstances.delete(chart) + } + } + } + + // Revoke blob URLs only in standalone mode (caller doesn't own a shared cache) + if (!isSharedCache) { + for (const url of mediaUrlCache.values()) { + URL.revokeObjectURL(url) + } + mediaUrlCache.clear() + } + } + + return { + element: container, + dispose, + [Symbol.dispose](): void { + dispose() + }, + } +} diff --git a/apps/sim/lib/pptx-renderer/renderer/style-resolver.ts b/apps/sim/lib/pptx-renderer/renderer/style-resolver.ts new file mode 100644 index 00000000000..d0a515ce4cf --- /dev/null +++ b/apps/sim/lib/pptx-renderer/renderer/style-resolver.ts @@ -0,0 +1,815 @@ +/** + * Style resolver — converts OOXML color and fill nodes to CSS values. 
+ */ + +import { angleToDeg, emuToPx, pctToDecimal } from '../parser/units' +import type { SafeXmlNode } from '../parser/xml-parser' +import type { ColorModifier } from '../utils/color' +import { applyColorModifiers, hexToRgb, hslToRgb, presetColorToHex, rgbToHex } from '../utils/color' +import type { RenderContext } from './render-context' + +// --------------------------------------------------------------------------- +// Color Resolution +// --------------------------------------------------------------------------- + +/** + * Build a cache key for a color node based on its tag, value, and modifiers. + */ +function buildColorCacheKey(colorNode: SafeXmlNode): string { + const parts: string[] = [colorNode.localName, colorNode.attr('val') ?? ''] + for (const child of colorNode.allChildren()) { + const tag = child.localName + const val = child.attr('val') + if (tag) parts.push(`${tag}:${val ?? ''}`) + // Include nested color children for wrapper nodes + for (const grandchild of child.allChildren()) { + const gtag = grandchild.localName + const gval = grandchild.attr('val') + if (gtag) parts.push(`${gtag}:${gval ?? ''}`) + } + } + return parts.join('|') +} + +/** + * Collect OOXML color modifier children from a color node. + * Modifiers are child elements like alpha, lumMod, lumOff, tint, shade, satMod, hueMod. + */ +function collectModifiers(colorNode: SafeXmlNode): ColorModifier[] { + const modifiers: ColorModifier[] = [] + for (const child of colorNode.allChildren()) { + const name = child.localName + const val = child.numAttr('val') + if (val !== undefined && name) { + modifiers.push({ name, val }) + } + } + return modifiers +} + +/** + * Resolve a scheme color name through the master colorMap then theme colorScheme. + * + * OOXML scheme colors use logical names (e.g., "tx1", "bg1", "accent1"). + * The master's colorMap remaps some of these (e.g., "tx1" -> "dk1"). + * The theme's colorScheme holds the actual hex values keyed by the mapped name. 
+ */ +function resolveSchemeColor(schemeName: string, ctx: RenderContext): string { + // Apply colorMap remapping (layout override takes priority) + let mappedName = schemeName + if (ctx.layout.colorMapOverride) { + const override = ctx.layout.colorMapOverride.get(schemeName) + if (override) mappedName = override + } + if (mappedName === schemeName) { + const mapped = ctx.master.colorMap.get(schemeName) + if (mapped) mappedName = mapped + } + + // Look up in theme color scheme + const hex = ctx.theme.colorScheme.get(mappedName) + if (hex) return hex + + // Fallback: try the original name directly in theme + const fallback = ctx.theme.colorScheme.get(schemeName) + return fallback || '000000' +} + +/** + * Resolve an OOXML color node (srgbClr, schemeClr, sysClr, prstClr, hslClr, scrgbClr) + * into a CSS-ready hex color and alpha value. + */ +export function resolveColor( + colorNode: SafeXmlNode, + ctx: RenderContext +): { color: string; alpha: number } { + // Check cache + const cacheKey = buildColorCacheKey(colorNode) + const cached = ctx.colorCache.get(cacheKey) + if (cached) return cached + + const result = resolveColorUncached(colorNode, ctx) + ctx.colorCache.set(cacheKey, result) + return result +} + +function resolveColorUncached( + colorNode: SafeXmlNode, + ctx: RenderContext, + placeholderColorNode?: SafeXmlNode +): { color: string; alpha: number } { + // Iterate child elements to find the actual color type node + for (const child of colorNode.allChildren()) { + const tag = child.localName + const modifiers = collectModifiers(child) + + switch (tag) { + case 'srgbClr': { + const hex = child.attr('val') || '000000' + return applyColorModifiers(hex, modifiers) + } + + case 'schemeClr': { + const scheme = child.attr('val') || 'tx1' + if (scheme.toLowerCase() === 'phclr' && placeholderColorNode?.exists()) { + const base = resolveColor(placeholderColorNode, ctx) + const baseHex = base.color.startsWith('#') ? 
base.color.slice(1) : base.color + const adjusted = applyColorModifiers(baseHex, modifiers) + return { color: adjusted.color, alpha: adjusted.alpha * base.alpha } + } + const hex = resolveSchemeColor(scheme, ctx) + return applyColorModifiers(hex, modifiers) + } + + case 'sysClr': { + const hex = child.attr('lastClr') || child.attr('val') || '000000' + return applyColorModifiers(hex, modifiers) + } + + case 'prstClr': { + const name = child.attr('val') || 'black' + const hex = presetColorToHex(name) || '#000000' + return applyColorModifiers(hex.replace('#', ''), modifiers) + } + + case 'hslClr': { + const hue = (child.numAttr('hue') ?? 0) / 60000 // 60000ths of degree -> degrees + const sat = (child.numAttr('sat') ?? 0) / 100000 // percentage + const lum = (child.numAttr('lum') ?? 0) / 100000 + const rgb = hslToRgb(hue, sat, lum) + const hex = rgbToHex(rgb.r, rgb.g, rgb.b).replace('#', '') + return applyColorModifiers(hex, modifiers) + } + + case 'scrgbClr': { + // r, g, b are percentages (0-100000) + const r = Math.round(((child.numAttr('r') ?? 0) / 100000) * 255) + const g = Math.round(((child.numAttr('g') ?? 0) / 100000) * 255) + const b = Math.round(((child.numAttr('b') ?? 0) / 100000) * 255) + const hex = rgbToHex(r, g, b).replace('#', '') + return applyColorModifiers(hex, modifiers) + } + + default: + // Not a recognized color child — continue looking + break + } + } + + // If the node itself is a color type (no wrapper) + const selfTag = colorNode.localName + if (selfTag === 'srgbClr') { + const hex = colorNode.attr('val') || '000000' + return applyColorModifiers(hex, collectModifiers(colorNode)) + } + if (selfTag === 'schemeClr') { + const scheme = colorNode.attr('val') || 'tx1' + if (scheme.toLowerCase() === 'phclr' && placeholderColorNode?.exists()) { + const base = resolveColor(placeholderColorNode, ctx) + const baseHex = base.color.startsWith('#') ? 
base.color.slice(1) : base.color + const adjusted = applyColorModifiers(baseHex, collectModifiers(colorNode)) + return { color: adjusted.color, alpha: adjusted.alpha * base.alpha } + } + const hex = resolveSchemeColor(scheme, ctx) + return applyColorModifiers(hex, collectModifiers(colorNode)) + } + if (selfTag === 'sysClr') { + const hex = colorNode.attr('lastClr') || colorNode.attr('val') || '000000' + return applyColorModifiers(hex, collectModifiers(colorNode)) + } + if (selfTag === 'prstClr') { + const name = colorNode.attr('val') || 'black' + const hex = presetColorToHex(name) || '#000000' + return applyColorModifiers(hex.replace('#', ''), collectModifiers(colorNode)) + } + + return { color: '#000000', alpha: 1 } +} + +/** + * Resolve a color node and return a CSS color string. + * Convenience wrapper combining resolveColor + colorToCss. + */ +export function resolveColorToCss(node: SafeXmlNode, ctx: RenderContext): string { + const { color, alpha } = resolveColor(node, ctx) + return colorToCss(color, alpha) +} + +/** + * Convert a resolved color + alpha into a CSS rgba() string. + */ +function colorToCss(color: string, alpha: number): string { + const hex = color.startsWith('#') ? color : `#${color}` + const { r, g, b } = hexToRgb(hex) + if (alpha >= 1) { + return hex + } + return `rgba(${r},${g},${b},${alpha.toFixed(3)})` +} + +function resolveColorWithPlaceholder( + colorNode: SafeXmlNode, + ctx: RenderContext, + placeholderColorNode?: SafeXmlNode +): { color: string; alpha: number } { + if (!placeholderColorNode?.exists()) return resolveColor(colorNode, ctx) + return resolveColorUncached(colorNode, ctx, placeholderColorNode) +} + +// --------------------------------------------------------------------------- +// Fill Resolution +// --------------------------------------------------------------------------- + +/** + * Resolve a fill from shape properties (spPr) into a CSS background value. 
+ * + * Returns: + * - CSS color/gradient string for solidFill/gradFill + * - 'transparent' for noFill + * - '' for blipFill (handled by ImageRenderer) or no fill found (inherit) + */ +export function resolveFill(spPr: SafeXmlNode, ctx: RenderContext): string { + // solidFill + const solidFill = spPr.child('solidFill') + if (solidFill.exists()) { + const { color, alpha } = resolveColor(solidFill, ctx) + return colorToCss(color, alpha) + } + + // gradFill + const gradFill = spPr.child('gradFill') + if (gradFill.exists()) { + return resolveGradient(gradFill, ctx) + } + + // blipFill — handled externally by ImageRenderer + const blipFill = spPr.child('blipFill') + if (blipFill.exists()) { + return '' + } + + // pattFill — pattern fill rendered as CSS repeating gradient + const pattFill = spPr.child('pattFill') + if (pattFill.exists()) { + return resolvePatternFill(pattFill, ctx) + } + + // grpFill — inherit fill from parent group + const grpFill = spPr.child('grpFill') + if (grpFill.exists()) { + if (ctx.groupFillNode) { + return resolveFill(ctx.groupFillNode, ctx) + } + // No group fill context available — fall through to no fill + return '' + } + + // noFill + const noFill = spPr.child('noFill') + if (noFill.exists()) { + return 'transparent' + } + + // No fill found — inherit + return '' +} + +// --------------------------------------------------------------------------- +// Pattern Fill Resolution +// --------------------------------------------------------------------------- + +/** + * Resolve `` into a CSS background value using repeating gradients. + * + * OOXML defines 40+ pattern presets. We support the most common ones and + * fall back to a simple foreground/background 50% mix for unknown patterns. + */ +function resolvePatternFill(pattFill: SafeXmlNode, ctx: RenderContext): string { + const preset = pattFill.attr('prst') ?? 
'solid' + + // Foreground and background colors + let fg = '#000000' + let bg = '#ffffff' + + const fgClr = pattFill.child('fgClr') + if (fgClr.exists()) { + const { color, alpha } = resolveColor(fgClr, ctx) + fg = colorToCss(color, alpha) + } + + const bgClr = pattFill.child('bgClr') + if (bgClr.exists()) { + const { color, alpha } = resolveColor(bgClr, ctx) + bg = colorToCss(color, alpha) + } + + // Size of pattern tile in px + const s = 8 + + // Helper: returns CSS `background` shorthand with repeating pattern layer(s) over bg color. + // Format: " 0 0/, " + // This is a valid multi-layer CSS background shorthand. + const pat = (gradient: string): string => `${gradient} 0 0/${s}px ${s}px, ${bg}` + const pat2 = (g1: string, g2: string): string => + `${g1} 0 0/${s}px ${s}px, ${g2} 0 0/${s}px ${s}px, ${bg}` + + switch (preset) { + // Solid fills + case 'solid': + case 'solidDmnd': + return fg + + // Percentage fills (dots on background) + case 'pct5': + case 'pct10': + case 'pct20': + case 'pct25': + return pat(`radial-gradient(${fg} 1px, transparent 1px)`) + case 'pct30': + case 'pct40': + case 'pct50': + return pat(`radial-gradient(${fg} 1.5px, transparent 1.5px)`) + case 'pct60': + case 'pct70': + case 'pct75': + case 'pct80': + case 'pct90': + return pat(`radial-gradient(${fg} 2.5px, transparent 2.5px)`) + + // Horizontal lines + case 'horz': + case 'ltHorz': + case 'narHorz': + case 'dkHorz': + return pat( + `repeating-linear-gradient(0deg, ${fg} 0px, ${fg} 1px, transparent 1px, transparent ${s}px)` + ) + + // Vertical lines + case 'vert': + case 'ltVert': + case 'narVert': + case 'dkVert': + return pat( + `repeating-linear-gradient(90deg, ${fg} 0px, ${fg} 1px, transparent 1px, transparent ${s}px)` + ) + + // Diagonal lines (down-right) + case 'dnDiag': + case 'ltDnDiag': + case 'narDnDiag': + case 'dkDnDiag': + case 'wdDnDiag': + return pat( + `repeating-linear-gradient(45deg, ${fg} 0px, ${fg} 1px, transparent 1px, transparent ${s}px)` + ) + + // Diagonal 
lines (up-right) + case 'upDiag': + case 'ltUpDiag': + case 'narUpDiag': + case 'dkUpDiag': + case 'wdUpDiag': + return pat( + `repeating-linear-gradient(-45deg, ${fg} 0px, ${fg} 1px, transparent 1px, transparent ${s}px)` + ) + + // Grid (horizontal + vertical) + case 'smGrid': + case 'lgGrid': + case 'cross': + return pat2( + `repeating-linear-gradient(0deg, ${fg} 0px, ${fg} 1px, transparent 1px, transparent ${s}px)`, + `repeating-linear-gradient(90deg, ${fg} 0px, ${fg} 1px, transparent 1px, transparent ${s}px)` + ) + + // Diagonal cross + case 'smCheck': + case 'lgCheck': + case 'diagCross': + case 'openDmnd': + return pat2( + `repeating-linear-gradient(45deg, ${fg} 0px, ${fg} 1px, transparent 1px, transparent ${s}px)`, + `repeating-linear-gradient(-45deg, ${fg} 0px, ${fg} 1px, transparent 1px, transparent ${s}px)` + ) + + // Dot patterns + case 'dotGrid': + case 'dotDmnd': + return pat(`radial-gradient(${fg} 1px, transparent 1px)`) + + // Trellis / weave + case 'trellis': + case 'weave': + return pat2( + `repeating-linear-gradient(45deg, ${fg} 0px, ${fg} 2px, transparent 2px, transparent ${s}px)`, + `repeating-linear-gradient(-45deg, ${fg} 0px, ${fg} 2px, transparent 2px, transparent ${s}px)` + ) + + // Dash variants + case 'dashDnDiag': + case 'dashUpDiag': + case 'dashHorz': + case 'dashVert': { + const angle = preset.includes('Dn') + ? '45deg' + : preset.includes('Up') + ? '-45deg' + : preset.includes('Horz') + ? '0deg' + : '90deg' + return pat( + `repeating-linear-gradient(${angle}, ${fg} 0px, ${fg} 3px, transparent 3px, transparent ${s}px)` + ) + } + + // Sphere / shingle — radial gradient approximation + case 'sphere': + case 'shingle': + case 'plaid': + case 'divot': + case 'zigZag': + return pat(`radial-gradient(${fg} 2px, transparent 2px)`) + + default: + return bg + } +} + +/** + * Parse a gradient fill into a CSS gradient string. 
+ */ +function resolveGradient( + gradFill: SafeXmlNode, + ctx: RenderContext, + placeholderColorNode?: SafeXmlNode +): string { + // Parse gradient stops + const gsLst = gradFill.child('gsLst') + const stops: { position: number; color: string }[] = [] + + for (const gs of gsLst.children('gs')) { + const pos = gs.numAttr('pos') ?? 0 + const posPercent = pctToDecimal(pos) * 100 + const { color, alpha } = resolveColorWithPlaceholder(gs, ctx, placeholderColorNode) + stops.push({ position: posPercent, color: colorToCss(color, alpha) }) + } + + if (stops.length === 0) { + return '' + } + + // Sort stops by position + stops.sort((a, b) => a.position - b.position) + + const stopsStr = stops.map((s) => `${s.color} ${s.position.toFixed(1)}%`).join(', ') + + // Determine gradient type + const lin = gradFill.child('lin') + if (lin.exists()) { + const angle = angleToDeg(lin.numAttr('ang') ?? 0) + // OOXML angle 0 = top-to-bottom in the gradient coordinate system + // CSS angle 0 = bottom-to-top, so we need to adjust + const cssAngle = (angle + 90) % 360 + return `linear-gradient(${cssAngle.toFixed(1)}deg, ${stopsStr})` + } + + const path = gradFill.child('path') + if (path.exists()) { + const pathType = path.attr('path') + if (pathType === 'circle' || pathType === 'shape' || pathType === 'rect') { + // OOXML path gradients: stop pos=0 = fillToRect center, pos=100000 = shape edge. + // CSS radial-gradient: 0% = center, 100% = edge. + // Conventions match — no reversal needed. + + // Resolve fillToRect center point + const ftr = path.child('fillToRect') + let cx = 50 + let cy = 50 + if (ftr.exists()) { + const l = (ftr.numAttr('l') ?? 0) / 100000 + const t = (ftr.numAttr('t') ?? 0) / 100000 + const r = (ftr.numAttr('r') ?? 0) / 100000 + const b = (ftr.numAttr('b') ?? 0) / 100000 + cx = ((l + (1 - r)) / 2) * 100 + cy = ((t + (1 - b)) / 2) * 100 + } + + if (pathType === 'rect') { + // Rectangular gradient (L∞ norm / Chebyshev distance): creates cross/X contour + // pattern. 
CSS can't do this natively; approximate by overlaying horizontal and + // vertical linear gradients with a radial gradient as fallback. + // The SVG path in ShapeRenderer uses the proper blend approach. + return `radial-gradient(closest-side at ${cx.toFixed(1)}% ${cy.toFixed(1)}%, ${stopsStr})` + } + + return `radial-gradient(ellipse at ${cx.toFixed(1)}% ${cy.toFixed(1)}%, ${stopsStr})` + } + } + + // Default to linear top-to-bottom + return `linear-gradient(180deg, ${stopsStr})` +} + +// --------------------------------------------------------------------------- +// Line Style Resolution +// --------------------------------------------------------------------------- + +/** + * Resolve a line (outline) node into CSS-compatible properties. + * + * @param ln The `` node from spPr + * @param ctx Render context + * @param lnRef Optional `` from `` — provides fallback color + * when `` has no explicit solidFill (common for connectors) + */ +export function resolveLineStyle( + ln: SafeXmlNode, + ctx: RenderContext, + lnRef?: SafeXmlNode +): { width: number; color: string; dash: string; dashKind: string } { + // Width: a:ln@w is in EMU, convert to px + const widthEmu = ln.numAttr('w') ?? 0 + let width = emuToPx(widthEmu) + + // Color from solidFill child + let color = 'transparent' + const solidFill = ln.child('solidFill') + if (solidFill.exists()) { + const phClr = solidFill.child('schemeClr') + const usesPlaceholder = phClr.exists() && (phClr.attr('val') ?? '').toLowerCase() === 'phclr' + if (usesPlaceholder && lnRef && lnRef.exists()) { + // Theme line styles often use schemeClr=phClr and expect the concrete color from lnRef. + const base = resolveColor(lnRef, ctx) + const baseHex = base.color.startsWith('#') ? 
base.color.slice(1) : base.color + const adjusted = applyColorModifiers(baseHex, collectModifiers(phClr)) + color = colorToCss(adjusted.color, adjusted.alpha * base.alpha) + } else { + const resolved = resolveColor(solidFill, ctx) + color = colorToCss(resolved.color, resolved.alpha) + } + } else if (lnRef?.exists() && (lnRef.numAttr('idx') ?? 0) > 0) { + const idx = lnRef.numAttr('idx') ?? 0 + // Look up theme line style for width, color, and dash + if (idx > 0 && ctx.theme.lineStyles && ctx.theme.lineStyles.length >= idx) { + const themeLn = ctx.theme.lineStyles[idx - 1] + // Get width from theme line if not set on the explicit ln node + if (width === 0) { + const themeW = themeLn.numAttr('w') ?? 0 + width = emuToPx(themeW) + } + // Get color: prefer lnRef's own color child, fall back to theme line's solidFill + const resolved = resolveColor(lnRef, ctx) + color = colorToCss(resolved.color, resolved.alpha) + } else { + // Fallback: use lnRef color directly, approximate width from idx + const resolved = resolveColor(lnRef, ctx) + color = colorToCss(resolved.color, resolved.alpha) + if (width === 0 && idx > 0) { + width = idx * 0.75 // approximate: idx 1 = ~0.75px, idx 2 = ~1.5px + } + } + } + + // Width fallback should still use lnRef/theme even when explicit solidFill is present on . + if (width === 0 && lnRef && lnRef.exists()) { + const idx = lnRef.numAttr('idx') ?? 0 + if (idx > 0 && ctx.theme.lineStyles && ctx.theme.lineStyles.length >= idx) { + const themeLn = ctx.theme.lineStyles[idx - 1] + const themeW = themeLn.numAttr('w') ?? 
0 + width = emuToPx(themeW) + } else if (idx > 0) { + width = idx * 0.75 + } + } + + // Dash pattern + let dash = 'solid' + let dashKind = 'solid' + const prstDash = ln.child('prstDash') + if (prstDash.exists()) { + const val = prstDash.attr('val') || 'solid' + dashKind = val + dash = ooxmlDashToCss(val) + } + + // If no dash from explicit ln, check theme line style + if (dash === 'solid' && lnRef && lnRef.exists()) { + const idx = lnRef.numAttr('idx') ?? 0 + if (idx > 0 && ctx.theme.lineStyles && ctx.theme.lineStyles.length >= idx) { + const themeLn = ctx.theme.lineStyles[idx - 1] + const themeDash = themeLn.child('prstDash') + if (themeDash.exists()) { + dashKind = themeDash.attr('val') || 'solid' + dash = ooxmlDashToCss(dashKind) + } + } + } + + return { width, color, dash, dashKind } +} + +/** + * Map OOXML preset dash values to CSS border-style. + */ +function ooxmlDashToCss(val: string): string { + switch (val) { + case 'solid': + return 'solid' + case 'dot': + case 'sysDot': + return 'dotted' + case 'dash': + case 'sysDash': + case 'lgDash': + return 'dashed' + case 'dashDot': + case 'lgDashDot': + case 'lgDashDotDot': + case 'sysDashDot': + case 'sysDashDotDot': + return 'dashed' + default: + return 'solid' + } +} + +// --------------------------------------------------------------------------- +// Gradient Fill Resolution (structured data for SVG use) +// --------------------------------------------------------------------------- + +export interface GradientFillData { + type: 'linear' | 'radial' + stops: Array<{ position: number; color: string }> + /** SVG gradient interpolation space; OOXML gradients visually match linearRGB more closely. */ + colorInterpolation?: 'linearRGB' | 'sRGB' + /** OOXML angle in degrees (0 = top-to-bottom). Only relevant for linear gradients. */ + angle: number + /** Radial gradient center X as fraction 0–1. Default 0.5. */ + cx?: number + /** Radial gradient center Y as fraction 0–1. Default 0.5. 
*/ + cy?: number + /** OOXML path type for radial gradients: 'rect', 'circle', or 'shape'. */ + pathType?: string +} + +function resolveGradientFillNode( + gradFill: SafeXmlNode, + ctx: RenderContext, + placeholderColorNode?: SafeXmlNode +): GradientFillData | null { + const gsLst = gradFill.child('gsLst') + const stops: Array<{ position: number; color: string }> = [] + + for (const gs of gsLst.children('gs')) { + const pos = gs.numAttr('pos') ?? 0 + const posPercent = pctToDecimal(pos) * 100 + const { color, alpha } = resolveColorWithPlaceholder(gs, ctx, placeholderColorNode) + stops.push({ position: posPercent, color: colorToCss(color, alpha) }) + } + + if (stops.length === 0) return null + stops.sort((a, b) => a.position - b.position) + + const lin = gradFill.child('lin') + if (lin.exists()) { + const angle = angleToDeg(lin.numAttr('ang') ?? 0) + return { type: 'linear', stops, angle, colorInterpolation: 'linearRGB' } + } + + const path = gradFill.child('path') + if (path.exists()) { + const pathType = path.attr('path') + if (pathType === 'circle' || pathType === 'shape' || pathType === 'rect') { + const ftr = path.child('fillToRect') + let cx = 0.5 + let cy = 0.5 + if (ftr.exists()) { + const l = (ftr.numAttr('l') ?? 0) / 100000 + const t = (ftr.numAttr('t') ?? 0) / 100000 + const r = (ftr.numAttr('r') ?? 0) / 100000 + const b = (ftr.numAttr('b') ?? 0) / 100000 + cx = (l + (1 - r)) / 2 + cy = (t + (1 - b)) / 2 + } + return { + type: 'radial', + stops, + angle: 0, + cx, + cy, + pathType: pathType, + colorInterpolation: 'linearRGB', + } + } + } + + return { type: 'linear', stops, angle: 0, colorInterpolation: 'linearRGB' } +} + +/** + * Resolve a gradient fill from `spPr` into structured data suitable for + * creating SVG gradient elements. Returns null if no gradient fill is present. 
+ */ +export function resolveGradientFill( + spPr: SafeXmlNode, + ctx: RenderContext +): GradientFillData | null { + let gradFill = spPr.child('gradFill') + + // grpFill: inherit gradient from parent group's grpSpPr + if (!gradFill.exists() && spPr.child('grpFill').exists() && ctx.groupFillNode) { + gradFill = ctx.groupFillNode.child('gradFill') + } + + if (!gradFill.exists()) return null + + return resolveGradientFillNode(gradFill, ctx) +} + +export function resolveThemeFillReference( + fillRef: SafeXmlNode, + ctx: RenderContext +): { fillCss: string; gradientFillData: GradientFillData | null } { + const idx = fillRef.numAttr('idx') ?? 0 + if (idx <= 0 || (ctx.theme.fillStyles?.length ?? 0) < idx) { + return { fillCss: resolveColorToCss(fillRef, ctx), gradientFillData: null } + } + + const themeFill = ctx.theme.fillStyles[idx - 1] + if (!themeFill?.exists()) { + return { fillCss: resolveColorToCss(fillRef, ctx), gradientFillData: null } + } + + if (themeFill.localName === 'solidFill') { + const resolved = resolveColorWithPlaceholder(themeFill, ctx, fillRef) + return { fillCss: colorToCss(resolved.color, resolved.alpha), gradientFillData: null } + } + + if (themeFill.localName === 'gradFill') { + return { + fillCss: resolveGradient(themeFill, ctx, fillRef), + gradientFillData: resolveGradientFillNode(themeFill, ctx, fillRef), + } + } + + if (themeFill.localName === 'pattFill') { + return { fillCss: resolvePatternFill(themeFill, ctx), gradientFillData: null } + } + + if (themeFill.localName === 'noFill') { + return { fillCss: 'transparent', gradientFillData: null } + } + + return { fillCss: resolveColorToCss(fillRef, ctx), gradientFillData: null } +} + +// --------------------------------------------------------------------------- +// Gradient Stroke Resolution +// --------------------------------------------------------------------------- + +export interface GradientStrokeData { + stops: Array<{ position: number; color: string }> + angle: number + width: number + 
colorInterpolation?: 'linearRGB' | 'sRGB' +} + +/** + * Resolve a gradient stroke from an `` node that contains ``. + * Returns gradient stop data, angle, and line width — or null if no gradient fill is present. + */ +export function resolveGradientStroke( + ln: SafeXmlNode, + ctx: RenderContext +): GradientStrokeData | null { + const gradFill = ln.child('gradFill') + if (!gradFill.exists()) return null + + const gsLst = gradFill.child('gsLst') + const stops: Array<{ position: number; color: string }> = [] + + for (const gs of gsLst.children('gs')) { + const pos = gs.numAttr('pos') ?? 0 + const posPercent = pctToDecimal(pos) * 100 + const { color, alpha } = resolveColor(gs, ctx) + const cssColor = colorToCss(color, alpha) + stops.push({ position: posPercent, color: cssColor }) + } + + if (stops.length === 0) return null + stops.sort((a, b) => a.position - b.position) + + const lin = gradFill.child('lin') + let angle = 0 + if (lin.exists()) { + angle = angleToDeg(lin.numAttr('ang') ?? 0) + } + + const widthEmu = ln.numAttr('w') ?? 0 + let width = emuToPx(widthEmu) + // OOXML default when w is omitted is typically 1 pt; avoid invisible gradient stroke + if (width <= 0) width = 1 + + return { stops, angle, width, colorInterpolation: 'linearRGB' } +} diff --git a/apps/sim/lib/pptx-renderer/renderer/table-renderer.ts b/apps/sim/lib/pptx-renderer/renderer/table-renderer.ts new file mode 100644 index 00000000000..f457d03d713 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/renderer/table-renderer.ts @@ -0,0 +1,608 @@ +/** + * Table renderer — converts TableNodeData into positioned HTML table elements. + * + * Table style behavior follows: + * - OOXML ECMA-376 §21.1.3.15 tblPr: firstRow, firstCol, bandRow, bandCol, lastRow, lastCol + * are attributes; when not specified they default to off (no styling). + * - references/pptxjs (gen-table.ts, get-table-row-style.ts, get-table-cell-params.ts): + * reads tblPr attrs only (e.g. 
firstCol === "1"), applies style parts when attr is "1", + * and uses tcTxStyle from each part for cell text color/font (a:tcTxStyle under firstRow, firstCol, etc.). + */ + +import type { TableCell, TableNodeData } from '../model/nodes/table-node' +import { emuToPx } from '../parser/units' +import type { SafeXmlNode } from '../parser/xml-parser' +import { hexToRgb } from '../utils/color' +import { getPredefinedTableStyle } from './predefined-table-styles' +import type { RenderContext } from './render-context' +import { resolveColor, resolveLineStyle } from './style-resolver' +import { renderTextBody } from './text-renderer' + +// --------------------------------------------------------------------------- +// Table Style Lookup +// --------------------------------------------------------------------------- + +/** + * Find a table style node by its ID from presentation.tableStyles. + * tableStyles XML structure: + */ +function findTableStyle( + tableStyleId: string | undefined, + ctx: RenderContext +): SafeXmlNode | undefined { + if (!tableStyleId || !ctx.presentation.tableStyles) return undefined + const tblStyleLst = ctx.presentation.tableStyles + for (const style of tblStyleLst.children('tblStyle')) { + if (style.attr('styleId') === tableStyleId) { + return style + } + } + // Also check from root if tableStyles IS the tblStyleLst + for (const style of tblStyleLst.children()) { + if (style.localName === 'tblStyle' && style.attr('styleId') === tableStyleId) { + return style + } + } + // Fallback: check predefined (built-in) Office table styles not embedded in the PPTX + return getPredefinedTableStyle(tableStyleId) +} + +/** + * Get the appropriate style section from a table style for a given cell position. + * Priority: specific section > wholeTbl (fallback). 
+ */ +function getStyleSections( + tblStyle: SafeXmlNode, + rowIdx: number, + colIdx: number, + totalRows: number, + totalCols: number, + tblPr: SafeXmlNode | undefined +): SafeXmlNode[] { + const sections: SafeXmlNode[] = [] + + // Style parts enabled only when tblPr has attribute "1" (or true); per spec default is off. + // pptxjs uses attrs only (firstCol === "1"); we also accept child elements for compatibility. + const flag = (attrName: string, childName: string): boolean => { + if (!tblPr) return false + const attr = tblPr.attr(attrName) + if (attr !== undefined) return attr === '1' || attr === 'true' + const ch = tblPr.child(childName) + if (ch.exists()) { + const val = ch.attr('val') + return val !== '0' && val !== 'false' + } + return false + } + const bandRow = + tblPr?.attr('bandRow') === '1' || + tblPr?.attr('bandRow') === 'true' || + tblPr?.child('bandRow').exists() + const bandCol = + tblPr?.attr('bandCol') === '1' || + tblPr?.attr('bandCol') === 'true' || + tblPr?.child('bandCol').exists() + const isFirstRow = flag('firstRow', 'firstRow') + const isLastRow = flag('lastRow', 'lastRow') + const isFirstCol = flag('firstCol', 'firstCol') + const isLastCol = flag('lastCol', 'lastCol') + + // wholeTbl is the base (lowest priority) + const wholeTbl = tblStyle.child('wholeTbl') + if (wholeTbl.exists()) sections.push(wholeTbl) + + // Banding (applied on top of wholeTbl) + if (bandRow) { + const effectiveRow = isFirstRow ? 
rowIdx - 1 : rowIdx + if (effectiveRow >= 0 && effectiveRow % 2 === 1) { + const band = tblStyle.child('band2H') + if (band.exists()) sections.push(band) + } else if (effectiveRow >= 0 && effectiveRow % 2 === 0) { + const band = tblStyle.child('band1H') + if (band.exists()) sections.push(band) + } + } + + if (bandCol) { + if (colIdx % 2 === 1) { + const band = tblStyle.child('band2V') + if (band.exists()) sections.push(band) + } else { + const band = tblStyle.child('band1V') + if (band.exists()) sections.push(band) + } + } + + // Special rows/cols (highest priority, override banding) + if (isFirstRow && rowIdx === 0) { + const s = tblStyle.child('firstRow') + if (s.exists()) sections.push(s) + } + if (isLastRow && rowIdx === totalRows - 1) { + const s = tblStyle.child('lastRow') + if (s.exists()) sections.push(s) + } + if (isFirstCol && colIdx === 0) { + const s = tblStyle.child('firstCol') + if (s.exists()) sections.push(s) + } + if (isLastCol && colIdx === totalCols - 1) { + const s = tblStyle.child('lastCol') + if (s.exists()) sections.push(s) + } + + return sections +} + +/** Resolved text properties from table style tcTxStyle. */ +interface TableStyleTextProps { + color?: string + bold?: boolean + italic?: boolean + fontFamily?: string +} + +/** + * Get the effective text properties from table style sections (last section with tcTxStyle wins). + * tcTxStyle supports: b (bold), i (italic), and color children (schemeClr, solidFill, etc.). + * When a style part (e.g. firstCol, firstRow) is applied, we use that part's tcTxStyle for cell + * text styling so text stays readable on styled fill. 
+ */ +function getEffectiveTableStyleTextProps( + sections: SafeXmlNode[], + ctx: RenderContext +): TableStyleTextProps | undefined { + for (let i = sections.length - 1; i >= 0; i--) { + const tcTxStyle = sections[i].child('tcTxStyle') + if (!tcTxStyle.exists()) continue + + const props: TableStyleTextProps = {} + + // Bold: b="on" or b="off" (OOXML CT_TableStyleTextStyle) + const b = tcTxStyle.attr('b') + if (b === 'on') props.bold = true + else if (b === 'off') props.bold = false + + // Italic: i="on" or i="off" + const italic = tcTxStyle.attr('i') + if (italic === 'on') props.italic = true + else if (italic === 'off') props.italic = false + + // Color: child elements (schemeClr, solidFill, srgbClr, etc.) + for (const child of tcTxStyle.allChildren()) { + const tag = child.localName + if ( + tag === 'schemeClr' || + tag === 'solidFill' || + tag === 'srgbClr' || + tag === 'scrgbClr' || + tag === 'prstClr' || + tag === 'sysClr' + ) { + const { color, alpha } = resolveColor(child, ctx) + const hex = color.startsWith('#') ? color : `#${color}` + if (alpha < 1) { + const { r, g, b: bl } = hexToRgb(hex) + props.color = `rgba(${r},${g},${bl},${alpha.toFixed(3)})` + } else { + props.color = hex + } + break + } + } + + // Font family: // typeface or + const font = tcTxStyle.child('font') + if (font.exists()) { + const latin = font.child('latin').attr('typeface') + const ea = font.child('ea').attr('typeface') + const cs = font.child('cs').attr('typeface') + props.fontFamily = latin || ea || cs + } + if (!props.fontFamily) { + const fontRef = tcTxStyle.child('fontRef') + if (fontRef.exists()) { + const idx = fontRef.attr('idx') + if (idx === 'major') { + props.fontFamily = ctx.theme.majorFont.latin || ctx.theme.majorFont.ea + } else if (idx === 'minor') { + props.fontFamily = ctx.theme.minorFont.latin || ctx.theme.minorFont.ea + } + } + } + + return props + } + return undefined +} + +/** + * Apply fill from a table style tcStyle node. + * Structure: ... or ... 
+ */ +function applyStyleFill(td: HTMLElement, tcStyle: SafeXmlNode, ctx: RenderContext): boolean { + const fill = tcStyle.child('fill') + if (!fill.exists()) return false + + // solidFill + const solidFill = fill.child('solidFill') + if (solidFill.exists()) { + const { color, alpha } = resolveColor(solidFill, ctx) + const hex = color.startsWith('#') ? color : `#${color}` + if (alpha < 1) { + const { r, g, b } = hexToRgb(hex) + td.style.backgroundColor = `rgba(${r},${g},${b},${alpha.toFixed(3)})` + } else { + td.style.backgroundColor = hex + } + return true + } + + // fillRef (theme fill reference) + const fillRef = fill.child('fillRef') + if (fillRef.exists()) { + // fillRef contains a color child + idx attribute + const { color, alpha } = resolveColor(fillRef, ctx) + const hex = color.startsWith('#') ? color : `#${color}` + if (alpha < 1) { + const { r, g, b } = hexToRgb(hex) + td.style.backgroundColor = `rgba(${r},${g},${b},${alpha.toFixed(3)})` + } else { + td.style.backgroundColor = hex + } + return true + } + + // noFill + const noFill = fill.child('noFill') + if (noFill.exists()) return true // explicitly no fill + + return false +} + +/** + * Apply borders from a table style tcStyle node. + * Structure: /// ... 
+ */ +function applyStyleBorders( + td: HTMLElement, + tcStyle: SafeXmlNode, + ctx: RenderContext, + rowIdx?: number, + colIdx?: number, + totalRows?: number, + totalCols?: number +): void { + const tcBdr = tcStyle.child('tcBdr') + if (!tcBdr.exists()) return + + const borderMap: Array<[string, 'borderTop' | 'borderBottom' | 'borderLeft' | 'borderRight']> = [ + ['top', 'borderTop'], + ['bottom', 'borderBottom'], + ['left', 'borderLeft'], + ['right', 'borderRight'], + ] + + // Map insideH/insideV to individual cell borders: + // insideH → borderBottom for non-last rows, borderTop for non-first rows + // insideV → borderRight for non-last cols, borderLeft for non-first cols + const insideH = tcBdr.child('insideH') + if (insideH.exists() && rowIdx !== undefined && totalRows !== undefined) { + if (rowIdx < totalRows - 1) { + borderMap.push(['insideH', 'borderBottom']) + } + if (rowIdx > 0) { + borderMap.push(['insideH', 'borderTop']) + } + } + const insideV = tcBdr.child('insideV') + if (insideV.exists() && colIdx !== undefined && totalCols !== undefined) { + if (colIdx < totalCols - 1) { + borderMap.push(['insideV', 'borderRight']) + } + if (colIdx > 0) { + borderMap.push(['insideV', 'borderLeft']) + } + } + + for (const [xmlName, cssProp] of borderMap) { + const side = tcBdr.child(xmlName) + if (!side.exists()) continue + + // Direct element + const ln = side.child('ln') + if (ln.exists()) { + const noFill = ln.child('noFill') + if (noFill.exists()) continue + + const style = resolveLineStyle(ln, ctx) + if (style.width > 0 && style.color !== 'transparent') { + td.style[cssProp] = `${Math.max(style.width, 0.5)}px ${style.dash} ${style.color}` + } + continue + } + + // — reference to theme line style (common in table styles) + const lnRef = side.child('lnRef') + if (lnRef.exists()) { + const idx = lnRef.numAttr('idx') ?? 
0 + if (idx === 0) continue // idx 0 = no line + + // Resolve color from the lnRef's child color element + const { color, alpha } = resolveColor(lnRef, ctx) + const hex = color.startsWith('#') ? color : `#${color}` + + // Get width from theme line style + let width = 1 // default 1px + if (ctx.theme.lineStyles && ctx.theme.lineStyles.length >= idx) { + const themeLn = ctx.theme.lineStyles[idx - 1] + const themeW = themeLn.numAttr('w') ?? 12700 // default 1pt + width = emuToPx(themeW) + } + + const cssColor = + alpha < 1 + ? `rgba(${hexToRgb(hex).r},${hexToRgb(hex).g},${hexToRgb(hex).b},${alpha.toFixed(3)})` + : hex + if (width > 0) { + td.style[cssProp] = `${Math.max(width, 0.5)}px solid ${cssColor}` + } + } + } +} + +/** + * Apply table-level background from tblStyle > tblBg. + * tblBg can contain fillRef (theme fill reference) or solidFill. + */ +function applyTableBackground(table: HTMLElement, tblStyle: SafeXmlNode, ctx: RenderContext): void { + const tblBg = tblStyle.child('tblBg') + if (!tblBg.exists()) return + + // fillRef: references a theme fill style with a color override + const fillRef = tblBg.child('fillRef') + if (fillRef.exists()) { + const { color, alpha } = resolveColor(fillRef, ctx) + const hex = color.startsWith('#') ? color : `#${color}` + if (alpha < 1) { + const { r, g, b } = hexToRgb(hex) + table.style.backgroundColor = `rgba(${r},${g},${b},${alpha.toFixed(3)})` + } else { + table.style.backgroundColor = hex + } + return + } + + // solidFill + const solidFill = tblBg.child('solidFill') + if (solidFill.exists()) { + const { color, alpha } = resolveColor(solidFill, ctx) + const hex = color.startsWith('#') ? 
color : `#${color}` + if (alpha < 1) { + const { r, g, b } = hexToRgb(hex) + table.style.backgroundColor = `rgba(${r},${g},${b},${alpha.toFixed(3)})` + } else { + table.style.backgroundColor = hex + } + } +} + +// --------------------------------------------------------------------------- +// Table Rendering +// --------------------------------------------------------------------------- + +/** + * Render a table node into an absolutely-positioned HTML element. + */ +export function renderTable(node: TableNodeData, ctx: RenderContext): HTMLElement { + const wrapper = document.createElement('div') + wrapper.style.position = 'absolute' + wrapper.style.left = `${node.position.x}px` + wrapper.style.top = `${node.position.y}px` + wrapper.style.width = `${node.size.w}px` + wrapper.style.height = `${node.size.h}px` + wrapper.style.overflow = 'hidden' + + // Apply transforms + const transforms: string[] = [] + if (node.rotation !== 0) { + transforms.push(`rotate(${node.rotation}deg)`) + } + if (node.flipH) { + transforms.push('scaleX(-1)') + } + if (node.flipV) { + transforms.push('scaleY(-1)') + } + if (transforms.length > 0) { + wrapper.style.transform = transforms.join(' ') + } + + // Resolve table style + const tblStyle = findTableStyle(node.tableStyleId, ctx) + const tblPr = node.properties + const totalRows = node.rows.length + const totalCols = node.columns.length + + // Create table element + const table = document.createElement('table') + table.style.borderCollapse = 'collapse' + table.style.width = '100%' + table.style.height = '100%' + table.style.tableLayout = 'fixed' + + // Apply table background from table style (tblBg) + if (tblStyle) { + applyTableBackground(table, tblStyle, ctx) + } + + // Column widths + const totalWidth = node.columns.reduce((sum, w) => sum + w, 0) + if (totalWidth > 0 && node.columns.length > 0) { + const colgroup = document.createElement('colgroup') + for (const colW of node.columns) { + const col = document.createElement('col') + 
col.style.width = `${(colW / totalWidth) * 100}%` + colgroup.appendChild(col) + } + table.appendChild(colgroup) + } + + // Compute total row height so we can express each row as a proportion + const totalRowHeight = node.rows.reduce((sum, r) => sum + r.height, 0) + + // Render rows + const tbody = document.createElement('tbody') + let colIdx = 0 + for (let rowIdx = 0; rowIdx < node.rows.length; rowIdx++) { + const row = node.rows[rowIdx] + const tr = document.createElement('tr') + if (row.height > 0 && totalRowHeight > 0) { + // Use percentage heights so rows stay proportional within the + // table's constrained height instead of expanding beyond it. + tr.style.height = `${(row.height / totalRowHeight) * 100}%` + } + + colIdx = 0 + for (const cell of row.cells) { + // Skip merged cells + if (cell.hMerge || cell.vMerge) { + colIdx++ + continue + } + + const td = document.createElement('td') + td.style.overflow = 'hidden' + + // Spanning + if (cell.gridSpan > 1) { + td.colSpan = cell.gridSpan + } + if (cell.rowSpan > 1) { + td.rowSpan = cell.rowSpan + } + + // Apply table style first (as base), then direct tcPr overrides + let sections: SafeXmlNode[] = [] + if (tblStyle) { + sections = getStyleSections(tblStyle, rowIdx, colIdx, totalRows, totalCols, tblPr) + // Apply sections in order (later sections override earlier ones) + for (const section of sections) { + const tcStyle = section.child('tcStyle') + if (tcStyle.exists()) { + applyStyleFill(td, tcStyle, ctx) + applyStyleBorders(td, tcStyle, ctx, rowIdx, colIdx, totalRows, totalCols) + } + } + } + + // Apply direct cell properties (override table style) + applyCellProperties(td, cell, ctx) + + // Resolve table style text properties (color, bold, italic from tcTxStyle) + const textProps = + sections.length > 0 ? getEffectiveTableStyleTextProps(sections, ctx) : undefined + + // Render text inside cell + if (cell.textBody) { + const opts = textProps + ? 
{ + cellTextColor: textProps.color, + cellTextBold: textProps.bold, + cellTextItalic: textProps.italic, + cellTextFontFamily: textProps.fontFamily, + } + : undefined + renderTextBody(cell.textBody, undefined, ctx, td, opts) + } + + tr.appendChild(td) + colIdx += cell.gridSpan + } + + tbody.appendChild(tr) + } + + table.appendChild(tbody) + wrapper.appendChild(table) + return wrapper +} + +// --------------------------------------------------------------------------- +// Cell Property Application +// --------------------------------------------------------------------------- + +/** + * Apply table cell properties (tcPr) to a
    + {headerGroups.map((g) => g.kind === 'workflow' ? ( handleConfigureWorkflowGroup(g.groupId)} + onRunColumn={userPermissions.canEdit ? handleRunColumn : undefined} + selectedRowIds={selectedRowIds} onInsertLeft={ userPermissions.canEdit ? handleInsertColumnLeft : undefined } @@ -2688,6 +2939,18 @@ export function Table({ onDeleteGroup={ userPermissions.canEdit ? handleDeleteWorkflowGroup : undefined } + onViewWorkflow={handleViewWorkflow} + readOnly={!userPermissions.canEdit} + onDragStart={ + userPermissions.canEdit ? handleColumnDragStart : undefined + } + onDragOver={ + userPermissions.canEdit ? handleColumnDragOver : undefined + } + onDragEnd={userPermissions.canEdit ? handleColumnDragEnd : undefined} + onDragLeave={ + userPermissions.canEdit ? handleColumnDragLeave : undefined + } /> ) : ( ))} {userPermissions.canEdit && ( - )}
    { - if (e.button !== 0) return - onRowToggle(rowIndex, e.shiftKey) - }} - > -
    -
    +
    +
    +
    { + if (e.button !== 0) return + onRowToggle(rowIndex, e.shiftKey) + }} + > @@ -3159,7 +3334,7 @@ const DataRow = React.memo(function DataRow({
    @@ -3167,14 +3342,16 @@ const DataRow = React.memo(function DataRow({
    {hasWorkflowColumns && ( - + )}
    - -
    -
    +
    { if (e.button !== 0) return onRowToggle(rowIndex, e.shiftKey) @@ -3326,7 +3310,7 @@ const DataRow = React.memo(function DataRow({ > @@ -3409,7 +3393,8 @@ const DataRow = React.memo(function DataRow({ {isHighlighted && (isMultiCell || isRowChecked) && (
    )} - {isAnchor &&
    } + {isAnchor && ( +
    + )}
    -
    +

    {titleByMode[config.mode]}

    diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/constants.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/constants.ts new file mode 100644 index 00000000000..74f504d75a1 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/constants.ts @@ -0,0 +1,2 @@ +export const WORKFLOW_SEARCH_HIGHLIGHT_CLASS = + 'rounded-sm bg-orange-400 shadow-[3px_0_0_#fb923c,-3px_0_0_#fb923c]' diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/eval-input/eval-input.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/eval-input/eval-input.tsx index 97e22638656..4a93dee957f 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/eval-input/eval-input.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/eval-input/eval-input.tsx @@ -5,11 +5,13 @@ import { Button, Input, Textarea, Tooltip } from '@/components/emcn' import { Trash } from '@/components/emcn/icons/trash' import { Label } from '@/components/ui/label' import { cn } from '@/lib/core/utils/cn' +import { WORKFLOW_SEARCH_HIGHLIGHT_CLASS } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/constants' import { formatDisplayText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/formatted-text' import { TagDropdown } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tag-dropdown/tag-dropdown' import { useSubBlockInput } from 
'@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-input' import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value' import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes' +import type { ActiveSearchTarget } from '@/stores/panel/editor/store' interface EvalMetric { id: string @@ -27,6 +29,7 @@ interface EvalInputProps { isPreview?: boolean previewValue?: EvalMetric[] | null disabled?: boolean + activeSearchTarget?: ActiveSearchTarget | null } // Default values @@ -43,6 +46,7 @@ export function EvalInput({ isPreview = false, previewValue, disabled = false, + activeSearchTarget, }: EvalInputProps) { const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId) const accessiblePrefixes = useAccessibleReferencePrefixes(blockId) @@ -67,6 +71,17 @@ export function EvalInput({ const defaultMetric = useMemo(() => createDefaultMetric(), []) const metrics: EvalMetric[] = value || [defaultMetric] + const isNestedSearchHighlighted = (metricIndex: number, metricPath: Array) => + activeSearchTarget?.subBlockId === subBlockId && + activeSearchTarget.valuePath[0] === metricIndex && + metricPath.every((segment, index) => activeSearchTarget.valuePath[index + 1] === segment) + + const renderFieldLabel = (label: string, highlighted: boolean) => ( + + ) + const addMetric = () => { if (isPreview || disabled) return @@ -176,7 +191,7 @@ export function EvalInput({
    - + {renderFieldLabel('Name', isNestedSearchHighlighted(index, ['name']))}
    - + {renderFieldLabel('Description', isNestedSearchHighlighted(index, ['description']))}
    {(() => { const fieldState = inputController.fieldHelpers.getFieldState(metric.id) @@ -259,7 +274,7 @@ export function EvalInput({
    - + {renderFieldLabel('Min Value', isNestedSearchHighlighted(index, ['range', 'min']))}
    - + {renderFieldLabel('Max Value', isNestedSearchHighlighted(index, ['range', 'max']))} ) } diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/short-input/short-input.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/short-input/short-input.tsx index ba5ff8461b1..76476408b28 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/short-input/short-input.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/short-input/short-input.tsx @@ -46,6 +46,8 @@ interface ShortInputProps { wandControlRef?: React.MutableRefObject /** Whether to hide the internal wand button (controlled by parent) */ hideInternalWand?: boolean + /** Whether workflow search is actively highlighting this input */ + isSearchHighlighted?: boolean } /** @@ -74,6 +76,7 @@ export const ShortInput = memo(function ShortInput({ useWebhookUrl = false, wandControlRef, hideInternalWand = false, + isSearchHighlighted = false, }: ShortInputProps) { const [localContent, setLocalContent] = useState('') const [isFocused, setIsFocused] = useState(false) @@ -332,16 +335,15 @@ export const ShortInput = memo(function ShortInput({ ? webhookManagement.webhookUrl : ctrlValue - const displayValue = - password && !isFocused ? '•'.repeat(actualValue?.length ?? 0) : actualValue + const shouldMask = password && !isFocused && !isSearchHighlighted + const displayValue = shouldMask ? '•'.repeat(actualValue?.length ?? 0) : actualValue - const formattedText = - password && !isFocused - ? '•'.repeat(actualValue?.length ?? 0) - : formatDisplayText(actualValue, { - accessiblePrefixes, - highlightAll: !accessiblePrefixes, - }) + const formattedText = shouldMask + ? '•'.repeat(actualValue?.length ?? 
0) + : formatDisplayText(actualValue, { + accessiblePrefixes, + highlightAll: !accessiblePrefixes, + }) return ( <> diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/starter/input-format.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/starter/input-format.tsx index 4e33f85ed8f..a759cc9d7fb 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/starter/input-format.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/starter/input-format.tsx @@ -20,11 +20,13 @@ import { } from '@/components/emcn' import { Label } from '@/components/ui/label' import { cn } from '@/lib/core/utils/cn' +import { WORKFLOW_SEARCH_HIGHLIGHT_CLASS } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/constants' import { formatDisplayText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/formatted-text' import { TagDropdown } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tag-dropdown/tag-dropdown' import { useSubBlockInput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-input' import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value' import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes' +import type { ActiveSearchTarget } from '@/stores/panel/editor/store' interface Field { id: string @@ -49,6 +51,7 @@ interface FieldFormatProps { 
valuePlaceholder?: string descriptionPlaceholder?: string config?: any + activeSearchTarget?: ActiveSearchTarget | null } /** @@ -103,6 +106,7 @@ export function FieldFormat({ showDescription = false, valuePlaceholder = 'Enter default value', descriptionPlaceholder = 'Describe this field', + activeSearchTarget, }: FieldFormatProps) { const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId) const valueInputRefs = useRef>({}) @@ -127,6 +131,17 @@ export function FieldFormat({ const fields: Field[] = Array.isArray(value) && value.length > 0 ? value : [createDefaultField()] const isReadOnly = isPreview || disabled + const isNestedSearchHighlighted = (fieldIndex: number, fieldKey: keyof Field) => + activeSearchTarget?.subBlockId === subBlockId && + activeSearchTarget.valuePath[0] === fieldIndex && + activeSearchTarget.valuePath.at(-1) === fieldKey + + const renderFieldLabel = (label: string, highlighted: boolean) => ( + + ) + /** * Adds a new field to the list */ @@ -555,13 +570,13 @@ export function FieldFormat({
    - + {renderFieldLabel('Name', isNestedSearchHighlighted(index, 'name'))}
    {renderNameInput(field)}
    {showType && (
    - + {renderFieldLabel('Type', isNestedSearchHighlighted(index, 'type'))} - + {renderFieldLabel( + 'Description', + isNestedSearchHighlighted(index, 'description') + )} updateField(field.id, 'description', e.target.value)} @@ -585,7 +603,7 @@ export function FieldFormat({ {showValue && (
    - + {renderFieldLabel('Value', isNestedSearchHighlighted(index, 'value'))}
    {renderValueInput(field)}
    )} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx index a07564db3d2..00538358f10 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx @@ -77,6 +77,7 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store' import { formatParameterLabel, getSubBlocksForToolInput, + getToolIdForOperation, getToolParametersConfig, isPasswordParameter, type SubBlocksForToolInput, @@ -383,36 +384,6 @@ function getOperationOptions(blockType: string): { label: string; id: string }[] }) } -/** - * Gets the correct tool ID for a given operation. - * - * @param blockType - The block type - * @param operation - The selected operation (for multi-operation tools) - * @returns The tool ID to use for execution, or `undefined` if not found - */ -function getToolIdForOperation(blockType: string, operation?: string): string | undefined { - const block = getAllBlocks().find((b) => b.type === blockType) - if (!block || !block.tools?.access) return undefined - - if (block.tools.access.length === 1) { - return block.tools.access[0] - } - - if (operation && block.tools?.config?.tool) { - try { - return block.tools.config.tool({ operation }) - } catch (error) { - logger.error('Error selecting tool for operation:', error) - } - } - - if (operation && block.tools.access.includes(operation)) { - return operation - } - - return block.tools.access[0] -} - /** * Creates a styled icon element for tool items in the selection dropdown. 
* diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/types.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/types.ts index c46bb97c186..5ea7263e392 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/types.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/types.ts @@ -1,36 +1 @@ -/** - * Represents a tool selected and configured in the workflow - * - * @remarks - * Valid types include: - * - Standard block types (e.g., 'api', 'search', 'function') - * - 'custom-tool': User-defined tools with custom code - * - 'mcp': Individual MCP tool from a connected server - * - * For custom tools (new format), we only store: type, customToolId, usageControl, isExpanded. - * Everything else (title, schema, code) is loaded dynamically from the database. - * Legacy custom tools with inline schema/code are still supported for backwards compatibility. 
- */ -export interface StoredTool { - /** Block type identifier */ - type: string - /** Display title for the tool (optional for new custom tool format) */ - title?: string - /** Direct tool ID for execution (optional for new custom tool format) */ - toolId?: string - /** Parameter values configured by the user */ - params?: Record - /** Whether the tool details are expanded in UI */ - isExpanded?: boolean - /** Database ID for custom tools (new format - reference only) */ - customToolId?: string - /** Tool schema for custom tools (legacy format - inline JSON schema) */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any - schema?: Record - /** Implementation code for custom tools (legacy format - inline) */ - code?: string - /** Selected operation for multi-operation tools */ - operation?: string - /** Tool usage control mode for LLM */ - usageControl?: 'auto' | 'force' | 'none' -} +export type { StoredTool } from '@/lib/workflows/tool-input/types' diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate.ts index 91976d7ac91..338e3b9a11f 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate.ts @@ -6,6 +6,8 @@ import { useStoreWithEqualityFn } from 'zustand/traditional' import { buildCanonicalIndex, isNonEmptyValue, + normalizeDependencyValue, + parseDependsOn, resolveDependencyValue, } from '@/lib/workflows/subblocks/visibility' import { getBlock } from '@/blocks/registry' @@ -14,35 +16,6 @@ import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useSubBlockStore } from 
'@/stores/workflows/subblock/store' import { useWorkflowStore } from '@/stores/workflows/workflow/store' -type DependsOnConfig = string[] | { all?: string[]; any?: string[] } - -/** - * Parses dependsOn config and returns normalized all/any arrays - */ -function parseDependsOn(dependsOn: DependsOnConfig | undefined): { - allFields: string[] - anyFields: string[] - allDependsOnFields: string[] -} { - if (!dependsOn) { - return { allFields: [], anyFields: [], allDependsOnFields: [] } - } - - if (Array.isArray(dependsOn)) { - // Simple array format: all fields required (AND logic) - return { allFields: dependsOn, anyFields: [], allDependsOnFields: dependsOn } - } - - // Object format with all/any - const allFields = dependsOn.all || [] - const anyFields = dependsOn.any || [] - return { - allFields, - anyFields, - allDependsOnFields: [...allFields, ...anyFields], - } -} - /** * Centralized dependsOn gating for sub-block components. * - Computes dependency values from the active workflow/block @@ -76,29 +49,6 @@ export function useDependsOnGate( // For backward compatibility, expose flat list of all dependency fields const dependsOn = allDependsOnFields - const normalizeDependencyValue = (rawValue: unknown): unknown => { - if (rawValue === null || rawValue === undefined) return null - - if (typeof rawValue === 'object') { - if (Array.isArray(rawValue)) { - if (rawValue.length === 0) return null - return rawValue.map((item) => normalizeDependencyValue(item)) - } - - const record = rawValue as Record - if ('value' in record) { - return normalizeDependencyValue(record.value) - } - if ('id' in record) { - return record.id - } - - return record - } - - return rawValue - } - const dependencySelector = useCallback( (state: ReturnType) => { if (allDependsOnFields.length === 0) return {} as Record diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx 
b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx index db50e5b3200..b69a2238c1f 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx @@ -54,6 +54,8 @@ import { MODAL_REGISTRY } from '@/app/workspace/[workspaceId]/w/[workflowId]/com import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate' import type { SubBlockConfig } from '@/blocks/types' import { useWebhookManagement } from '@/hooks/use-webhook-management' +import type { ActiveSearchTarget } from '@/stores/panel/editor/store' +import { WORKFLOW_SEARCH_HIGHLIGHT_CLASS } from '../constants' const SLACK_OVERRIDES: SelectorOverrides = { transformContext: (context, deps) => { @@ -72,7 +74,17 @@ const FOLDER_OVERRIDES: SelectorOverrides = { }, } -const WORKFLOW_SEARCH_CURRENT_MATCH_CLASS = 'rounded-md bg-orange-400 px-1 py-0.5' +function hasNestedWorkflowSearchHighlight( + config: SubBlockConfig, + activeSearchTarget?: ActiveSearchTarget | null +) { + if (!activeSearchTarget || activeSearchTarget.valuePath.length === 0) return false + return ( + config.type === 'input-format' || + config.type === 'response-format' || + config.type === 'eval-input' + ) +} /** * Interface for wand control handlers exposed by sub-block inputs @@ -106,6 +118,7 @@ interface SubBlockProps { /** Provides sibling values for dependency resolution in non-preview contexts (e.g. tool-input) */ dependencyContext?: Record isSearchHighlighted?: boolean + activeSearchTarget?: ActiveSearchTarget | null } /** @@ -253,7 +266,7 @@ const renderLabel = (
    ) } + +export const MessageContent = memo(MessageContentInner) diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-chat/mothership-chat.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-chat/mothership-chat.tsx index 4693b19de4a..809c190ffea 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-chat/mothership-chat.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-chat/mothership-chat.tsx @@ -1,6 +1,6 @@ 'use client' -import { useLayoutEffect, useRef } from 'react' +import { memo, useCallback, useEffect, useLayoutEffect, useMemo, useRef } from 'react' import { cn } from '@/lib/core/utils/cn' import { MessageActions } from '@/app/workspace/[workspaceId]/components' import { ChatMessageAttachments } from '@/app/workspace/[workspaceId]/home/components/chat-message-attachments' @@ -17,6 +17,9 @@ import { import { UserMessageContent } from '@/app/workspace/[workspaceId]/home/components/user-message-content' import type { ChatMessage, + ChatMessageAttachment, + ChatMessageContext, + ContentBlock, FileAttachmentForApi, MothershipResource, QueuedMessage, @@ -78,6 +81,100 @@ const LAYOUT_STYLES = { }, } as const +const EMPTY_BLOCKS: ContentBlock[] = [] + +interface UserMessageRowProps { + content: string + contexts?: ChatMessageContext[] + attachments?: ChatMessageAttachment[] + rowClassName: string + bubbleClassName: string + attachmentWidthClassName: string +} + +const UserMessageRow = memo(function UserMessageRow({ + content, + contexts, + attachments, + rowClassName, + bubbleClassName, + attachmentWidthClassName, +}: UserMessageRowProps) { + const hasAttachments = Boolean(attachments?.length) + return ( +
    + {hasAttachments && ( + + )} +
    + +
    +
    + ) +}) + +interface AssistantMessageRowProps { + message: ChatMessage + isStreaming: boolean + precedingUserContent?: string + chatId?: string + rowClassName: string + onOptionSelect?: (id: string) => void + onWorkspaceResourceSelect?: (resource: MothershipResource) => void +} + +const AssistantMessageRow = memo(function AssistantMessageRow({ + message, + isStreaming, + precedingUserContent, + chatId, + rowClassName, + onOptionSelect, + onWorkspaceResourceSelect, +}: AssistantMessageRowProps) { + const blocks = message.contentBlocks ?? EMPTY_BLOCKS + const hasAnyBlocks = blocks.length > 0 + const trimmedContent = message.content?.trim() ?? '' + + if (!hasAnyBlocks && !trimmedContent && isStreaming) { + return + } + + const hasRenderableAssistant = assistantMessageHasRenderableContent(blocks, message.content ?? '') + if (!hasRenderableAssistant && !trimmedContent && !isStreaming) { + return null + } + + const showActions = !isStreaming && (message.content || hasAnyBlocks) + + return ( +
    + + {showActions && ( +
    + +
    + )} +
    + ) +}) + export function MothershipChat({ messages, isSending, @@ -111,17 +208,31 @@ export function MothershipChat({ const { staged: stagedMessages, isStaging } = useProgressiveList(messages, stagingKey) const stagedMessageCount = stagedMessages.length const stagedOffset = messages.length - stagedMessages.length - const precedingUserContentByIndex: Array = [] - let lastUserContent: string | undefined - for (const [index, message] of messages.entries()) { - precedingUserContentByIndex[index] = lastUserContent - if (message.role === 'user') { - lastUserContent = message.content + const precedingUserContentByIndex = useMemo(() => { + const out: Array = [] + let lastUserContent: string | undefined + for (const [index, message] of messages.entries()) { + out[index] = lastUserContent + if (message.role === 'user') lastUserContent = message.content } - } + return out + }, [messages]) const initialScrollDoneRef = useRef(false) const userInputRef = useRef(null) + const onSubmitRef = useRef(onSubmit) + const onWorkspaceResourceSelectRef = useRef(onWorkspaceResourceSelect) + useEffect(() => { + onSubmitRef.current = onSubmit + onWorkspaceResourceSelectRef.current = onWorkspaceResourceSelect + }, [onSubmit, onWorkspaceResourceSelect]) + const stableOnOptionSelect = useCallback((id: string) => { + onSubmitRef.current(id) + }, []) + const stableOnWorkspaceResourceSelect = useCallback((resource: MothershipResource) => { + onWorkspaceResourceSelectRef.current?.(resource) + }, []) + function handleSendQueuedHead() { const topMessage = messageQueue[0] if (!topMessage) return @@ -164,63 +275,31 @@ export function MothershipChat({ {stagedMessages.map((msg, localIndex) => { const index = stagedOffset + localIndex if (msg.role === 'user') { - const hasAttachments = Boolean(msg.attachments?.length) return ( -
    - {hasAttachments && ( - - )} -
    - -
    -
    + ) } - const hasAnyBlocks = Boolean(msg.contentBlocks?.length) - const hasRenderableAssistant = assistantMessageHasRenderableContent( - msg.contentBlocks ?? [], - msg.content ?? '' - ) - const isLastAssistant = index === messages.length - 1 - const isThisStreaming = isStreamActive && isLastAssistant - - if (!hasAnyBlocks && !msg.content?.trim() && isThisStreaming) { - return - } - - if (!hasRenderableAssistant && !msg.content?.trim() && !isThisStreaming) { - return null - } - - const isLastMessage = index === messages.length - 1 - const precedingUserContent = precedingUserContentByIndex[index] - + const isLast = index === messages.length - 1 return ( -
    - - {!isThisStreaming && (msg.content || msg.contentBlocks?.length) && ( -
    - -
    - )} -
    + ) })}
    diff --git a/apps/sim/lib/copilot/chat/display-message.ts b/apps/sim/lib/copilot/chat/display-message.ts index 51622070009..b0e38557021 100644 --- a/apps/sim/lib/copilot/chat/display-message.ts +++ b/apps/sim/lib/copilot/chat/display-message.ts @@ -112,7 +112,18 @@ function toDisplayContexts( })) } +const displayMessageCache = new WeakMap() + +/** + * Maps a `PersistedMessage` (server wire shape) to a `ChatMessage` (UI shape). + * Reference-stable: returns the same object for a given `PersistedMessage` + * instance so `React.memo` boundaries downstream of React Query's structural + * sharing can short-circuit on identity. + */ export function toDisplayMessage(msg: PersistedMessage): ChatMessage { + const cached = displayMessageCache.get(msg) + if (cached) return cached + const display: ChatMessage = { id: msg.id, role: msg.role, @@ -136,5 +147,6 @@ export function toDisplayMessage(msg: PersistedMessage): ChatMessage { display.contexts = toDisplayContexts(msg.contexts) + displayMessageCache.set(msg, display) return display } From 50d4afd4a8f5b57585225f844dd0b5aa3718c9f2 Mon Sep 17 00:00:00 2001 From: Waleed Date: Fri, 8 May 2026 16:56:06 -0700 Subject: [PATCH 23/33] fix(logs): include subfolders when filtering logs by folder (#4525) * fix(logs): include subfolders when filtering logs by folder * fix(logs): use pop() for O(1) dequeue in folder BFS * fix(logs): move folder expansion to server-only module to fix client bundle build --- apps/sim/app/api/logs/export/route.ts | 5 +++ apps/sim/app/api/logs/route.ts | 5 +++ apps/sim/app/api/logs/stats/route.ts | 8 ++++ apps/sim/lib/logs/folder-expansion.ts | 53 +++++++++++++++++++++++++++ 4 files changed, 71 insertions(+) create mode 100644 apps/sim/lib/logs/folder-expansion.ts diff --git a/apps/sim/app/api/logs/export/route.ts b/apps/sim/app/api/logs/export/route.ts index b814678caf6..2c817411b68 100644 --- a/apps/sim/app/api/logs/export/route.ts +++ b/apps/sim/app/api/logs/export/route.ts @@ -6,6 +6,7 @@ import { type 
NextRequest, NextResponse } from 'next/server' import { getSession } from '@/lib/auth' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { buildFilterConditions, LogFilterParamsSchema } from '@/lib/logs/filters' +import { expandFolderIdsWithDescendants } from '@/lib/logs/folder-expansion' const logger = createLogger('LogsExportAPI') @@ -45,6 +46,10 @@ export const GET = withRouteHandler(async (request: NextRequest) => { workflowName: sql`COALESCE(${workflow.name}, 'Deleted Workflow')`, } + if (params.folderIds) { + params.folderIds = await expandFolderIdsWithDescendants(params.workspaceId, params.folderIds) + } + const workspaceCondition = eq(workflowExecutionLogs.workspaceId, params.workspaceId) const filterConditions = buildFilterConditions(params) const conditions = filterConditions diff --git a/apps/sim/app/api/logs/route.ts b/apps/sim/app/api/logs/route.ts index cb3690441d2..89f52048b72 100644 --- a/apps/sim/app/api/logs/route.ts +++ b/apps/sim/app/api/logs/route.ts @@ -32,6 +32,7 @@ import { parseRequest } from '@/lib/api/server' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { buildFilterConditions } from '@/lib/logs/filters' +import { expandFolderIdsWithDescendants } from '@/lib/logs/folder-expansion' const logger = createLogger('LogsAPI') @@ -162,6 +163,10 @@ export const GET = withRouteHandler(async (request: NextRequest) => { } } + if (params.folderIds) { + params.folderIds = await expandFolderIdsWithDescendants(params.workspaceId, params.folderIds) + } + const commonFilters = buildFilterConditions(params, { useSimpleLevelFilter: false }) if (commonFilters) workflowConditions.push(commonFilters) diff --git a/apps/sim/app/api/logs/stats/route.ts b/apps/sim/app/api/logs/stats/route.ts index 17e6a592328..930e2e36d39 100644 --- a/apps/sim/app/api/logs/stats/route.ts +++ b/apps/sim/app/api/logs/stats/route.ts @@ -14,6 +14,7 @@ import { 
getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { buildFilterConditions } from '@/lib/logs/filters' +import { expandFolderIdsWithDescendants } from '@/lib/logs/folder-expansion' const logger = createLogger('LogsStatsAPI') @@ -37,6 +38,13 @@ export const GET = withRouteHandler(async (request: NextRequest) => { const workspaceFilter = eq(workflowExecutionLogs.workspaceId, params.workspaceId) + if (params.folderIds) { + params.folderIds = await expandFolderIdsWithDescendants( + params.workspaceId, + params.folderIds + ) + } + const commonFilters = buildFilterConditions(params, { useSimpleLevelFilter: true }) const whereCondition = commonFilters ? and(workspaceFilter, commonFilters) : workspaceFilter diff --git a/apps/sim/lib/logs/folder-expansion.ts b/apps/sim/lib/logs/folder-expansion.ts new file mode 100644 index 00000000000..1ac5c599a70 --- /dev/null +++ b/apps/sim/lib/logs/folder-expansion.ts @@ -0,0 +1,53 @@ +import { db } from '@sim/db' +import { workflowFolder } from '@sim/db/schema' +import { and, eq, isNull } from 'drizzle-orm' + +/** + * Expands a CSV of selected folder IDs to include every descendant folder in the + * workspace, so that filtering by a parent folder also matches workflows that + * live in nested subfolders. + * + * Returns the original CSV when there are no descendants (or when the input is + * empty / undefined). Unknown IDs are preserved so the caller's `inArray` check + * behaves the same as today (matches nothing). + * + * Server-only: pulls in the database client. Keep separate from `filters.ts` + * (imported by client hooks) to avoid leaking postgres into the browser bundle. 
+ */ +export async function expandFolderIdsWithDescendants( + workspaceId: string, + folderIdsCsv: string | undefined +): Promise { + if (!folderIdsCsv) return folderIdsCsv + const seedIds = folderIdsCsv.split(',').filter(Boolean) + if (seedIds.length === 0) return folderIdsCsv + + const rows = await db + .select({ id: workflowFolder.id, parentId: workflowFolder.parentId }) + .from(workflowFolder) + .where(and(eq(workflowFolder.workspaceId, workspaceId), isNull(workflowFolder.archivedAt))) + + const childrenByParent = new Map() + for (const row of rows) { + if (!row.parentId) continue + const list = childrenByParent.get(row.parentId) + if (list) list.push(row.id) + else childrenByParent.set(row.parentId, [row.id]) + } + + const expanded = new Set(seedIds) + const queue = [...seedIds] + while (queue.length > 0) { + const current = queue.pop() as string + const children = childrenByParent.get(current) + if (!children) continue + for (const childId of children) { + if (!expanded.has(childId)) { + expanded.add(childId) + queue.push(childId) + } + } + } + + return Array.from(expanded).join(',') +} From b74f8dac4040d96581982b4e24dc1eb2d4759cea Mon Sep 17 00:00:00 2001 From: Waleed Date: Fri, 8 May 2026 17:26:55 -0700 Subject: [PATCH 24/33] =?UTF-8?q?improvement(sandbox):=20expand=20document?= =?UTF-8?q?=20generation=20=E2=80=94=20style=20extraction,=20sandbox=20har?= =?UTF-8?q?dening,=20OOM=20errors,=20task=20guards=20(#4526)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * improvement(sandbox): expand document generation — style extraction, sandbox hardening, OOM errors, PPTX/DOCX/PDF task guards * fix(style): make pptx aspect-ratio regex attribute-order independent * fix(sandbox): clarify pptx null-guard message; fix bold=false inheritance sentinel in docx style extractor * chore(lint): suppress noTemplateCurlyInString in resolver tests — strings intentionally assert template literal preservation * fix(contracts): export 
ListWorkspaceFilesResponse type from workspace-files contract --- .../[id]/files/[fileId]/style/route.ts | 37 +- apps/sim/executor/variables/resolver.test.ts | 4 + apps/sim/lib/api/contracts/workspace-files.ts | 34 +- apps/sim/lib/copilot/vfs/document-style.ts | 372 +++++++++++++++--- apps/sim/lib/copilot/vfs/workspace-vfs.ts | 8 +- apps/sim/lib/execution/isolated-vm-worker.cjs | 68 ++-- apps/sim/sandbox-tasks/docx-generate.ts | 10 +- apps/sim/sandbox-tasks/pptx-generate.ts | 3 + 8 files changed, 427 insertions(+), 109 deletions(-) diff --git a/apps/sim/app/api/workspaces/[id]/files/[fileId]/style/route.ts b/apps/sim/app/api/workspaces/[id]/files/[fileId]/style/route.ts index c30d0e9723f..cc68e4dc348 100644 --- a/apps/sim/app/api/workspaces/[id]/files/[fileId]/style/route.ts +++ b/apps/sim/app/api/workspaces/[id]/files/[fileId]/style/route.ts @@ -16,21 +16,23 @@ const logger = createLogger('WorkspaceFileStyleAPI') /** * GET /api/workspaces/[id]/files/[fileId]/style - * Extract a compact JSON style summary from an uploaded .docx or .pptx file. - * Uses OOXML theme XML to return theme colors, font pair, and named styles. - * Only works on binary OOXML files (ZIP format) — not on JS source files. + * Extract a compact JSON style summary from an uploaded .docx, .pptx, or .pdf file. + * OOXML files return theme colors, font pair, and named styles. + * PDF files return page dimensions and embedded font names. 
*/ +const MAX_STYLE_FILE_BYTES = 100 * 1024 * 1024 // 100 MB + export const GET = withRouteHandler( async (request: NextRequest, context: { params: Promise<{ id: string; fileId: string }> }) => { - const parsed = await parseRequest(workspaceFileStyleContract, request, context) - if (!parsed.success) return parsed.response - const { id: workspaceId, fileId } = parsed.data.params - const session = await getSession() if (!session?.user?.id) { return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) } + const parsed = await parseRequest(workspaceFileStyleContract, request, context) + if (!parsed.success) return parsed.response + const { id: workspaceId, fileId } = parsed.data.params + const membership = await verifyWorkspaceMembership(session.user.id, workspaceId) if (!membership) { return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) @@ -42,13 +44,20 @@ export const GET = withRouteHandler( } const rawExt = fileRecord.name.split('.').pop()?.toLowerCase() - if (rawExt !== 'docx' && rawExt !== 'pptx') { + if (rawExt !== 'docx' && rawExt !== 'pptx' && rawExt !== 'pdf') { return NextResponse.json( - { error: 'Style extraction only supports .docx and .pptx files' }, + { error: 'Style extraction supports .docx, .pptx, and .pdf files' }, + { status: 422 } + ) + } + const ext: 'docx' | 'pptx' | 'pdf' = rawExt + + if (fileRecord.size > MAX_STYLE_FILE_BYTES) { + return NextResponse.json( + { error: 'File is too large for style extraction (limit: 100 MB)' }, { status: 422 } ) } - const ext: 'docx' | 'pptx' = rawExt let buffer: Buffer try { @@ -66,17 +75,13 @@ export const GET = withRouteHandler( return NextResponse.json( { error: - 'File is not a compiled binary document — style extraction requires an uploaded or compiled .docx/.pptx file', + 'Could not extract style — file may be encrypted, corrupt, image-only, or contain no parseable style information', }, { status: 422 } ) } - logger.info('Extracted style summary via API', { - fileId, - 
format: ext, - themeName: summary.theme.name, - }) + logger.info('Extracted style summary via API', { fileId, format: ext }) return NextResponse.json(summary, { headers: { 'Cache-Control': 'private, max-age=300' }, diff --git a/apps/sim/executor/variables/resolver.test.ts b/apps/sim/executor/variables/resolver.test.ts index 8f255269661..9fe0e6273fd 100644 --- a/apps/sim/executor/variables/resolver.test.ts +++ b/apps/sim/executor/variables/resolver.test.ts @@ -127,6 +127,7 @@ describe('VariableResolver function block inputs', () => { ) expect(result.resolvedInputs.code).toBe( + // biome-ignore lint/suspicious/noTemplateCurlyInString: intentional — asserting template literal is preserved 'return `value: ${JSON.stringify(globalThis["__blockRef_0"])}`' ) expect(result.displayInputs.code).toBe('return `value: "hello world"`') @@ -139,11 +140,14 @@ describe('VariableResolver function block inputs', () => { const result = resolver.resolveInputsForFunctionBlock( ctx, 'function', + // biome-ignore lint/suspicious/noTemplateCurlyInString: intentional — asserting template literal is preserved { code: 'return `${String()}`' }, block ) + // biome-ignore lint/suspicious/noTemplateCurlyInString: intentional — asserting template literal is preserved expect(result.resolvedInputs.code).toBe('return `${String(globalThis["__blockRef_0"])}`') + // biome-ignore lint/suspicious/noTemplateCurlyInString: intentional — asserting template literal is preserved expect(result.displayInputs.code).toBe('return `${String("hello world")}`') expect(result.contextVariables).toEqual({ __blockRef_0: 'hello world' }) }) diff --git a/apps/sim/lib/api/contracts/workspace-files.ts b/apps/sim/lib/api/contracts/workspace-files.ts index b999b6ac2a5..3fc1a57d6e4 100644 --- a/apps/sim/lib/api/contracts/workspace-files.ts +++ b/apps/sim/lib/api/contracts/workspace-files.ts @@ -1,5 +1,5 @@ import { z } from 'zod' -import { type ContractJsonResponse, defineRouteContract } from '@/lib/api/contracts/types' +import { 
defineRouteContract } from '@/lib/api/contracts/types' export const workspaceFileScopeSchema = z.enum(['active', 'archived', 'all']) @@ -46,6 +46,12 @@ const workspaceFileSuccessSchema = z.object({ success: z.boolean(), }) +const listWorkspaceFilesResponseSchema = workspaceFileSuccessSchema.extend({ + files: z.array(workspaceFileRecordSchema), +}) + +export type ListWorkspaceFilesResponse = z.output + export const listWorkspaceFilesContract = defineRouteContract({ method: 'GET', path: '/api/workspaces/[id]/files', @@ -53,12 +59,9 @@ export const listWorkspaceFilesContract = defineRouteContract({ query: listWorkspaceFilesQuerySchema, response: { mode: 'json', - schema: workspaceFileSuccessSchema.extend({ - files: z.array(workspaceFileRecordSchema), - }), + schema: listWorkspaceFilesResponseSchema, }, }) -export type ListWorkspaceFilesResponse = ContractJsonResponse export const renameWorkspaceFileContract = defineRouteContract({ method: 'PATCH', @@ -108,15 +111,30 @@ export const updateWorkspaceFileContentContract = defineRouteContract({ const documentStyleSummarySchema = z .object({ - format: z.enum(['docx', 'pptx']), + format: z.enum(['docx', 'pptx', 'pdf']), + // OOXML theme — present for pptx, present for docx when theme1.xml exists, absent for pdf theme: z .object({ - name: z.string(), colors: z.record(z.string(), z.string()), fonts: z.object({ major: z.string(), minor: z.string() }), }) - .passthrough(), + .optional(), + // docx only styles: z.array(z.object({}).passthrough()).optional(), + defaults: z.object({ fontSize: z.number().optional(), font: z.string().optional() }).optional(), + // pdf only + pageSize: z + .object({ + preset: z.enum(['A4', 'letter', 'custom']), + widthPt: z.number().optional(), + heightPt: z.number().optional(), + }) + .optional(), + fonts: z.array(z.string()).optional(), + // pptx only + slideCount: z.number().optional(), + aspectRatio: z.enum(['16:9', '4:3', 'custom']).optional(), + background: z.string().optional(), }) 
.passthrough() diff --git a/apps/sim/lib/copilot/vfs/document-style.ts b/apps/sim/lib/copilot/vfs/document-style.ts index 3c1ebac6c57..7edbe202fc2 100644 --- a/apps/sim/lib/copilot/vfs/document-style.ts +++ b/apps/sim/lib/copilot/vfs/document-style.ts @@ -17,17 +17,16 @@ interface ThemeColors { accent4: string accent5: string accent6: string - hlink: string - folHlink: string } export interface DocumentStyleSummary { - format: 'docx' | 'pptx' - theme: { - name: string + format: 'docx' | 'pptx' | 'pdf' + /** OOXML theme — present for pptx; present for docx when theme1.xml exists; absent for pdf */ + theme?: { colors: Partial fonts: { major: string; minor: string } } + /** Named paragraph/character styles — docx only */ styles?: Array<{ id: string name: string @@ -37,6 +36,25 @@ export interface DocumentStyleSummary { color?: string font?: string }> + /** Document-wide default run properties (body text baseline) — docx only */ + defaults?: { + fontSize?: number + font?: string + } + /** Page dimensions — pdf only. 
widthPt/heightPt present only when preset is 'custom' */ + pageSize?: { + preset: 'A4' | 'letter' | 'custom' + widthPt?: number + heightPt?: number + } + /** Embedded font names extracted from page resource dictionaries — pdf only */ + fonts?: string[] + /** Number of slides — pptx only */ + slideCount?: number + /** Slide aspect ratio — pptx only */ + aspectRatio?: '16:9' | '4:3' | 'custom' + /** Slide master background hex color (no #) — pptx only, absent when background is transparent/image */ + background?: string } function attr(xml: string, name: string): string { @@ -69,8 +87,7 @@ function parseFontScheme(xml: string): { major: string; minor: string } { return { major: attr(major, 'typeface') || '', minor: attr(minor, 'typeface') || '' } } -function parseThemeXml(xml: string): DocumentStyleSummary['theme'] { - const clrSchemeMatch = /]*name="([^"]*)"/.exec(xml) +function parseThemeXml(xml: string): NonNullable { const slots: Array = [ 'dk1', 'lt1', @@ -82,63 +99,296 @@ function parseThemeXml(xml: string): DocumentStyleSummary['theme'] { 'accent4', 'accent5', 'accent6', - 'hlink', - 'folHlink', ] const colors: Partial = {} for (const slot of slots) { const hex = parseColorSlot(xml, slot) if (hex) colors[slot] = hex } - return { name: clrSchemeMatch?.[1] ?? 
'', colors, fonts: parseFontScheme(xml) } + return { colors, fonts: parseFontScheme(xml) } +} + +type StyleRaw = { + id: string + name: string + type: string + basedOn?: string + fontSize?: number + bold?: boolean + color?: string + font?: string + /** Raw w:asciiTheme value — resolved to a font name after parsing */ + themeFont?: string } -function parseDocxStyles(xml: string): DocumentStyleSummary['styles'] { - const targetIds = new Set([ - 'Normal', - 'DefaultParagraphFont', - 'Heading1', - 'Heading2', - 'Heading3', - 'Title', - 'Subtitle', - ]) - const results: DocumentStyleSummary['styles'] = [] - const blocks = xml.split(' + defaults?: DocumentStyleSummary['defaults'] +} { + // Extract document-default run properties (the baseline for body text) + const defaults: DocumentStyleSummary['defaults'] = {} + const docDefaultsBlock = between(xml, '', '') + if (docDefaultsBlock) { + const rPrBlock = between(docDefaultsBlock, '', '') + if (rPrBlock) { + const szMatch = /]*)>/.exec(rPrBlock) + if (fontAttrMatch) { + const { font } = parseFontAttrs(fontAttrMatch[1], themeFonts) + if (font) defaults.font = font + } + } + } + + // Build a full style map for basedOn inheritance resolution + const styleMap = new Map() + for (const block of xml.split('/.test(block) && !/]*w:ascii="([^"]*)"/.exec(block) - const font = fontMatch?.[1] - results.push({ - id: styleId, - name, - type: styleType, - ...(fontSize !== undefined && { fontSize }), - ...(bold && { bold }), - ...(color && { color }), + const fontAttrMatch = /]*)>/.exec(block) + const { font, themeFont } = fontAttrMatch ? parseFontAttrs(fontAttrMatch[1], themeFonts) : {} + + styleMap.set(id, { + id, + name: nameMatch?.[1] ?? 
id, + type, + ...(basedOnMatch && { basedOn: basedOnMatch[1] }), + ...(szMatch && { fontSize: Math.round(Number.parseInt(szMatch[1]) / 2) }), + ...(//.test(block) && { + bold: !/]*\bw:val=["'](0|false)["']/.test(block), + }), + ...(colorMatch && { color: colorMatch[1].toUpperCase() }), ...(font && { font }), + ...(themeFont && { themeFont }), }) } - return results + + function resolveInheritance(id: string, visited = new Set()): StyleRaw | undefined { + if (visited.has(id)) return undefined + visited.add(id) + const s = styleMap.get(id) + if (!s) return undefined + if (!s.basedOn) return s + const parent = resolveInheritance(s.basedOn, visited) + if (!parent) return s + // Own properties override parent; undefined falls through to parent + return { + ...parent, + ...s, + fontSize: s.fontSize ?? parent.fontSize, + bold: s.bold ?? parent.bold, + color: s.color ?? parent.color, + font: s.font ?? parent.font, + themeFont: s.themeFont ?? parent.themeFont, + } + } + + // Target paragraph styles (character styles excluded — generation works at paragraph level) + const targetIds: string[] = ['Normal', 'BodyText', 'Body Text', 'Title', 'Subtitle'] + for (const id of styleMap.keys()) { + // Match both 'Heading1' (Office) and 'heading1' (LibreOffice) style IDs + if (/^[Hh]eading\d/.test(id) && !targetIds.includes(id)) targetIds.push(id) + } + + const styles: NonNullable = [] + const seen = new Set() + for (const id of targetIds) { + if (seen.has(id)) continue + seen.add(id) + const resolved = resolveInheritance(id) + if (!resolved || resolved.type !== 'paragraph') continue + + // Deferred theme font resolution (only reached when themeFonts was unavailable during parse) + let resolvedFont = resolved.font + if (!resolvedFont && resolved.themeFont && themeFonts) { + resolvedFont = resolveThemeFont(resolved.themeFont, themeFonts) + } + + styles.push({ + id: resolved.id, + name: resolved.name, + type: resolved.type, + ...(resolved.fontSize !== undefined && { fontSize: 
resolved.fontSize }), + ...(resolved.bold !== undefined && { bold: resolved.bold }), + ...(resolved.color && { color: resolved.color }), + ...(resolvedFont && { font: resolvedFont }), + }) + } + + return { + styles, + ...(Object.keys(defaults).length > 0 && { defaults }), + } +} + +async function extractPdfStyle(buffer: Buffer): Promise { + try { + const { PDFDocument, PDFName, PDFDict } = await import('pdf-lib') + + let doc: Awaited> + try { + doc = await PDFDocument.load(buffer, { updateMetadata: false }) + } catch { + // Encrypted or corrupt + return null + } + + const pages = doc.getPages() + if (pages.length === 0) return null + + // Page dimensions (first page is canonical for preset detection) + const { width: widthPt, height: heightPt } = pages[0].getSize() + let preset: 'A4' | 'letter' | 'custom' = 'custom' + if (Math.abs(widthPt - 595.28) < 5 && Math.abs(heightPt - 841.89) < 5) preset = 'A4' + else if (Math.abs(widthPt - 612) < 5 && Math.abs(heightPt - 792) < 5) preset = 'letter' + + // Font names from page resource dictionaries (first 10 pages to bound cost) + const rawFontNames = new Set() + const pagesToScan = Math.min(pages.length, 10) + for (let i = 0; i < pagesToScan; i++) { + try { + const resourcesRef = pages[i].node.get(PDFName.of('Resources')) + if (!resourcesRef) continue + const resources = doc.context.lookup(resourcesRef, PDFDict) + if (!resources) continue + const fontDictRef = resources.get(PDFName.of('Font')) + if (!fontDictRef) continue + const fontDict = doc.context.lookup(fontDictRef, PDFDict) + if (!fontDict) continue + for (const key of fontDict.keys()) { + try { + const fontRef = fontDict.get(key) + if (!fontRef) continue + const fontObj = doc.context.lookup(fontRef, PDFDict) + if (!fontObj) continue + const baseFontRef = fontObj.get(PDFName.of('BaseFont')) + if (!baseFontRef) continue + // Format: "/ABCDEF+FontName" (subset) or "/FontName" (full embed) + const raw = baseFontRef + .toString() + .replace(/^\//, '') + 
.replace(/^[A-Z]{6}\+/, '') + if (raw) rawFontNames.add(raw) + } catch {} + } + } catch {} + } + + // Normalize to unique font family names by stripping PostScript weight/style suffixes. + // Apply the strip in a loop to handle compound suffixes (e.g. SemiBoldItalic, LightOblique). + // BoldMT must precede Bold, Oblique must precede the simple form, etc. + const SUFFIX_RX = + /[-]?(BoldMT|BoldOblique|BoldItalic|SemiBoldItalic|ExtraBoldItalic|LightItalic|LightOblique|MediumItalic|Regular|ExtraBold|SemiBold|Medium|Black|Light|Bold|Italic|Oblique|Condensed|Expanded|MT)$/i + const familyNames = [ + ...new Set( + [...rawFontNames].map((name) => { + let n = name + // Strip up to 3 suffix components to handle compound PostScript names + for (let i = 0; i < 3; i++) { + const stripped = n.replace(SUFFIX_RX, '').trim() + if (stripped === n) break + n = stripped + } + return n + }) + ), + ].filter(Boolean) + + // Omit exact dimensions when the preset already encodes the page size + const pageSize: DocumentStyleSummary['pageSize'] = + preset === 'custom' + ? 
{ widthPt: Math.round(widthPt), heightPt: Math.round(heightPt), preset } + : { preset } + + return { + format: 'pdf', + pageSize, + ...(familyNames.length > 0 && { fonts: familyNames }), + } + } catch (err) { + logger.warn('Failed to extract PDF style', { error: toError(err).message }) + return null + } +} + +function parsePptxPresentation(xml: string): { + slideCount: number + aspectRatio: '16:9' | '4:3' | 'custom' +} { + // Count sldId elements inside sldIdLst + const sldIdLst = between(xml, '', '') + const slideCount = (sldIdLst.match(/]*\bcx="(\d+)"/.exec(xml) + const cyMatch = /]*\bcy="(\d+)"/.exec(xml) + let aspectRatio: '16:9' | '4:3' | 'custom' = 'custom' + if (cxMatch && cyMatch) { + const cx = Number.parseInt(cxMatch[1]) + const cy = Number.parseInt(cyMatch[1]) + const ratio = cx / cy + if (Math.abs(ratio - 16 / 9) < 0.01) aspectRatio = '16:9' + else if (Math.abs(ratio - 4 / 3) < 0.01) aspectRatio = '4:3' + } + + return { slideCount, aspectRatio } +} + +function parseSlideMasterBackground(xml: string): string | undefined { + // Look for a solid fill color in the slide master background + const bgBlock = between(xml, '', '') + if (!bgBlock) return undefined + // solidFill with srgbClr + const srgbMatch = /]*\bval="([A-Fa-f0-9]{6})"/.exec(bgBlock) + if (srgbMatch) return srgbMatch[1].toUpperCase() + // solidFill with sysClr fallback + const sysMatch = /]*\blastClr="([A-Fa-f0-9]{6})"/.exec(bgBlock) + if (sysMatch) return sysMatch[1].toUpperCase() + return undefined } /** - * Extract a compact style summary from a binary OOXML (.docx or .pptx) buffer. - * Returns null if the buffer is not a valid ZIP/OOXML file. + * Extract a compact style summary from a binary document buffer. + * Supports .docx and .pptx (OOXML/ZIP) and .pdf. + * Returns null if the buffer cannot be parsed or yields no useful data. 
*/ export async function extractDocumentStyle( buffer: Buffer, - ext: 'docx' | 'pptx' + ext: 'docx' | 'pptx' | 'pdf' ): Promise { + if (ext === 'pdf') { + return extractPdfStyle(buffer) + } + if (buffer.length < 4) return null for (let i = 0; i < 4; i++) { if (buffer[i] !== ZIP_MAGIC[i]) return null @@ -150,16 +400,42 @@ export async function extractDocumentStyle( const themePath = ext === 'docx' ? 'word/theme/theme1.xml' : 'ppt/theme/theme1.xml' const themeFile = zip.file(themePath) - if (!themeFile) return null - const theme = parseThemeXml(await themeFile.async('string')) - const summary: DocumentStyleSummary = { format: ext, theme } + let theme: DocumentStyleSummary['theme'] + if (themeFile) { + theme = parseThemeXml(await themeFile.async('string')) + } else if (ext === 'pptx') { + // PPTX without a theme is malformed — nothing useful to return + return null + } + // DOCX without a theme is valid (e.g. LibreOffice-generated); continue with styles only + + const summary: DocumentStyleSummary = { format: ext, ...(theme && { theme }) } if (ext === 'docx') { const stylesFile = zip.file('word/styles.xml') if (stylesFile) { - const styles = parseDocxStyles(await stylesFile.async('string')) - if (styles && styles.length > 0) summary.styles = styles + const { styles, defaults } = parseDocxStyles(await stylesFile.async('string'), theme?.fonts) + if (styles.length > 0) summary.styles = styles + if (defaults) summary.defaults = defaults + } + // If there's neither a theme nor any styles, there's nothing useful to return + if (!theme && !summary.styles?.length) return null + } + + if (ext === 'pptx') { + const presFile = zip.file('ppt/presentation.xml') + if (presFile) { + const { slideCount, aspectRatio } = parsePptxPresentation(await presFile.async('string')) + if (slideCount > 0) summary.slideCount = slideCount + summary.aspectRatio = aspectRatio + } + const masterFile = + zip.file('ppt/slideMasters/slideMaster1.xml') ?? 
+ zip.file('ppt/slidemaster/slidemaster1.xml') + if (masterFile) { + const bg = parseSlideMasterBackground(await masterFile.async('string')) + if (bg) summary.background = bg } } diff --git a/apps/sim/lib/copilot/vfs/workspace-vfs.ts b/apps/sim/lib/copilot/vfs/workspace-vfs.ts index 5ab975876c7..6e5cd70bb7d 100644 --- a/apps/sim/lib/copilot/vfs/workspace-vfs.ts +++ b/apps/sim/lib/copilot/vfs/workspace-vfs.ts @@ -316,7 +316,7 @@ function getStaticComponentFiles(): Map { * tables/{name}/meta.json * files/{name}/meta.json * files/by-id/{id}/meta.json - * files/by-id/{id}/style (dynamic — OOXML theme/font extraction for .docx/.pptx) + * files/by-id/{id}/style (dynamic — style extraction for .docx/.pptx/.pdf) * files/by-id/{id}/compiled-check (dynamic — compile generated source / validate diagrams, returns {ok,error?}) * jobs/{title}/meta.json * jobs/{title}/history.json @@ -457,7 +457,7 @@ export class WorkspaceVFS { * Attempt to read dynamic workspace file content from storage. * Handles images (base64), parseable documents (PDF, etc.), and text files. * Also handles: - * `files/by-id/{id}/style` — OOXML theme/style extraction (.docx / .pptx only) + * `files/by-id/{id}/style` — style extraction (.docx / .pptx / .pdf) * `files/by-id/{id}/compiled-check` — compile JS-source binary files or validate Mermaid diagrams * Returns null if the path doesn't match `files/{name}` / `files/by-id/{id}` or the file isn't found. 
*/ @@ -518,8 +518,8 @@ export class WorkspaceVFS { const record = await getWorkspaceFile(this._workspaceId, fileId) if (!record) return null const rawExt = record.name.split('.').pop()?.toLowerCase() - if (rawExt !== 'docx' && rawExt !== 'pptx') return null - const ext: 'docx' | 'pptx' = rawExt + if (rawExt !== 'docx' && rawExt !== 'pptx' && rawExt !== 'pdf') return null + const ext: 'docx' | 'pptx' | 'pdf' = rawExt const buffer = await fetchWorkspaceFileBuffer(record) const summary = await extractDocumentStyle(buffer, ext) if (!summary) return null diff --git a/apps/sim/lib/execution/isolated-vm-worker.cjs b/apps/sim/lib/execution/isolated-vm-worker.cjs index aa23858e151..5f43c731402 100644 --- a/apps/sim/lib/execution/isolated-vm-worker.cjs +++ b/apps/sim/lib/execution/isolated-vm-worker.cjs @@ -376,6 +376,24 @@ async function executeCode(request, executionId) { stack: err.stack, } + // OOM check must run before the isDisposed guard: isolate OOM auto-disposes + // the isolate (isDisposed becomes true), so the cancel branch would fire first + // and mask the real cause. Message-based detection disambiguates the two. + if ( + err.message.includes('Array buffer allocation failed') || + err.message.includes('memory limit') + ) { + return { + result: null, + stdout, + error: { + message: + 'Execution exceeded memory limit (256 MB). Reduce image sizes or split the work into smaller batches.', + name: 'MemoryLimitError', + }, + } + } + // Host sent a `cancel` IPC which called `isolate.dispose()`. Any // in-flight compileScript/run then throws; detect that authoritatively // via the isolate flag rather than fuzzy-matching the error message. @@ -398,21 +416,6 @@ async function executeCode(request, executionId) { } } - if ( - err.message.includes('Array buffer allocation failed') || - err.message.includes('memory limit') - ) { - return { - result: null, - stdout, - error: { - message: - 'Execution exceeded memory limit (256 MB). 
Reduce image sizes or split the work into smaller batches.', - name: 'MemoryLimitError', - }, - } - } - return { result: null, stdout, @@ -930,6 +933,25 @@ async function executeTask(request, executionId) { timings.total = Date.now() - tStart if (err instanceof Error) { const errorInfo = { message: err.message, name: err.name, stack: err.stack } + // OOM check must run before the isDisposed guard: isolate OOM auto-disposes + // the isolate (isDisposed becomes true), so the cancel branch would fire first + // and mask the real cause. Message-based detection disambiguates the two. + if ( + err.message?.includes('Array buffer allocation failed') || + err.message?.includes('memory limit') + ) { + return { + result: null, + stdout, + error: { + message: + 'Execution exceeded memory limit (256 MB). Reduce image sizes or split the work into smaller batches.', + name: 'MemoryLimitError', + }, + timings, + } + } + // Cancellation: host sent `cancel` IPC which called `isolate.dispose()`. // Detect authoritatively via the isolate flag so we don't depend on // isolated-vm's internal error wording. @@ -953,22 +975,6 @@ async function executeTask(request, executionId) { } } - if ( - err.message?.includes('Array buffer allocation failed') || - err.message?.includes('memory limit') - ) { - return { - result: null, - stdout, - error: { - message: - 'Execution exceeded memory limit (256 MB). 
Reduce image sizes or split the work into smaller batches.', - name: 'MemoryLimitError', - }, - timings, - } - } - return { result: null, stdout, diff --git a/apps/sim/sandbox-tasks/docx-generate.ts b/apps/sim/sandbox-tasks/docx-generate.ts index 214b9f8f41f..d93954d923c 100644 --- a/apps/sim/sandbox-tasks/docx-generate.ts +++ b/apps/sim/sandbox-tasks/docx-generate.ts @@ -15,6 +15,9 @@ export const docxGenerateTask = defineSandboxTask({ globalThis.addSection = (section) => { globalThis.__docxSections.push(section); }; + // Set globalThis.__docxDocOptions = { styles: {...}, numbering: {...} } in chunk 1 + // to configure document-wide styles and numbering in chunked (addSection) mode. + globalThis.__docxDocOptions = null; // Page geometry constants (twips, 1 twip = 1/1440 inch) for US Letter globalThis.PAGE_W = 12240; // 8.5" @@ -79,10 +82,13 @@ export const docxGenerateTask = defineSandboxTask({ finalize: ` let doc = globalThis.doc; if (!doc && globalThis.__docxSections.length > 0) { - doc = new globalThis.docx.Document({ sections: globalThis.__docxSections }); + doc = new globalThis.docx.Document({ + ...(globalThis.__docxDocOptions || {}), + sections: globalThis.__docxSections, + }); } if (!doc) { - throw new Error('No document created. Use addSection({ children: [...] }) for chunked writes, or set doc = new docx.Document({...}) for a single write.'); + throw new Error('No document created. Use addSection({ children: [...] 
}) for chunked writes, or set globalThis.doc = new docx.Document({...}) for a single write.'); } const b64 = await globalThis.docx.Packer.toBase64String(doc); const alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; diff --git a/apps/sim/sandbox-tasks/pptx-generate.ts b/apps/sim/sandbox-tasks/pptx-generate.ts index 986954da8d6..f31fcb9a1f1 100644 --- a/apps/sim/sandbox-tasks/pptx-generate.ts +++ b/apps/sim/sandbox-tasks/pptx-generate.ts @@ -60,6 +60,9 @@ export const pptxGenerateTask = defineSandboxTask({ }; `, finalize: ` + if (!globalThis.pptx) { + throw new Error('No presentation found. globalThis.pptx was overwritten — use the pre-initialized instance and call addSlide() on it to build your presentation.'); + } const bytes = await globalThis.pptx.write({ outputType: 'uint8array' }); return bytes; `, From d0b0ede39ea5587dbb05a0ebda02935222eead17 Mon Sep 17 00:00:00 2001 From: Vikhyath Mondreti Date: Fri, 8 May 2026 22:22:48 -0700 Subject: [PATCH 25/33] fix(mothership): misc ui bugs (#4528) * fix(vfs); compiled check tool call natural language tool desc * fix few more ui/ux bugs --- .../components/plus-menu-dropdown.tsx | 2 + .../[workspaceId]/home/hooks/use-chat.test.ts | 234 +++++++++++++ .../[workspaceId]/home/hooks/use-chat.ts | 318 ++++++++++++++++-- .../copilot/tools/client/store-utils.test.ts | 32 ++ .../lib/copilot/tools/client/store-utils.ts | 28 +- .../copilot/tools/handlers/resources.test.ts | 70 ++++ .../lib/copilot/tools/handlers/resources.ts | 3 - .../workspace/workspace-file-manager.ts | 2 +- 8 files changed, 657 insertions(+), 32 deletions(-) create mode 100644 apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.test.ts create mode 100644 apps/sim/lib/copilot/tools/handlers/resources.test.ts diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/user-input/components/plus-menu-dropdown.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/user-input/components/plus-menu-dropdown.tsx index 
5403aa76fbf..077727a005a 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/components/user-input/components/plus-menu-dropdown.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/components/user-input/components/plus-menu-dropdown.tsx @@ -89,6 +89,7 @@ export const PlusMenuDropdown = React.memo( items.filter((item) => item.name.toLowerCase().includes(q)).map((item) => ({ type, item })) ) }, [isMention, mentionQuery, search, availableResources]) + const isRootMenu = !isMention && filteredItems === null const filteredItemsRef = useRef(filteredItems) filteredItemsRef.current = filteredItems @@ -248,6 +249,7 @@ export const PlusMenuDropdown = React.memo( collisionPadding={8} className={cn( 'flex flex-col overflow-hidden', + isRootMenu && 'max-h-none', // Plus-click shows short fixed labels (Workflows, Tables, …) — let it size // to its content via the emcn DropdownMenuContent default max-w. // Mention mode renders resource names directly, so widen for breathing room. diff --git a/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.test.ts b/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.test.ts new file mode 100644 index 00000000000..df2631a16f7 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.test.ts @@ -0,0 +1,234 @@ +/** + * @vitest-environment node + */ +import { describe, expect, it, vi } from 'vitest' +import type { PersistedMessage } from '@/lib/copilot/chat/persisted-message' +import { + MothershipStreamV1EventType, + MothershipStreamV1ToolPhase, +} from '@/lib/copilot/generated/mothership-stream-v1' +import type { StreamBatchEvent } from '@/lib/copilot/request/session/types' +import { + getReplayCompletedWorkflowToolCallIds, + reconcileLiveAssistantTurn, + selectReconnectReplayState, +} from '@/app/workspace/[workspaceId]/home/hooks/use-chat' +import type { ContentBlock } from '@/app/workspace/[workspaceId]/home/types' + +vi.mock('next/navigation', () => ({ + usePathname: () => '/workspace/workspace-1/home', + 
useRouter: () => ({ + push: vi.fn(), + replace: vi.fn(), + refresh: vi.fn(), + }), +})) + +function userMessage(id: string): PersistedMessage { + return { + id, + role: 'user', + content: 'Question', + timestamp: '2026-05-08T00:00:00.000Z', + } +} + +function assistantMessage(id: string, content: string): PersistedMessage { + return { + id, + role: 'assistant', + content, + timestamp: '2026-05-08T00:00:01.000Z', + } +} + +function toolBatchEvent( + eventId: number, + toolCallId: string, + toolName: string, + phase: MothershipStreamV1ToolPhase +): StreamBatchEvent { + return { + eventId, + streamId: 'stream-1', + event: { + v: 1, + seq: eventId, + ts: '2026-05-08T00:00:00.000Z', + type: MothershipStreamV1EventType.tool, + stream: { streamId: 'stream-1' }, + payload: { + phase, + toolCallId, + toolName, + }, + }, + } as StreamBatchEvent +} + +describe('reconcileLiveAssistantTurn', () => { + it('replaces the live assistant for the active stream owner', () => { + const liveAssistant = assistantMessage('live-assistant:stream-1', 'updated') + const messages = [userMessage('stream-1'), assistantMessage('live-assistant:stream-1', 'old')] + + const result = reconcileLiveAssistantTurn({ + messages, + streamId: 'stream-1', + liveAssistant, + activeStreamId: 'stream-1', + }) + + expect(result).toEqual([userMessage('stream-1'), liveAssistant]) + }) + + it('replaces the generated assistant after the owner while the stream is active', () => { + const liveAssistant = assistantMessage('live-assistant:stream-1', 'live content') + + const result = reconcileLiveAssistantTurn({ + messages: [userMessage('stream-1'), assistantMessage('final-1', 'persisted content')], + streamId: 'stream-1', + liveAssistant, + activeStreamId: 'stream-1', + }) + + expect(result).toEqual([userMessage('stream-1'), liveAssistant]) + }) + + it('leaves a terminal persisted assistant alone when the stream is no longer active', () => { + const messages = [userMessage('stream-1'), assistantMessage('final-1', 
'persisted content')] + + const result = reconcileLiveAssistantTurn({ + messages, + streamId: 'stream-1', + liveAssistant: assistantMessage('live-assistant:stream-1', 'stale live content'), + activeStreamId: null, + }) + + expect(result).toBe(messages) + }) + + it('removes stale live assistant duplicates when a terminal persisted assistant exists', () => { + const finalAssistant = assistantMessage('final-1', 'persisted content') + const staleLiveAssistant = assistantMessage('live-assistant:stream-1', 'stale live content') + + const result = reconcileLiveAssistantTurn({ + messages: [ + userMessage('stream-1'), + finalAssistant, + userMessage('next-user'), + staleLiveAssistant, + ], + streamId: 'stream-1', + liveAssistant: staleLiveAssistant, + activeStreamId: null, + }) + + expect(result).toEqual([userMessage('stream-1'), finalAssistant, userMessage('next-user')]) + }) + + it('inserts the live assistant immediately after its owner', () => { + const nextUser = userMessage('next-user') + const liveAssistant = assistantMessage('live-assistant:stream-1', 'live content') + + const result = reconcileLiveAssistantTurn({ + messages: [userMessage('stream-1'), nextUser], + streamId: 'stream-1', + liveAssistant, + activeStreamId: 'stream-1', + }) + + expect(result).toEqual([userMessage('stream-1'), liveAssistant, nextUser]) + }) +}) + +describe('selectReconnectReplayState', () => { + it('hydrates nonzero cursor replay from a cached live assistant that is ahead', () => { + const cachedBlock: ContentBlock = { type: 'text', content: 'Hello world' } + + const result = selectReconnectReplayState({ + afterCursor: '4', + cachedLiveAssistant: { + content: 'Hello world', + contentBlocks: [cachedBlock], + }, + currentContent: 'Hello', + currentBlocks: [], + }) + + expect(result).toEqual({ + afterCursor: '4', + content: 'Hello world', + contentBlocks: [cachedBlock], + preserveExistingState: true, + source: 'cache', + }) + }) + + it('resets to replay from the beginning when a nonzero 
cursor has no usable live cache', () => { + const result = selectReconnectReplayState({ + afterCursor: '4', + cachedLiveAssistant: null, + currentContent: '', + currentBlocks: [], + }) + + expect(result).toEqual({ + afterCursor: '0', + content: '', + contentBlocks: [], + preserveExistingState: false, + source: 'reset', + }) + }) + + it('resets when cached live content diverges from the local prefix', () => { + const result = selectReconnectReplayState({ + afterCursor: '4', + cachedLiveAssistant: { + content: 'Goodbye world', + contentBlocks: [{ type: 'text', content: 'Goodbye world' }], + }, + currentContent: 'Hello', + currentBlocks: [{ type: 'text', content: 'Hello' }], + }) + + expect(result).toEqual({ + afterCursor: '0', + content: '', + contentBlocks: [], + preserveExistingState: false, + source: 'reset', + }) + }) + + it('resets current state for cursor zero replay', () => { + const currentBlock: ContentBlock = { type: 'text', content: 'Hello' } + + const result = selectReconnectReplayState({ + afterCursor: '0', + cachedLiveAssistant: null, + currentContent: 'Hello', + currentBlocks: [currentBlock], + }) + + expect(result).toEqual({ + afterCursor: '0', + content: '', + contentBlocks: [], + preserveExistingState: false, + source: 'reset', + }) + }) +}) + +describe('getReplayCompletedWorkflowToolCallIds', () => { + it('suppresses only workflow tool starts that already have results in the replay batch', () => { + const result = getReplayCompletedWorkflowToolCallIds([ + toolBatchEvent(1, 'workflow-active', 'run_workflow', MothershipStreamV1ToolPhase.call), + toolBatchEvent(2, 'search-complete', 'tool_search', MothershipStreamV1ToolPhase.result), + toolBatchEvent(3, 'workflow-complete', 'run_workflow', MothershipStreamV1ToolPhase.result), + ]) + + expect(result).toEqual(new Set(['workflow-complete'])) + }) +}) diff --git a/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts b/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts index 
8e6d5bc49d2..4e7d5138e69 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts +++ b/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts @@ -1140,6 +1140,167 @@ function isAlreadyProcessedStreamCursor( ) } +function isZeroStreamCursor(cursor: string): boolean { + const sequence = Number(cursor) + return Number.isFinite(sequence) && sequence <= 0 +} + +function isPersistedAssistantMessage(message: PersistedMessage, liveAssistantId: string): boolean { + return ( + message.role === 'assistant' && + message.id !== liveAssistantId && + !message.id.startsWith('live-assistant:') + ) +} + +function findStreamOwnerIndex(messages: PersistedMessage[], streamId: string): number { + return messages.findIndex((message) => message.role === 'user' && message.id === streamId) +} + +function findAssistantAfterOwner(messages: PersistedMessage[], ownerIndex: number): number { + for (let index = ownerIndex + 1; index < messages.length; index++) { + const message = messages[index] + if (message.role === 'user') return -1 + if (message.role === 'assistant') return index + } + return -1 +} + +function hasTerminalPersistedAssistantForStream( + messages: PersistedMessage[], + streamId: string, + liveAssistantId: string +): boolean { + const ownerIndex = findStreamOwnerIndex(messages, streamId) + if (ownerIndex === -1) return false + + const assistantIndex = findAssistantAfterOwner(messages, ownerIndex) + if (assistantIndex === -1) return false + + return isPersistedAssistantMessage(messages[assistantIndex], liveAssistantId) +} + +export function reconcileLiveAssistantTurn(params: { + messages: PersistedMessage[] + streamId: string + liveAssistant: PersistedMessage + activeStreamId: string | null +}): PersistedMessage[] { + const { messages, streamId, liveAssistant, activeStreamId } = params + const ownerIndex = findStreamOwnerIndex(messages, streamId) + if (ownerIndex === -1) { + return [...messages.filter((message) => message.id !== liveAssistant.id), 
liveAssistant] + } + + const assistantIndex = findAssistantAfterOwner(messages, ownerIndex) + const existingAssistant = assistantIndex >= 0 ? messages[assistantIndex] : undefined + if ( + activeStreamId !== streamId && + existingAssistant && + isPersistedAssistantMessage(existingAssistant, liveAssistant.id) + ) { + const withoutStaleLiveAssistant = messages.filter((message) => message.id !== liveAssistant.id) + return withoutStaleLiveAssistant.length === messages.length + ? messages + : withoutStaleLiveAssistant + } + + const withoutDuplicateLiveAssistant = messages.filter( + (message, index) => index === assistantIndex || message.id !== liveAssistant.id + ) + const adjustedOwnerIndex = withoutDuplicateLiveAssistant.findIndex( + (message) => message.role === 'user' && message.id === streamId + ) + const adjustedAssistantIndex = + adjustedOwnerIndex >= 0 + ? findAssistantAfterOwner(withoutDuplicateLiveAssistant, adjustedOwnerIndex) + : -1 + + if (adjustedAssistantIndex >= 0) { + return withoutDuplicateLiveAssistant.map((message, index) => + index === adjustedAssistantIndex ? 
liveAssistant : message + ) + } + + if (adjustedOwnerIndex >= 0) { + return [ + ...withoutDuplicateLiveAssistant.slice(0, adjustedOwnerIndex + 1), + liveAssistant, + ...withoutDuplicateLiveAssistant.slice(adjustedOwnerIndex + 1), + ] + } + + return [...withoutDuplicateLiveAssistant, liveAssistant] +} + +export interface ReconnectReplaySelection { + afterCursor: string + content: string + contentBlocks: ContentBlock[] + preserveExistingState: boolean + source: 'cache' | 'reset' +} + +export function selectReconnectReplayState(params: { + afterCursor: string + cachedLiveAssistant?: Pick | null + currentContent: string + currentBlocks: ContentBlock[] +}): ReconnectReplaySelection { + const { afterCursor, cachedLiveAssistant, currentContent, currentBlocks } = params + if (isZeroStreamCursor(afterCursor)) { + return { + afterCursor, + content: '', + contentBlocks: [], + preserveExistingState: false, + source: 'reset', + } + } + + const cachedContent = cachedLiveAssistant?.content ?? '' + const cachedBlocks = cachedLiveAssistant?.contentBlocks ?? 
[] + const cachedHasLiveState = cachedContent.length > 0 || cachedBlocks.length > 0 + const cachedIsAhead = + cachedHasLiveState && + cachedContent.length >= currentContent.length && + cachedContent.startsWith(currentContent) && + cachedBlocks.length >= currentBlocks.length + + if (cachedIsAhead) { + return { + afterCursor, + content: cachedContent, + contentBlocks: [...cachedBlocks], + preserveExistingState: true, + source: 'cache', + } + } + + return { + afterCursor: '0', + content: '', + contentBlocks: [], + preserveExistingState: false, + source: 'reset', + } +} + +export function getReplayCompletedWorkflowToolCallIds(events: StreamBatchEvent[]): Set { + const completedToolCallIds = new Set() + for (const entry of events) { + const event = entry.event + if (event.type !== MothershipStreamV1EventType.tool) continue + const payload = event.payload + if (!('phase' in payload)) continue + if (payload.phase !== MothershipStreamV1ToolPhase.result) continue + if (typeof payload.toolCallId === 'string' && isWorkflowToolName(payload.toolName)) { + completedToolCallIds.add(payload.toolCallId) + } + } + return completedToolCallIds +} + function buildRecoverySubjectKey( chatId: string | undefined, selectedChatId: string | undefined @@ -1556,7 +1717,7 @@ export function useChat( expectedGen?: number, options?: { preserveExistingState?: boolean - suppressWorkflowToolStarts?: boolean + suppressedWorkflowToolStartIds?: ReadonlySet targetChatId?: string shouldContinue?: () => boolean } @@ -1735,6 +1896,45 @@ export function useChat( streamingBlocksRef.current = [] }, []) + const applyReconnectReplaySelection = useCallback( + ( + streamId: string, + assistantId: string, + afterCursor: string, + options?: { targetChatId?: string; chatHistory?: TaskChatHistory } + ): ReconnectReplaySelection => { + const cachedHistory = + options?.chatHistory ?? + (options?.targetChatId + ? 
queryClient.getQueryData(taskKeys.detail(options.targetChatId)) + : undefined) + const cachedLiveAssistant = cachedHistory?.messages.find( + (message) => message.id === assistantId + ) + const selection = selectReconnectReplayState({ + afterCursor, + cachedLiveAssistant: cachedLiveAssistant ? toDisplayMessage(cachedLiveAssistant) : null, + currentContent: streamingContentRef.current, + currentBlocks: streamingBlocksRef.current, + }) + + streamingContentRef.current = selection.content + streamingBlocksRef.current = selection.contentBlocks + lastCursorRef.current = selection.afterCursor + + if (selection.afterCursor === '0' && afterCursor !== '0') { + logger.info('Resetting stream replay cursor after reconnect state mismatch', { + streamId, + targetChatId: options?.targetChatId ?? cachedHistory?.id, + previousCursor: afterCursor, + }) + } + + return selection + }, + [queryClient] + ) + const clearActiveTurn = useCallback(() => { activeTurnRef.current = null pendingUserMsgRef.current = null @@ -2075,12 +2275,32 @@ export function useChat( const previousStreamId = streamIdRef.current ?? activeTurnRef.current?.userMessageId const reconnectAfterCursor = previousStreamId === activeStreamId ? 
lastCursorRef.current || '0' : '0' + cancelActiveStreamRecovery() + const replacedController = abortControllerRef.current + if (replacedController && !replacedController.signal.aborted) { + replacedController.abort('superseded_chat_history_reconnect') + } + cancelActiveStreamReader() abortControllerRef.current = abortController streamIdRef.current = activeStreamId - lastCursorRef.current = reconnectAfterCursor setTransportReconnecting() const assistantId = getLiveAssistantMessageId(activeStreamId) + let snapshotReplayAfterCursor: string + if (snapshotEvents.length > 0) { + streamingContentRef.current = '' + streamingBlocksRef.current = [] + lastCursorRef.current = '0' + snapshotReplayAfterCursor = '0' + } else { + const replaySelection = applyReconnectReplaySelection( + activeStreamId, + assistantId, + reconnectAfterCursor, + { targetChatId: chatHistory.id, chatHistory } + ) + snapshotReplayAfterCursor = replaySelection.afterCursor + } const reconnect = async () => { const initialSnapshot = chatHistory.streamSnapshot @@ -2091,7 +2311,8 @@ export function useChat( let reconnectResult: Awaited> | null = null const replaySnapshotEvents = snapshotEvents.filter( - (entry) => !isAlreadyProcessedStreamCursor(String(entry.eventId), reconnectAfterCursor) + (entry) => + !isAlreadyProcessedStreamCursor(String(entry.eventId), snapshotReplayAfterCursor) ) if (replaySnapshotEvents.length > 0) { try { @@ -2105,7 +2326,7 @@ export function useChat( previewSessions: snapshotPreviewSessions, status: initialSnapshot?.status ?? 
'unknown', }, - afterCursor: reconnectAfterCursor, + afterCursor: snapshotReplayAfterCursor, targetChatId: chatHistory.id, }) } catch (error) { @@ -2150,9 +2371,12 @@ export function useChat( }, [ chatHistory, workspaceId, + cancelActiveStreamReader, + cancelActiveStreamRecovery, queryClient, recoverPendingClientWorkflowTools, seedPreviewSessions, + applyReconnectReplaySelection, setTransportIdle, setTransportReconnecting, ]) @@ -2164,7 +2388,7 @@ export function useChat( expectedGen?: number, options?: { preserveExistingState?: boolean - suppressWorkflowToolStarts?: boolean + suppressedWorkflowToolStartIds?: ReadonlySet targetChatId?: string shouldContinue?: () => boolean } @@ -2372,14 +2596,27 @@ export function useChat( contentBlocks: blocks, ...(streamRequestId ? { requestId: streamRequestId } : {}), }) - upsertTaskChatHistory(activeChatId, (current) => ({ - ...current, - messages: [ - ...current.messages.filter((message) => message.id !== assistantId), - assistantMessage, - ], - activeStreamId: streamIdRef.current ?? current.activeStreamId, - })) + upsertTaskChatHistory(activeChatId, (current) => { + const streamId = streamIdRef.current ?? current.activeStreamId ?? assistantId + const terminalPersistedAssistantExists = + current.activeStreamId !== streamId && + hasTerminalPersistedAssistantForStream(current.messages, streamId, assistantMessage.id) + const reconciledMessages = reconcileLiveAssistantTurn({ + messages: current.messages, + streamId, + liveAssistant: assistantMessage, + activeStreamId: current.activeStreamId, + }) + const skippedTerminalLiveWrite = reconciledMessages === current.messages + return { + ...current, + messages: reconciledMessages, + activeStreamId: + skippedTerminalLiveWrite || terminalPersistedAssistantExists + ? current.activeStreamId + : (streamIdRef.current ?? 
current.activeStreamId), + } + }) } const flushText = () => { @@ -2951,7 +3188,7 @@ export function useChat( if (isWorkflowToolName(name) && !isPartial) { const shouldStartWorkflowTool = - !options?.suppressWorkflowToolStarts && + !options?.suppressedWorkflowToolStartIds?.has(id) && (isNewToolCall || (existingToolCall?.status === ToolCallStatus.executing && !existingToolCall.result)) @@ -3392,10 +3629,6 @@ export function useChat( targetChatId, shouldContinue, } = opts - let latestCursor = afterCursor - let seedEvents = opts.initialBatch?.events ?? [] - let streamStatus = opts.initialBatch?.status ?? 'unknown' - let suppressSeedWorkflowStarts = seedEvents.length > 0 const isStaleReconnect = () => streamGenRef.current !== expectedGen || @@ -3406,6 +3639,20 @@ export function useChat( return { error: false, aborted: true } } + const initialReplaySelection: Pick< + ReconnectReplaySelection, + 'afterCursor' | 'preserveExistingState' + > = opts.initialBatch + ? { afterCursor, preserveExistingState: true } + : applyReconnectReplaySelection(streamId, assistantId, afterCursor, { + ...(targetChatId ? { targetChatId } : {}), + }) + let latestCursor = initialReplaySelection.afterCursor + let preserveNextReplayState = initialReplaySelection.preserveExistingState + let seedEvents = opts.initialBatch?.events ?? [] + let streamStatus = opts.initialBatch?.status ?? 'unknown' + let suppressedSeedWorkflowToolStartIds = getReplayCompletedWorkflowToolCallIds(seedEvents) + setTransportReconnecting() setError(null) @@ -3417,8 +3664,8 @@ export function useChat( assistantId, expectedGen, { - preserveExistingState: true, - suppressWorkflowToolStarts: suppressSeedWorkflowStarts, + preserveExistingState: preserveNextReplayState, + suppressedWorkflowToolStartIds: suppressedSeedWorkflowToolStartIds, ...(targetChatId ? { targetChatId } : {}), ...(shouldContinue ? 
{ shouldContinue } : {}), } @@ -3429,7 +3676,8 @@ export function useChat( latestCursor = String(seedEvents[seedEvents.length - 1]?.eventId ?? latestCursor) lastCursorRef.current = latestCursor seedEvents = [] - suppressSeedWorkflowStarts = false + preserveNextReplayState = true + suppressedSeedWorkflowToolStartIds = new Set() if (replayResult.sawStreamError) { return { error: true, aborted: false } @@ -3475,11 +3723,12 @@ export function useChat( assistantId, expectedGen, { - preserveExistingState: true, + preserveExistingState: preserveNextReplayState, ...(targetChatId ? { targetChatId } : {}), ...(shouldContinue ? { shouldContinue } : {}), } ) + preserveNextReplayState = true if (liveResult.sawStreamError) { return { error: true, aborted: false } @@ -3509,6 +3758,7 @@ export function useChat( seedStreamBatchPreviewSessions(batch) seedEvents = batch.events streamStatus = batch.status + suppressedSeedWorkflowToolStartIds = getReplayCompletedWorkflowToolCallIds(seedEvents) if (batch.events.length > 0) { latestCursor = String(batch.events[batch.events.length - 1].eventId) @@ -3538,6 +3788,7 @@ export function useChat( } }, [ + applyReconnectReplaySelection, fetchStreamBatch, seedStreamBatchPreviewSessions, setTransportIdle, @@ -3559,7 +3810,12 @@ export function useChat( }): Promise => { const { streamId, assistantId, gen, afterCursor, signal, targetChatId, shouldContinue } = opts - const batch = await fetchStreamBatch(streamId, afterCursor, signal) + if (streamGenRef.current !== gen || signal?.aborted || shouldContinue?.() === false) return + + const replaySelection = applyReconnectReplaySelection(streamId, assistantId, afterCursor, { + ...(targetChatId ? 
{ targetChatId } : {}), + }) + const batch = await fetchStreamBatch(streamId, replaySelection.afterCursor, signal) if (streamGenRef.current !== gen || shouldContinue?.() === false) return seedStreamBatchPreviewSessions(batch) @@ -3570,7 +3826,8 @@ export function useChat( assistantId, gen, { - preserveExistingState: true, + preserveExistingState: replaySelection.preserveExistingState, + suppressedWorkflowToolStartIds: getReplayCompletedWorkflowToolCallIds(batch.events), ...(targetChatId ? { targetChatId } : {}), ...(shouldContinue ? { shouldContinue } : {}), } @@ -3594,7 +3851,7 @@ export function useChat( afterCursor: batch.events.length > 0 ? String(batch.events[batch.events.length - 1].eventId) - : afterCursor, + : replaySelection.afterCursor, }) if ( @@ -3615,7 +3872,13 @@ export function useChat( setTransportIdle() } }, - [fetchStreamBatch, seedStreamBatchPreviewSessions, attachToExistingStream, setTransportIdle] + [ + applyReconnectReplaySelection, + fetchStreamBatch, + seedStreamBatchPreviewSessions, + attachToExistingStream, + setTransportIdle, + ] ) const retryReconnect = useCallback( @@ -3782,6 +4045,8 @@ export function useChat( } const recoveryGen = observedGeneration + 1 + const previousStreamId = streamIdRef.current ?? activeTurnRef.current?.userMessageId + const afterCursor = previousStreamId === streamId ? 
lastCursorRef.current || '0' : '0' streamGenRef.current = recoveryGen setTransportReconnecting() streamIdRef.current = streamId @@ -3821,7 +4086,6 @@ export function useChat( if (locallyTerminalStreamIdRef.current === streamId) return const assistantId = getLiveAssistantMessageId(streamId) - const afterCursor = lastCursorRef.current || '0' try { await resumeOrFinalize({ diff --git a/apps/sim/lib/copilot/tools/client/store-utils.test.ts b/apps/sim/lib/copilot/tools/client/store-utils.test.ts index 3c3bba51518..78ae7528e60 100644 --- a/apps/sim/lib/copilot/tools/client/store-utils.test.ts +++ b/apps/sim/lib/copilot/tools/client/store-utils.test.ts @@ -37,6 +37,38 @@ describe('resolveToolDisplay', () => { ).toBe('Read RET XYZ') }) + it('formats special workspace file reads as natural language', () => { + expect( + resolveToolDisplay(ReadTool.id, ClientToolCallState.error, { + path: 'files/haiku_collection_sim.pptx/compiled-check', + })?.text + ).toBe('Attempted to read the final file check for haiku_collection_sim.pptx') + + expect( + resolveToolDisplay(ReadTool.id, ClientToolCallState.success, { + path: 'files/by-id/87c18b84-2f83-43a4-bed8-8a86f7d42022/compiled-check', + })?.text + ).toBe('Read the final file check for this file') + + expect( + resolveToolDisplay(ReadTool.id, ClientToolCallState.success, { + path: 'files/by-id/625094cc-2f64-4de9-a39c-452cb8283bb1/content', + })?.text + ).toBe('Read the content of this file') + + expect( + resolveToolDisplay(ReadTool.id, ClientToolCallState.executing, { + path: 'files/report.pdf/meta.json', + })?.text + ).toBe('Reading metadata for report.pdf') + + expect( + resolveToolDisplay(ReadTool.id, ClientToolCallState.success, { + path: 'files/deck.pptx/style', + })?.text + ).toBe('Read style details for deck.pptx') + }) + it('falls back to a humanized tool label for generic tools', () => { expect(resolveToolDisplay('deploy_api', ClientToolCallState.success)?.text).toBe( 'Executed Deploy Api' diff --git 
a/apps/sim/lib/copilot/tools/client/store-utils.ts b/apps/sim/lib/copilot/tools/client/store-utils.ts index 6780db12807..dafa45ecc6d 100644 --- a/apps/sim/lib/copilot/tools/client/store-utils.ts +++ b/apps/sim/lib/copilot/tools/client/store-utils.ts @@ -96,7 +96,7 @@ function describeReadTarget(path: string | undefined): string | undefined { } if (resourceType === 'file') { - return segments.slice(1).join('/') || segments[segments.length - 1] + return describeFileReadTarget(segments) } if (resourceType === 'workflow') { @@ -107,6 +107,32 @@ function describeReadTarget(path: string | undefined): string | undefined { return stripExtension(resourceName) } +const FILE_SPECIAL_READ_TARGET_PREFIXES: Record = { + content: 'the content of', + 'meta.json': 'metadata for', + style: 'style details for', + 'compiled-check': 'the final file check for', +} + +function describeFileReadTarget(segments: string[]): string { + const lastSegment = segments[segments.length - 1] || '' + const specialPrefix = FILE_SPECIAL_READ_TARGET_PREFIXES[lastSegment] + if (specialPrefix) { + return `${specialPrefix} ${describeSpecialFilePathSubject(segments)}` + } + + return segments.slice(1).join('/') || lastSegment +} + +function describeSpecialFilePathSubject(segments: string[]): string { + if (segments[1] === 'by-id') { + const namedRemainder = segments.slice(3, -1).join('/') + return namedRemainder || 'this file' + } + + return segments.slice(1, -1).join('/') || 'this file' +} + function getLeafResourceSegment(segments: string[]): string { const lastSegment = segments[segments.length - 1] || '' if (hasFileExtension(lastSegment) && segments.length > 1) { diff --git a/apps/sim/lib/copilot/tools/handlers/resources.test.ts b/apps/sim/lib/copilot/tools/handlers/resources.test.ts new file mode 100644 index 00000000000..d573959f9db --- /dev/null +++ b/apps/sim/lib/copilot/tools/handlers/resources.test.ts @@ -0,0 +1,70 @@ +/** + * @vitest-environment node + */ + +import { beforeEach, describe, expect, 
it, vi } from 'vitest' + +const { getWorkspaceFileMock } = vi.hoisted(() => ({ + getWorkspaceFileMock: vi.fn(), +})) + +vi.mock('@sim/db', () => ({ + db: {}, +})) + +vi.mock('@sim/db/schema', () => ({})) + +vi.mock('@/lib/uploads/contexts/workspace/workspace-file-manager', () => ({ + getWorkspaceFile: getWorkspaceFileMock, +})) + +vi.mock('@/lib/workflows/utils', () => ({ + getWorkflowById: vi.fn(), +})) + +vi.mock('@/lib/table/service', () => ({ + getTableById: vi.fn(), +})) + +vi.mock('@/lib/knowledge/service', () => ({ + getKnowledgeBaseById: vi.fn(), +})) + +vi.mock('@/lib/logs/service', () => ({ + getLogById: vi.fn(), +})) + +import { executeOpenResource } from './resources' + +describe('executeOpenResource', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + it('opens workspace files with canonical non-UUID file ids', async () => { + getWorkspaceFileMock.mockResolvedValue({ + id: 'wf_qL_cfff-FskMsXtOdm599', + name: 'MAC_Brand_Guidelines_May_2021 (1).docx', + }) + + const result = await executeOpenResource( + { + resources: [{ type: 'file', id: 'wf_qL_cfff-FskMsXtOdm599' }], + }, + { userId: 'user-1', workflowId: 'workflow-1', workspaceId: 'workspace-1' } + ) + + expect(getWorkspaceFileMock).toHaveBeenCalledWith('workspace-1', 'wf_qL_cfff-FskMsXtOdm599') + expect(result).toMatchObject({ + success: true, + output: { opened: 1, errors: [] }, + resources: [ + { + type: 'file', + id: 'wf_qL_cfff-FskMsXtOdm599', + title: 'MAC_Brand_Guidelines_May_2021 (1).docx', + }, + ], + }) + }) +}) diff --git a/apps/sim/lib/copilot/tools/handlers/resources.ts b/apps/sim/lib/copilot/tools/handlers/resources.ts index cae410bf8cc..338f187de3e 100644 --- a/apps/sim/lib/copilot/tools/handlers/resources.ts +++ b/apps/sim/lib/copilot/tools/handlers/resources.ts @@ -5,7 +5,6 @@ import { getLogById } from '@/lib/logs/service' import { getTableById } from '@/lib/table/service' import { getWorkspaceFile } from '@/lib/uploads/contexts/workspace/workspace-file-manager' import { 
getWorkflowById } from '@/lib/workflows/utils' -import { isUuid } from '@/executor/constants' import type { OpenResourceItem, OpenResourceParams, ValidOpenResourceParams } from './param-types' const VALID_OPEN_RESOURCE_TYPES = new Set(Object.values(MothershipResourceType)) @@ -21,8 +20,6 @@ async function resolveResource( if (resourceType === 'file') { if (!context.workspaceId) return { error: 'Opening a workspace file requires workspace context.' } - if (!isUuid(item.id)) - return { error: 'open_resource for files requires the canonical file UUID.' } const record = await getWorkspaceFile(context.workspaceId, item.id) if (!record) return { error: `No workspace file with id "${item.id}".` } resourceId = record.id diff --git a/apps/sim/lib/uploads/contexts/workspace/workspace-file-manager.ts b/apps/sim/lib/uploads/contexts/workspace/workspace-file-manager.ts index a5508b5476b..fda46789dce 100644 --- a/apps/sim/lib/uploads/contexts/workspace/workspace-file-manager.ts +++ b/apps/sim/lib/uploads/contexts/workspace/workspace-file-manager.ts @@ -674,7 +674,7 @@ export function getSandboxWorkspaceFilePath( /** * Find a workspace file record in an existing list from either its id or a VFS/name reference. - * For copilot `open_resource` and the resource panel, use {@link getWorkspaceFile} with a UUID only. + * For copilot `open_resource` and the resource panel, use {@link getWorkspaceFile} with the file id. 
*/ export function findWorkspaceFileRecord( files: WorkspaceFileRecord[], From cb3a876d3e7a661f5cd3fb15f65964324f7b6b1c Mon Sep 17 00:00:00 2001 From: Waleed Date: Fri, 8 May 2026 23:08:52 -0700 Subject: [PATCH 26/33] feat(knowledge): include sourceUrl in KB search results (#4533) * feat(knowledge): include sourceUrl in KB search results * improvement(kb-search): mark sourceUrl nullable and cover non-null happy path in tests --- apps/docs/content/docs/en/tools/knowledge.mdx | 1 + apps/docs/openapi.json | 6 +++ .../app/api/knowledge/search/route.test.ts | 30 ++++++++----- apps/sim/app/api/knowledge/search/route.ts | 8 ++-- .../app/api/knowledge/search/utils.test.ts | 6 +-- apps/sim/app/api/knowledge/search/utils.ts | 24 +++++++--- .../app/api/v1/knowledge/search/route.test.ts | 44 +++++++++++++++++-- apps/sim/app/api/v1/knowledge/search/route.ts | 8 ++-- apps/sim/tools/knowledge/search.ts | 6 +++ apps/sim/tools/knowledge/types.ts | 1 + apps/sim/tools/types.ts | 1 + 11 files changed, 105 insertions(+), 30 deletions(-) diff --git a/apps/docs/content/docs/en/tools/knowledge.mdx b/apps/docs/content/docs/en/tools/knowledge.mdx index b0e1338d9e0..40da7b3f0e3 100644 --- a/apps/docs/content/docs/en/tools/knowledge.mdx +++ b/apps/docs/content/docs/en/tools/knowledge.mdx @@ -60,6 +60,7 @@ Search for similar content in a knowledge base using vector similarity | `results` | array | Array of search results from the knowledge base | | ↳ `documentId` | string | Document ID | | ↳ `documentName` | string | Document name | +| ↳ `sourceUrl` | string | URL to the original source document \(e.g., Confluence page, Google Doc, Notion page\). Null for documents without an external source. 
| | ↳ `content` | string | Content of the result | | ↳ `chunkIndex` | number | Index of the chunk within the document | | ↳ `similarity` | number | Similarity score of the result | diff --git a/apps/docs/openapi.json b/apps/docs/openapi.json index febad336bcc..04e135f4aaf 100644 --- a/apps/docs/openapi.json +++ b/apps/docs/openapi.json @@ -5030,6 +5030,7 @@ { "documentId": "doc_abc123", "documentName": "Getting Started.pdf", + "sourceUrl": "https://example.atlassian.net/wiki/spaces/DOCS/pages/12345", "content": "To reset your password, go to Settings > Security.", "chunkIndex": 3, "similarity": 0.95, @@ -6264,6 +6265,11 @@ "type": "string", "description": "Filename of the source document." }, + "sourceUrl": { + "type": "string", + "nullable": true, + "description": "URL to the original source document for connector-synced documents (e.g., a Confluence page, Google Doc, or Notion page). Null for documents without an external source." + }, "content": { "type": "string", "description": "The matched chunk content." 
diff --git a/apps/sim/app/api/knowledge/search/route.test.ts b/apps/sim/app/api/knowledge/search/route.test.ts index b565092187d..0b36497d0ad 100644 --- a/apps/sim/app/api/knowledge/search/route.test.ts +++ b/apps/sim/app/api/knowledge/search/route.test.ts @@ -24,7 +24,7 @@ const { mockHandleTagAndVectorSearch, mockGetQueryStrategy, mockGenerateSearchEmbedding, - mockGetDocumentNamesByIds, + mockGetDocumentMetadataByIds, } = vi.hoisted(() => ({ mockDbChain: { select: vi.fn().mockReturnThis(), @@ -43,7 +43,7 @@ const { mockHandleTagAndVectorSearch: vi.fn(), mockGetQueryStrategy: vi.fn(), mockGenerateSearchEmbedding: vi.fn(), - mockGetDocumentNamesByIds: vi.fn(), + mockGetDocumentMetadataByIds: vi.fn(), })) const mockCheckKnowledgeBaseAccess = knowledgeApiUtilsMockFns.mockCheckKnowledgeBaseAccess @@ -101,7 +101,7 @@ vi.mock('./utils', () => ({ handleTagAndVectorSearch: mockHandleTagAndVectorSearch, getQueryStrategy: mockGetQueryStrategy, generateSearchEmbedding: mockGenerateSearchEmbedding, - getDocumentNamesByIds: mockGetDocumentNamesByIds, + getDocumentMetadataByIds: mockGetDocumentMetadataByIds, APIError: class APIError extends Error { public status: number constructor(message: string, status: number) { @@ -159,9 +159,9 @@ describe('Knowledge Search API Route', () => { singleQueryOptimized: true, }) mockGenerateSearchEmbedding.mockClear().mockResolvedValue([0.1, 0.2, 0.3, 0.4, 0.5]) - mockGetDocumentNamesByIds.mockClear().mockResolvedValue({ - doc1: 'Document 1', - doc2: 'Document 2', + mockGetDocumentMetadataByIds.mockClear().mockResolvedValue({ + doc1: { filename: 'Document 1', sourceUrl: null }, + doc2: { filename: 'Document 2', sourceUrl: null }, }) mockGetDocumentTagDefinitions.mockClear() hybridAuthMockFns.mockCheckSessionOrInternalAuth.mockClear().mockResolvedValue({ @@ -998,8 +998,11 @@ describe('Knowledge Search API Route', () => { }) mockGenerateSearchEmbedding.mockResolvedValue([0.1, 0.2, 0.3]) - mockGetDocumentNamesByIds.mockResolvedValue({ - 
'doc-active': 'Active Document.pdf', + mockGetDocumentMetadataByIds.mockResolvedValue({ + 'doc-active': { + filename: 'Active Document.pdf', + sourceUrl: 'https://example.atlassian.net/wiki/spaces/DOCS/pages/12345', + }, }) const mockTagDefs = { @@ -1023,6 +1026,9 @@ describe('Knowledge Search API Route', () => { expect(data.data.results).toHaveLength(1) expect(data.data.results[0].documentId).toBe('doc-active') expect(data.data.results[0].documentName).toBe('Active Document.pdf') + expect(data.data.results[0].sourceUrl).toBe( + 'https://example.atlassian.net/wiki/spaces/DOCS/pages/12345' + ) }) it('should exclude results from deleted documents in tag search', async () => { @@ -1067,8 +1073,8 @@ describe('Knowledge Search API Route', () => { singleQueryOptimized: true, }) - mockGetDocumentNamesByIds.mockResolvedValue({ - 'doc-active-tagged': 'Active Tagged Document.pdf', + mockGetDocumentMetadataByIds.mockResolvedValue({ + 'doc-active-tagged': { filename: 'Active Tagged Document.pdf', sourceUrl: null }, }) const mockTagDefs = { @@ -1140,8 +1146,8 @@ describe('Knowledge Search API Route', () => { }) mockGenerateSearchEmbedding.mockResolvedValue([0.1, 0.2, 0.3]) - mockGetDocumentNamesByIds.mockResolvedValue({ - 'doc-active-combined': 'Active Combined Search.pdf', + mockGetDocumentMetadataByIds.mockResolvedValue({ + 'doc-active-combined': { filename: 'Active Combined Search.pdf', sourceUrl: null }, }) const mockTagDefs = { diff --git a/apps/sim/app/api/knowledge/search/route.ts b/apps/sim/app/api/knowledge/search/route.ts index 94c09f6c138..f93c0f5afe6 100644 --- a/apps/sim/app/api/knowledge/search/route.ts +++ b/apps/sim/app/api/knowledge/search/route.ts @@ -16,7 +16,7 @@ import type { StructuredFilter } from '@/lib/knowledge/types' import { estimateTokenCount } from '@/lib/tokenization/estimators' import { generateSearchEmbedding, - getDocumentNamesByIds, + getDocumentMetadataByIds, getQueryStrategy, handleTagAndVectorSearch, handleTagOnlySearch, @@ -413,7 +413,7 @@ 
export const POST = withRouteHandler(async (request: NextRequest) => { }) const documentIds = results.map((result) => result.documentId) - const documentNameMap = await getDocumentNamesByIds(documentIds) + const documentMetadataMap = await getDocumentMetadataByIds(documentIds) try { PlatformEvents.knowledgeBaseSearched({ @@ -449,9 +449,11 @@ export const POST = withRouteHandler(async (request: NextRequest) => { }) const rerankerScore = rerankedScores.get(result.id) + const docMeta = documentMetadataMap[result.documentId] return { documentId: result.documentId, - documentName: documentNameMap[result.documentId] || undefined, + documentName: docMeta?.filename || undefined, + sourceUrl: docMeta?.sourceUrl ?? null, content: result.content, chunkIndex: result.chunkIndex, metadata: tags, diff --git a/apps/sim/app/api/knowledge/search/utils.test.ts b/apps/sim/app/api/knowledge/search/utils.test.ts index 9ebdbe89b3c..526fb12c73b 100644 --- a/apps/sim/app/api/knowledge/search/utils.test.ts +++ b/apps/sim/app/api/knowledge/search/utils.test.ts @@ -396,11 +396,11 @@ describe('Knowledge Search Utils', () => { }) }) - describe('getDocumentNamesByIds', () => { + describe('getDocumentMetadataByIds', () => { it('should handle empty input gracefully', async () => { - const { getDocumentNamesByIds } = await import('./utils') + const { getDocumentMetadataByIds } = await import('./utils') - const result = await getDocumentNamesByIds([]) + const result = await getDocumentMetadataByIds([]) expect(result).toEqual({}) }) diff --git a/apps/sim/app/api/knowledge/search/utils.ts b/apps/sim/app/api/knowledge/search/utils.ts index 8ca7e7c438a..afaff875b5b 100644 --- a/apps/sim/app/api/knowledge/search/utils.ts +++ b/apps/sim/app/api/knowledge/search/utils.ts @@ -3,9 +3,22 @@ import { document, embedding } from '@sim/db/schema' import { and, eq, inArray, isNull, sql } from 'drizzle-orm' import type { StructuredFilter } from '@/lib/knowledge/types' -export async function getDocumentNamesByIds( 
+export interface DocumentMetadata { + filename: string + sourceUrl: string | null +} + +/** + * Batch-fetch display metadata for documents referenced by search results. + * Excludes documents that are user-excluded, archived, or soft-deleted — + * mirrors the visibility filters applied inside the search SQL itself, so + * the lookup will never surface metadata for a row a caller could not have + * legitimately matched. Returns a map keyed by document id; missing ids + * indicate the document is no longer visible and should be skipped. + */ +export async function getDocumentMetadataByIds( documentIds: string[] -): Promise> { +): Promise> { if (documentIds.length === 0) { return {} } @@ -15,6 +28,7 @@ export async function getDocumentNamesByIds( .select({ id: document.id, filename: document.filename, + sourceUrl: document.sourceUrl, }) .from(document) .where( @@ -26,12 +40,12 @@ export async function getDocumentNamesByIds( ) ) - const documentNameMap: Record = {} + const map: Record = {} documents.forEach((doc) => { - documentNameMap[doc.id] = doc.filename + map[doc.id] = { filename: doc.filename, sourceUrl: doc.sourceUrl ?? 
null } }) - return documentNameMap + return map } export interface SearchResult { diff --git a/apps/sim/app/api/v1/knowledge/search/route.test.ts b/apps/sim/app/api/v1/knowledge/search/route.test.ts index beaaa02ea59..8c9842ca0b6 100644 --- a/apps/sim/app/api/v1/knowledge/search/route.test.ts +++ b/apps/sim/app/api/v1/knowledge/search/route.test.ts @@ -15,7 +15,7 @@ const { mockHandleTagAndVectorSearch, mockGetQueryStrategy, mockGenerateSearchEmbedding, - mockGetDocumentNamesByIds, + mockGetDocumentMetadataByIds, mockAuthenticateRequest, mockValidateWorkspaceAccess, } = vi.hoisted(() => ({ @@ -24,7 +24,7 @@ const { mockHandleTagAndVectorSearch: vi.fn(), mockGetQueryStrategy: vi.fn(), mockGenerateSearchEmbedding: vi.fn(), - mockGetDocumentNamesByIds: vi.fn(), + mockGetDocumentMetadataByIds: vi.fn(), mockAuthenticateRequest: vi.fn(), mockValidateWorkspaceAccess: vi.fn(), })) @@ -35,7 +35,7 @@ vi.mock('@/app/api/knowledge/search/utils', () => ({ handleTagAndVectorSearch: mockHandleTagAndVectorSearch, getQueryStrategy: mockGetQueryStrategy, generateSearchEmbedding: mockGenerateSearchEmbedding, - getDocumentNamesByIds: mockGetDocumentNamesByIds, + getDocumentMetadataByIds: mockGetDocumentMetadataByIds, })) vi.mock('@/app/api/knowledge/utils', () => knowledgeApiUtilsMock) @@ -81,7 +81,7 @@ describe('v1 knowledge search route — per-KB embedding model', () => { mockGetQueryStrategy.mockReturnValue({ distanceThreshold: 0.5 }) mockGenerateSearchEmbedding.mockResolvedValue([0.1, 0.2, 0.3]) mockHandleVectorOnlySearch.mockResolvedValue([]) - mockGetDocumentNamesByIds.mockResolvedValue({}) + mockGetDocumentMetadataByIds.mockResolvedValue({}) }) it('passes the KB embedding model into generateSearchEmbedding', async () => { @@ -127,6 +127,42 @@ describe('v1 knowledge search route — per-KB embedding model', () => { expect(mockGenerateSearchEmbedding).not.toHaveBeenCalled() }) + it('surfaces sourceUrl from document metadata in search results', async () => { + 
mockCheckKnowledgeBaseAccess.mockResolvedValueOnce({ + hasAccess: true, + knowledgeBase: baseKb('kb-confluence', 'text-embedding-3-small'), + }) + mockHandleVectorOnlySearch.mockResolvedValue([ + { + documentId: 'doc-confluence', + knowledgeBaseId: 'kb-confluence', + content: 'page content', + chunkIndex: 0, + distance: 0.1, + }, + ]) + mockGetDocumentMetadataByIds.mockResolvedValue({ + 'doc-confluence': { + filename: 'Runbook.md', + sourceUrl: 'https://example.atlassian.net/wiki/spaces/DOCS/pages/12345', + }, + }) + + const req = createMockRequest('POST', { + workspaceId: 'ws-1', + knowledgeBaseIds: 'kb-confluence', + query: 'runbook', + }) + const res = await POST(req) + const body = await res.json() + + expect(res.status).toBe(200) + expect(body.data.results[0].sourceUrl).toBe( + 'https://example.atlassian.net/wiki/spaces/DOCS/pages/12345' + ) + expect(body.data.results[0].documentName).toBe('Runbook.md') + }) + it('allows tag-only search across mixed embedding models', async () => { mockHandleTagOnlySearch.mockResolvedValue([]) mockCheckKnowledgeBaseAccess.mockResolvedValueOnce({ diff --git a/apps/sim/app/api/v1/knowledge/search/route.ts b/apps/sim/app/api/v1/knowledge/search/route.ts index fdfc1fb2f1e..32679d24b6b 100644 --- a/apps/sim/app/api/v1/knowledge/search/route.ts +++ b/apps/sim/app/api/v1/knowledge/search/route.ts @@ -8,7 +8,7 @@ import { buildUndefinedTagsError, validateTagValue } from '@/lib/knowledge/tags/ import type { StructuredFilter } from '@/lib/knowledge/types' import { generateSearchEmbedding, - getDocumentNamesByIds, + getDocumentMetadataByIds, getQueryStrategy, handleTagAndVectorSearch, handleTagOnlySearch, @@ -205,7 +205,7 @@ export const POST = withRouteHandler(async (request: NextRequest) => { }) const documentIds = results.map((r) => r.documentId) - const documentNameMap = await getDocumentNamesByIds(documentIds) + const documentMetadataMap = await getDocumentMetadataByIds(documentIds) return NextResponse.json({ success: true, @@ 
-222,9 +222,11 @@ export const POST = withRouteHandler(async (request: NextRequest) => { } }) + const docMeta = documentMetadataMap[result.documentId] return { documentId: result.documentId, - documentName: documentNameMap[result.documentId] || undefined, + documentName: docMeta?.filename || undefined, + sourceUrl: docMeta?.sourceUrl ?? null, content: result.content, chunkIndex: result.chunkIndex, metadata: tags, diff --git a/apps/sim/tools/knowledge/search.ts b/apps/sim/tools/knowledge/search.ts index 09da5193704..3a01ef05afb 100644 --- a/apps/sim/tools/knowledge/search.ts +++ b/apps/sim/tools/knowledge/search.ts @@ -177,6 +177,12 @@ export const knowledgeSearchTool: ToolConfig = { properties: { documentId: { type: 'string', description: 'Document ID' }, documentName: { type: 'string', description: 'Document name' }, + sourceUrl: { + type: 'string', + nullable: true, + description: + 'URL to the original source document (e.g., Confluence page, Google Doc, Notion page). Null for documents without an external source.', + }, content: { type: 'string', description: 'Content of the result' }, chunkIndex: { type: 'number', description: 'Index of the chunk within the document' }, similarity: { type: 'number', description: 'Similarity score of the result' }, diff --git a/apps/sim/tools/knowledge/types.ts b/apps/sim/tools/knowledge/types.ts index 31005d638d7..8b06e17cf36 100644 --- a/apps/sim/tools/knowledge/types.ts +++ b/apps/sim/tools/knowledge/types.ts @@ -36,6 +36,7 @@ export function inferDocumentFileInfo(documentName: string): { export interface KnowledgeSearchResult { documentId: string documentName: string + sourceUrl: string | null content: string chunkIndex: number metadata: Record diff --git a/apps/sim/tools/types.ts b/apps/sim/tools/types.ts index 535761646bc..d0917d6466f 100644 --- a/apps/sim/tools/types.ts +++ b/apps/sim/tools/types.ts @@ -46,6 +46,7 @@ export interface OutputProperty { type: OutputType description?: string optional?: boolean + nullable?: 
boolean properties?: Record items?: { type: OutputType From 1d3ca79779e359040f55c09fef095a13dfd2a135 Mon Sep 17 00:00:00 2001 From: Waleed Date: Fri, 8 May 2026 23:21:09 -0700 Subject: [PATCH 27/33] chore(deps): audit and clean up dependencies (#4531) * chore(deps): audit and clean up dependencies - Remove unused: chalk, chart.js, dotenv, encoding, entities, thread-stream, uuid, @opentelemetry/exporter-jaeger, critters, marked, redis, soap - Replace soap with hand-rolled Workday SOAP client - Migrate marked to unified pipeline for inbox responses - Bump zustand v5, @react-email/* - Align all @aws-sdk/* to 3.1032.0 - Move type-only deps to devDependencies - Remove duplicate drizzle-orm/postgres overrides * fix(workday): coerce SOAP scalar strings to typed booleans/numbers - XML parser returns leaf text as strings; `!"false"` evaluated to `false`, causing all organizations to report `isActive: false` - Add parseSoapBoolean and parseSoapNumber helpers and apply at consumer sites (Inactive, Total_Results) - Drop unused service/soapAction fields from WD_OPERATIONS map * fix(workday): coerce compensation amounts and guard Date marshaling - get-compensation returned Amount/Per_Unit_Amount/Individual_Target_Amount as strings (XML leaf text), violating the tool's number contract - Coerce via parseSoapNumber and widen plan type to number | string - Add defensive Date branch in marshal() so Date inputs serialize as ISO 8601 instead of String(date) --- .../tools/workday/get-compensation/route.ts | 7 +- .../tools/workday/get-organizations/route.ts | 21 +- .../api/tools/workday/list-workers/route.ts | 3 +- apps/sim/lib/mothership/inbox/response.ts | 25 +- apps/sim/next.config.ts | 3 - apps/sim/package.json | 61 +- apps/sim/tools/workday/index.ts | 3 +- apps/sim/tools/workday/soap.ts | 521 +++++++++++- bun.lock | 777 ++++-------------- 9 files changed, 726 insertions(+), 695 deletions(-) diff --git a/apps/sim/app/api/tools/workday/get-compensation/route.ts 
b/apps/sim/app/api/tools/workday/get-compensation/route.ts index ce6f03e41f8..9ed21ecdc64 100644 --- a/apps/sim/app/api/tools/workday/get-compensation/route.ts +++ b/apps/sim/app/api/tools/workday/get-compensation/route.ts @@ -9,6 +9,7 @@ import { createWorkdaySoapClient, extractRefId, normalizeSoapArray, + parseSoapNumber, type WorkdayCompensationDataSoap, type WorkdayCompensationPlanSoap, type WorkdayWorkerSoap, @@ -60,7 +61,11 @@ export const POST = withRouteHandler(async (request: NextRequest) => { const mapPlan = (p: WorkdayCompensationPlanSoap) => ({ id: extractRefId(p.Compensation_Plan_Reference) ?? null, planName: p.Compensation_Plan_Reference?.attributes?.Descriptor ?? null, - amount: p.Amount ?? p.Per_Unit_Amount ?? p.Individual_Target_Amount ?? null, + amount: + parseSoapNumber(p.Amount) ?? + parseSoapNumber(p.Per_Unit_Amount) ?? + parseSoapNumber(p.Individual_Target_Amount) ?? + null, currency: extractRefId(p.Currency_Reference) ?? null, frequency: extractRefId(p.Frequency_Reference) ?? null, }) diff --git a/apps/sim/app/api/tools/workday/get-organizations/route.ts b/apps/sim/app/api/tools/workday/get-organizations/route.ts index e6e03fbba86..adbf5304242 100644 --- a/apps/sim/app/api/tools/workday/get-organizations/route.ts +++ b/apps/sim/app/api/tools/workday/get-organizations/route.ts @@ -9,6 +9,8 @@ import { createWorkdaySoapClient, extractRefId, normalizeSoapArray, + parseSoapBoolean, + parseSoapNumber, type WorkdayOrganizationSoap, } from '@/tools/workday/soap' @@ -63,15 +65,18 @@ export const POST = withRouteHandler(async (request: NextRequest) => { | undefined ) - const organizations = orgsArray.map((o) => ({ - id: extractRefId(o.Organization_Reference) ?? null, - descriptor: o.Organization_Descriptor ?? null, - type: extractRefId(o.Organization_Data?.Organization_Type_Reference) ?? null, - subtype: extractRefId(o.Organization_Data?.Organization_Subtype_Reference) ?? null, - isActive: o.Organization_Data?.Inactive != null ? 
!o.Organization_Data.Inactive : null, - })) + const organizations = orgsArray.map((o) => { + const inactive = parseSoapBoolean(o.Organization_Data?.Inactive) + return { + id: extractRefId(o.Organization_Reference) ?? null, + descriptor: o.Organization_Descriptor ?? null, + type: extractRefId(o.Organization_Data?.Organization_Type_Reference) ?? null, + subtype: extractRefId(o.Organization_Data?.Organization_Subtype_Reference) ?? null, + isActive: inactive == null ? null : !inactive, + } + }) - const total = result?.Response_Results?.Total_Results ?? organizations.length + const total = parseSoapNumber(result?.Response_Results?.Total_Results) ?? organizations.length return NextResponse.json({ success: true, diff --git a/apps/sim/app/api/tools/workday/list-workers/route.ts b/apps/sim/app/api/tools/workday/list-workers/route.ts index 9fb4406f475..2d7f943d475 100644 --- a/apps/sim/app/api/tools/workday/list-workers/route.ts +++ b/apps/sim/app/api/tools/workday/list-workers/route.ts @@ -9,6 +9,7 @@ import { createWorkdaySoapClient, extractRefId, normalizeSoapArray, + parseSoapNumber, type WorkdayWorkerSoap, } from '@/tools/workday/soap' @@ -61,7 +62,7 @@ export const POST = withRouteHandler(async (request: NextRequest) => { employmentData: w.Worker_Data?.Employment_Data ?? null, })) - const total = result?.Response_Results?.Total_Results ?? workers.length + const total = parseSoapNumber(result?.Response_Results?.Total_Results) ?? 
workers.length return NextResponse.json({ success: true, diff --git a/apps/sim/lib/mothership/inbox/response.ts b/apps/sim/lib/mothership/inbox/response.ts index 786d43c2e22..cd41de83f13 100644 --- a/apps/sim/lib/mothership/inbox/response.ts +++ b/apps/sim/lib/mothership/inbox/response.ts @@ -1,5 +1,10 @@ import { createLogger } from '@sim/logger' -import { marked } from 'marked' +import { toHtml } from 'hast-util-to-html' +import remarkBreaks from 'remark-breaks' +import remarkGfm from 'remark-gfm' +import remarkParse from 'remark-parse' +import remarkRehype from 'remark-rehype' +import { unified } from 'unified' import { getBaseUrl } from '@/lib/core/utils/urls' import * as agentmail from '@/lib/mothership/inbox/agentmail-client' import { replaceUntilStable } from '@/lib/mothership/inbox/format' @@ -37,7 +42,7 @@ export async function sendInboxResponse( : `I wasn't able to complete this task.\n\nError: ${result.error || 'Unknown error'}\n\n[View details](${chatUrl})\n\nBest,\nMothership` const html = result.success - ? renderEmailHtml(result.content, chatUrl) + ? 
await renderEmailHtml(result.content, chatUrl) : renderErrorHtml(result.error || 'Unknown error', chatUrl) try { @@ -93,8 +98,20 @@ function stripUnsafeUrls(html: string): string { return html.replace(/href\s*=\s*"(javascript|vbscript|data):[^"]*"/gi, 'href="#"') } -function renderEmailHtml(markdown: string, chatUrl: string): string { - const bodyHtml = stripUnsafeUrls(marked.parse(stripRawHtml(markdown), { async: false }) as string) +const markdownProcessor = unified() + .use(remarkParse) + .use(remarkGfm) + .use(remarkBreaks) + .use(remarkRehype) + +async function markdownToHtml(markdown: string): Promise { + const mdast = markdownProcessor.parse(markdown) + const hast = await markdownProcessor.run(mdast) + return toHtml(hast) +} + +async function renderEmailHtml(markdown: string, chatUrl: string): Promise { + const bodyHtml = stripUnsafeUrls(await markdownToHtml(stripRawHtml(markdown))) return ` diff --git a/apps/sim/next.config.ts b/apps/sim/next.config.ts index 8d750b386e5..558264ea768 100644 --- a/apps/sim/next.config.ts +++ b/apps/sim/next.config.ts @@ -81,9 +81,6 @@ const nextConfig: NextConfig = { 'unpdf', 'ffmpeg-static', 'fluent-ffmpeg', - 'pino', - 'pino-pretty', - 'thread-stream', 'ws', 'isolated-vm', ], diff --git a/apps/sim/package.json b/apps/sim/package.json index 2304b992c53..dab333326b0 100644 --- a/apps/sim/package.json +++ b/apps/sim/package.json @@ -33,24 +33,24 @@ "@1password/sdk": "0.3.1", "@a2a-js/sdk": "0.3.7", "@anthropic-ai/sdk": "0.71.2", - "@aws-sdk/client-athena": "3.1024.0", - "@aws-sdk/client-bedrock-runtime": "3.940.0", - "@aws-sdk/client-cloudformation": "3.1019.0", - "@aws-sdk/client-cloudwatch": "3.940.0", - "@aws-sdk/client-cloudwatch-logs": "3.940.0", - "@aws-sdk/client-dynamodb": "3.940.0", - "@aws-sdk/client-iam": "3.1029.0", + "@aws-sdk/client-athena": "3.1032.0", + "@aws-sdk/client-bedrock-runtime": "3.1032.0", + "@aws-sdk/client-cloudformation": "3.1032.0", + "@aws-sdk/client-cloudwatch": "3.1032.0", + 
"@aws-sdk/client-cloudwatch-logs": "3.1032.0", + "@aws-sdk/client-dynamodb": "3.1032.0", + "@aws-sdk/client-iam": "3.1032.0", "@aws-sdk/client-identitystore": "3.1032.0", "@aws-sdk/client-organizations": "3.1032.0", - "@aws-sdk/client-rds-data": "3.940.0", - "@aws-sdk/client-s3": "^3.779.0", - "@aws-sdk/client-secrets-manager": "3.940.0", - "@aws-sdk/client-sesv2": "3.940.0", - "@aws-sdk/client-sqs": "3.947.0", + "@aws-sdk/client-rds-data": "3.1032.0", + "@aws-sdk/client-s3": "3.1032.0", + "@aws-sdk/client-secrets-manager": "3.1032.0", + "@aws-sdk/client-sesv2": "3.1032.0", + "@aws-sdk/client-sqs": "3.1032.0", "@aws-sdk/client-sso-admin": "3.1032.0", - "@aws-sdk/client-sts": "3.1029.0", - "@aws-sdk/lib-dynamodb": "3.940.0", - "@aws-sdk/s3-request-presigner": "^3.779.0", + "@aws-sdk/client-sts": "3.1032.0", + "@aws-sdk/lib-dynamodb": "3.1032.0", + "@aws-sdk/s3-request-presigner": "3.1032.0", "@azure/communication-email": "1.0.0", "@azure/storage-blob": "12.27.0", "@better-auth/sso": "1.3.12", @@ -65,7 +65,6 @@ "@modelcontextprotocol/sdk": "1.29.0", "@monaco-editor/react": "4.7.0", "@opentelemetry/api": "^1.9.0", - "@opentelemetry/exporter-jaeger": "2.1.0", "@opentelemetry/exporter-trace-otlp-http": "^0.200.0", "@opentelemetry/resources": "^2.0.0", "@opentelemetry/sdk-node": "^0.200.0", @@ -92,8 +91,8 @@ "@radix-ui/react-toggle": "^1.1.2", "@radix-ui/react-tooltip": "1.2.8", "@radix-ui/react-visually-hidden": "1.2.4", - "@react-email/components": "^0.0.34", - "@react-email/render": "2.0.0", + "@react-email/components": "0.5.7", + "@react-email/render": "2.0.8", "@sim/audit": "workspace:*", "@sim/logger": "workspace:*", "@sim/realtime-protocol": "workspace:*", @@ -106,15 +105,11 @@ "@tanstack/react-query": "5.90.8", "@tanstack/react-query-devtools": "5.90.2", "@trigger.dev/sdk": "4.4.3", - "@types/react-window": "2.0.0", - "@types/three": "0.177.0", "ajv": "8.18.0", "better-auth": "1.3.12", "better-auth-harmony": "1.3.1", "binary-extensions": "^2.0.0", 
"browser-image-compression": "^2.0.2", - "chalk": "5.6.2", - "chart.js": "4.5.1", "cheerio": "1.1.2", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", @@ -127,8 +122,6 @@ "docx": "^9.6.1", "docx-preview": "^0.3.7", "drizzle-orm": "^0.45.2", - "encoding": "0.1.13", - "entities": "6.0.1", "es-toolkit": "1.45.1", "ffmpeg-static": "5.3.0", "fluent-ffmpeg": "2.1.3", @@ -137,6 +130,7 @@ "google-auth-library": "10.5.0", "gray-matter": "^4.0.3", "groq-sdk": "^0.15.0", + "hast-util-to-html": "9.0.5", "html-to-image": "1.11.13", "html-to-text": "^9.0.5", "idb-keyval": "6.2.2", @@ -153,7 +147,6 @@ "jwt-decode": "^4.0.0", "lucide-react": "^0.479.0", "mammoth": "^1.9.0", - "marked": "17.0.4", "mermaid": "11.14.0", "micromatch": "4.0.8", "monaco-editor": "0.55.1", @@ -184,16 +177,16 @@ "react-simple-code-editor": "^0.14.1", "react-window": "2.2.3", "reactflow": "^11.11.4", - "redis": "5.10.0", "rehype-autolink-headings": "^7.1.0", "rehype-slug": "^6.0.0", "remark-breaks": "^4.0.0", "remark-gfm": "4.0.1", + "remark-parse": "11.0.0", + "remark-rehype": "11.1.2", "resend": "^4.1.2", "rss-parser": "3.13.0", "safe-regex2": "5.1.0", "sharp": "0.34.3", - "soap": "1.8.0", "socket.io-client": "4.8.1", "ssh2": "^1.17.0", "streamdown": "2.5.0", @@ -201,14 +194,13 @@ "svix": "1.88.0", "tailwind-merge": "^2.6.0", "tailwindcss-animate": "^1.0.7", - "thread-stream": "4.0.0", "three": "0.177.0", "twilio": "5.9.0", + "unified": "11.0.5", "unpdf": "1.4.0", - "uuid": "^11.1.0", "xlsx": "https://cdn.sheetjs.com/xlsx-0.20.3/xlsx-0.20.3.tgz", "zod": "4.3.6", - "zustand": "^4.5.7" + "zustand": "^5.0.13" }, "devDependencies": { "@sim/testing": "workspace:*", @@ -227,16 +219,15 @@ "@types/prismjs": "^1.26.5", "@types/react": "^19", "@types/react-dom": "^19", + "@types/react-window": "2.0.0", "@types/ssh2": "^1.15.5", + "@types/three": "0.177.0", "@vitejs/plugin-react": "^4.3.4", "@vitest/coverage-v8": "^3.0.8", "autoprefixer": "10.4.21", - "concurrently": "^9.1.0", - "critters": "0.0.25", - 
"dotenv": "^16.4.7", "jsdom": "^26.0.0", "postcss": "^8", - "react-email": "^4.0.13", + "react-email": "4.3.2", "tailwindcss": "^3.4.1", "typescript": "^5.7.3", "vite-tsconfig-paths": "^5.1.4", @@ -252,8 +243,6 @@ "overrides": { "next": "16.2.4", "@next/env": "16.2.4", - "drizzle-orm": "^0.45.2", - "postgres": "^3.4.5", "react-floater": { "react": "$react", "react-dom": "$react-dom" diff --git a/apps/sim/tools/workday/index.ts b/apps/sim/tools/workday/index.ts index 6a93351db52..7642089f518 100644 --- a/apps/sim/tools/workday/index.ts +++ b/apps/sim/tools/workday/index.ts @@ -9,6 +9,7 @@ import { listWorkersTool } from '@/tools/workday/list_workers' import { terminateWorkerTool } from '@/tools/workday/terminate_worker' import { updateWorkerTool } from '@/tools/workday/update_worker' +export * from './types' export { assignOnboardingTool as workdayAssignOnboardingTool, changeJobTool as workdayChangeJobTool, @@ -21,5 +22,3 @@ export { terminateWorkerTool as workdayTerminateWorkerTool, updateWorkerTool as workdayUpdateWorkerTool, } - -export * from './types' diff --git a/apps/sim/tools/workday/soap.ts b/apps/sim/tools/workday/soap.ts index aeba1391a6c..7342c82629b 100644 --- a/apps/sim/tools/workday/soap.ts +++ b/apps/sim/tools/workday/soap.ts @@ -1,5 +1,4 @@ import { createLogger } from '@sim/logger' -import * as soap from 'soap' import { validateWorkdayTenantUrl } from '@/lib/core/security/input-validation' const logger = createLogger('WorkdaySoapClient') @@ -16,10 +15,10 @@ export type WorkdayServiceKey = keyof typeof WORKDAY_SERVICES export interface WorkdaySoapResult { Response_Data?: Record Response_Results?: { - Total_Results?: number - Total_Pages?: number - Page_Results?: number - Page?: number + Total_Results?: number | string + Total_Pages?: number | string + Page_Results?: number | string + Page?: number | string } Event_Reference?: WorkdayReference Employee_Reference?: WorkdayReference @@ -80,9 +79,9 @@ export interface WorkdayCompensationDataSoap { 
export interface WorkdayCompensationPlanSoap { Compensation_Plan_Reference?: WorkdayReference - Amount?: number - Per_Unit_Amount?: number - Individual_Target_Amount?: number + Amount?: number | string + Per_Unit_Amount?: number | string + Individual_Target_Amount?: number | string Currency_Reference?: WorkdayReference Frequency_Reference?: WorkdayReference } @@ -99,7 +98,7 @@ export interface WorkdayOrganizationSoap { export interface WorkdayOrganizationDataSoap { Organization_Type_Reference?: WorkdayReference Organization_Subtype_Reference?: WorkdayReference - Inactive?: boolean + Inactive?: boolean | string } /** @@ -111,11 +110,57 @@ export function normalizeSoapArray(value: T | T[] | undefined): T[] { return Array.isArray(value) ? value : [value] } +/** + * Coerces a SOAP scalar to a boolean. The XML parser returns leaf text as strings, + * so `"true"`/`"false"` must be normalized before boolean operations like negation. + * Returns null when the value is null/undefined or unrecognized. + */ +export function parseSoapBoolean(value: unknown): boolean | null { + if (value == null) return null + if (typeof value === 'boolean') return value + if (typeof value === 'string') { + const trimmed = value.trim().toLowerCase() + if (trimmed === 'true' || trimmed === '1') return true + if (trimmed === 'false' || trimmed === '0') return false + } + return null +} + +/** + * Coerces a SOAP scalar to a number. The XML parser returns leaf text as strings, + * so numeric fields like `Total_Results` must be normalized before arithmetic. + * Returns null when the value is null/undefined or not a finite number. + */ +export function parseSoapNumber(value: unknown): number | null { + if (value == null) return null + if (typeof value === 'number') return Number.isFinite(value) ? value : null + if (typeof value === 'string') { + const trimmed = value.trim() + if (trimmed === '') return null + const n = Number(trimmed) + return Number.isFinite(n) ? 
n : null + } + return null +} + +const WD_OPERATIONS = [ + 'Get_Workers', + 'Get_Organizations', + 'Put_Applicant', + 'Hire_Employee', + 'Change_Job', + 'Terminate_Employee', + 'Change_Personal_Information', + 'Put_Onboarding_Plan_Assignment', +] as const + +type WorkdayOperation = (typeof WD_OPERATIONS)[number] + type SoapOperationFn = ( args: Record ) => Promise<[WorkdaySoapResult, string, Record, string]> -export interface WorkdayClient extends soap.Client { +export interface WorkdayClient { Get_WorkersAsync: SoapOperationFn Get_OrganizationsAsync: SoapOperationFn Put_ApplicantAsync: SoapOperationFn @@ -127,12 +172,12 @@ export interface WorkdayClient extends soap.Client { } /** - * Builds the WSDL URL for a Workday SOAP service. - * Pattern: {tenantUrl}/ccx/service/{tenant}/{serviceName}/{version}?wsdl + * Builds the service endpoint URL for a Workday SOAP service. + * Pattern: {tenantUrl}/ccx/service/{tenant}/{serviceName}/{version} * * @throws Error if tenantUrl is not a trusted Workday-hosted URL (SSRF guard) */ -export function buildWsdlUrl( +export function buildServiceUrl( tenantUrl: string, tenant: string, service: WorkdayServiceKey @@ -143,12 +188,433 @@ export function buildWsdlUrl( } const svc = WORKDAY_SERVICES[service] const baseUrl = (validation.sanitized ?? tenantUrl).replace(/\/$/, '') - return `${baseUrl}/ccx/service/${tenant}/${svc.name}/${svc.version}?wsdl` + return `${baseUrl}/ccx/service/${tenant}/${svc.name}/${svc.version}` +} + +/** + * Builds the WSDL URL for a Workday SOAP service. Retained for backwards compatibility + * with any external consumers; the runtime no longer fetches the WSDL. 
+ */ +export function buildWsdlUrl( + tenantUrl: string, + tenant: string, + service: WorkdayServiceKey +): string { + return `${buildServiceUrl(tenantUrl, tenant, service)}?wsdl` +} + +const XML_ENTITIES: Record = { + '&': '&', + '<': '<', + '>': '>', + '"': '"', + "'": ''', +} + +function escapeXml(value: string): string { + return value.replace(/[&<>"']/g, (c) => XML_ENTITIES[c] ?? c) +} + +function serializeAttributes(attrs?: Record): string { + if (!attrs) return '' + let out = '' + for (const [k, v] of Object.entries(attrs)) { + if (v === undefined || v === null) continue + out += ` ${k}="${escapeXml(String(v))}"` + } + return out +} + +/** + * Marshals a JS value into XML under the `wd:` namespace. + * Conventions: + * - Plain objects become elements with named children + * - `attributes` becomes element attributes + * - `$value` (or `_`) provides the element text content + * - Arrays produce repeated elements with the same name + * - Booleans render as "true"/"false", numbers via String() + */ +function marshal(name: string, value: unknown): string { + if (value === undefined || value === null) return '' + const tag = `wd:${name}` + + if (Array.isArray(value)) { + let out = '' + for (const item of value) { + out += marshal(name, item) + } + return out + } + + if (value instanceof Date) { + return `<${tag}>${value.toISOString()}` + } + + if (typeof value === 'object') { + const obj = value as Record + const attrs = obj.attributes as Record | undefined + const text = (obj.$value ?? 
obj._) as string | number | boolean | undefined + + if (text !== undefined) { + const childKeys = Object.keys(obj).filter( + (k) => k !== 'attributes' && k !== '$value' && k !== '_' + ) + if (childKeys.length === 0) { + return `<${tag}${serializeAttributes(attrs)}>${escapeXml(String(text))}` + } + } + + let inner = '' + for (const [k, v] of Object.entries(obj)) { + if (k === 'attributes' || k === '$value' || k === '_') continue + inner += marshal(k, v) + } + if (text !== undefined) inner = escapeXml(String(text)) + inner + return `<${tag}${serializeAttributes(attrs)}>${inner}` + } + + if (typeof value === 'boolean') { + return `<${tag}>${value ? 'true' : 'false'}` + } + + return `<${tag}>${escapeXml(String(value))}` +} + +function buildEnvelope( + operation: string, + args: Record, + username: string, + password: string +): string { + let body = '' + for (const [k, v] of Object.entries(args)) { + body += marshal(k, v) + } + + const wsseNs = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd' + const wssePwdType = + 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordText' + + return ( + `` + + `` + + `` + + `` + + `` + + `${escapeXml(username)}` + + `${escapeXml(password)}` + + `` + + `` + + `` + + `` + + `` + + body + + `` + + `` + + `` + ) +} + +interface XmlNode { + name: string + localName: string + attributes: Record + children: XmlNode[] + text: string +} + +/** + * Minimal XML parser tuned for Workday SOAP responses: namespaced tags, + * attributes, mixed text, self-closing tags, and CDATA sections. + * Not a general-purpose parser — it does not expand entities beyond the + * standard five and ignores processing instructions and DOCTYPE. 
+ */ +function parseXml(xml: string): XmlNode { + let i = 0 + const len = xml.length + + function skipWhitespace() { + while (i < len && xml.charCodeAt(i) <= 32) i++ + } + + function readName(): string { + const start = i + while (i < len) { + const c = xml[i] + if ( + c === ' ' || + c === '\t' || + c === '\n' || + c === '\r' || + c === '>' || + c === '/' || + c === '=' + ) + break + i++ + } + return xml.slice(start, i) + } + + function readAttributes(): Record { + const attrs: Record = {} + while (i < len) { + skipWhitespace() + const c = xml[i] + if (c === '>' || c === '/' || c === '?') return attrs + const name = readName() + skipWhitespace() + if (xml[i] !== '=') { + attrs[name] = '' + continue + } + i++ // = + skipWhitespace() + const quote = xml[i] + if (quote !== '"' && quote !== "'") { + attrs[name] = '' + continue + } + i++ + const start = i + while (i < len && xml[i] !== quote) i++ + attrs[name] = decodeEntities(xml.slice(start, i)) + if (i < len) i++ // closing quote + } + return attrs + } + + function decodeEntities(s: string): string { + return s.replace(/&(amp|lt|gt|quot|apos|#\d+|#x[0-9a-fA-F]+);/g, (_, ent) => { + switch (ent) { + case 'amp': + return '&' + case 'lt': + return '<' + case 'gt': + return '>' + case 'quot': + return '"' + case 'apos': + return "'" + default: + if (ent.startsWith('#x')) return String.fromCodePoint(Number.parseInt(ent.slice(2), 16)) + if (ent.startsWith('#')) return String.fromCodePoint(Number.parseInt(ent.slice(1), 10)) + return `&${ent};` + } + }) + } + + function localOf(name: string): string { + const idx = name.indexOf(':') + return idx === -1 ? 
name : name.slice(idx + 1) + } + + function parseNode(): XmlNode { + if (xml[i] !== '<') throw new Error(`Expected '<' at ${i}`) + i++ + const name = readName() + const attrs = readAttributes() + skipWhitespace() + const node: XmlNode = { + name, + localName: localOf(name), + attributes: attrs, + children: [], + text: '', + } + if (xml[i] === '/') { + i += 2 // /> + return node + } + if (xml[i] !== '>') throw new Error(`Expected '>' at ${i}`) + i++ + + while (i < len) { + if (xml[i] === '<') { + if (xml.startsWith('', i) + i = end === -1 ? len : end + 3 + continue + } + if (xml.startsWith('', i + 9) + const data = xml.slice(i + 9, end === -1 ? len : end) + node.text += data + i = end === -1 ? len : end + 3 + continue + } + if (xml[i + 1] === '/') { + i += 2 + while (i < len && xml[i] !== '>') i++ + if (i < len) i++ + return node + } + node.children.push(parseNode()) + } else { + const start = i + while (i < len && xml[i] !== '<') i++ + node.text += decodeEntities(xml.slice(start, i)) + } + } + return node + } + + // Skip XML declaration and DOCTYPE + while (i < len) { + skipWhitespace() + if (xml.startsWith('', i) + i = end === -1 ? len : end + 2 + continue + } + if (xml.startsWith('', i) + i = end === -1 ? len : end + 3 + continue + } + if (xml.startsWith('', i) + i = end === -1 ? len : end + 1 + continue + } + if (xml[i] === '<') break + i++ + } + return parseNode() } /** - * Creates a typed SOAP client for a Workday service. - * Uses the `soap` npm package to parse the WSDL and auto-marshall JSON to XML. + * Converts a parsed XML node tree into the JS object shape that the previous + * `soap` library produced: nested objects keyed by local element name, + * attributes under `attributes`, repeated elements collapsed into arrays, + * and pure text nodes returned as strings. 
+ */ +function nodeToValue(node: XmlNode): unknown { + const hasChildren = node.children.length > 0 + const trimmedText = node.text.trim() + const attrKeys = Object.keys(node.attributes).filter( + (k) => k !== 'xmlns' && !k.startsWith('xmlns:') + ) + + if (!hasChildren && attrKeys.length === 0) { + return trimmedText + } + + const obj: Record = {} + if (attrKeys.length > 0) { + const attrs: Record = {} + for (const k of attrKeys) { + const localKey = k.includes(':') ? k.slice(k.indexOf(':') + 1) : k + attrs[localKey] = node.attributes[k] + } + obj.attributes = attrs + } + + if (!hasChildren && trimmedText !== '') { + obj.$value = trimmedText + return obj + } + + for (const child of node.children) { + const key = child.localName + const value = nodeToValue(child) + if (key in obj) { + const existing = obj[key] + if (Array.isArray(existing)) { + existing.push(value) + } else { + obj[key] = [existing, value] + } + } else { + obj[key] = value + } + } + return obj +} + +function findFirst(node: XmlNode, localName: string): XmlNode | null { + if (node.localName === localName) return node + for (const child of node.children) { + const found = findFirst(child, localName) + if (found) return found + } + return null +} + +function extractFaultMessage(envelope: XmlNode): string | null { + const fault = findFirst(envelope, 'Fault') + if (!fault) return null + const faultstring = findFirst(fault, 'faultstring') + if (faultstring?.text.trim()) return faultstring.text.trim() + const reason = findFirst(fault, 'Reason') + if (reason) { + const text = findFirst(reason, 'Text') + if (text?.text.trim()) return text.text.trim() + } + const detail = findFirst(fault, 'detail') ?? findFirst(fault, 'Detail') + if (detail) { + const msg = findFirst(detail, 'Validation_Error') ?? 
findFirst(detail, 'Detail_Message') + if (msg?.text.trim()) return msg.text.trim() + } + return 'SOAP fault returned by Workday' +} + +async function callOperation( + operation: WorkdayOperation, + args: Record, + endpoint: string, + username: string, + password: string +): Promise<[WorkdaySoapResult, string, Record, string]> { + const envelope = buildEnvelope(operation, args, username, password) + + const response = await fetch(endpoint, { + method: 'POST', + headers: { + 'Content-Type': 'text/xml; charset=utf-8', + SOAPAction: `""`, + }, + body: envelope, + }) + + const responseText = await response.text() + + let root: XmlNode + try { + root = parseXml(responseText) + } catch (err) { + logger.error('Failed to parse Workday SOAP response', { + operation, + status: response.status, + error: err instanceof Error ? err.message : String(err), + }) + throw new Error( + `Workday returned an unparseable response (HTTP ${response.status}): ${responseText.slice(0, 500)}` + ) + } + + const fault = extractFaultMessage(root) + if (fault) { + throw new Error(fault) + } + + if (!response.ok) { + throw new Error(`Workday SOAP request failed (HTTP ${response.status})`) + } + + const responseElement = findFirst(root, `${operation}_Response`) + const value = (responseElement ? nodeToValue(responseElement) : {}) as WorkdaySoapResult + + return [value, responseText, {}, envelope] +} + +/** + * Creates a typed SOAP client for a Workday service. The returned object + * exposes the same `Async` methods the previous `soap`-library + * client did, so existing call sites do not change. Internally this issues + * SOAP-over-HTTP requests directly with hand-built envelopes and an XML + * response parser — no WSDL fetch. 
*/ export async function createWorkdaySoapClient( tenantUrl: string, @@ -157,17 +623,28 @@ export async function createWorkdaySoapClient( username: string, password: string ): Promise { - const wsdlUrl = buildWsdlUrl(tenantUrl, tenant, service) - logger.info('Creating Workday SOAP client', { service, wsdlUrl }) + const endpoint = buildServiceUrl(tenantUrl, tenant, service) + logger.info('Creating Workday SOAP client', { service, endpoint }) - const client = await soap.createClientAsync(wsdlUrl) - client.setSecurity(new soap.BasicAuthSecurity(username, password)) - return client as WorkdayClient + function bind(operation: WorkdayOperation): SoapOperationFn { + return (args) => callOperation(operation, args, endpoint, username, password) + } + + return { + Get_WorkersAsync: bind('Get_Workers'), + Get_OrganizationsAsync: bind('Get_Organizations'), + Put_ApplicantAsync: bind('Put_Applicant'), + Hire_EmployeeAsync: bind('Hire_Employee'), + Change_JobAsync: bind('Change_Job'), + Terminate_EmployeeAsync: bind('Terminate_Employee'), + Change_Personal_InformationAsync: bind('Change_Personal_Information'), + Put_Onboarding_Plan_AssignmentAsync: bind('Put_Onboarding_Plan_Assignment'), + } } /** * Builds a Workday object reference in the format the SOAP API expects. 
- * Generates: { ID: { attributes: { type: idType }, $value: idValue } } + * Generates: { ID: { attributes: { 'wd:type': idType }, $value: idValue } } */ export function wdRef(idType: string, idValue: string): { ID: WorkdayIdEntry } { return { diff --git a/bun.lock b/bun.lock index 9c5e1dedc68..29c91ea34bd 100644 --- a/bun.lock +++ b/bun.lock @@ -88,24 +88,24 @@ "@1password/sdk": "0.3.1", "@a2a-js/sdk": "0.3.7", "@anthropic-ai/sdk": "0.71.2", - "@aws-sdk/client-athena": "3.1024.0", - "@aws-sdk/client-bedrock-runtime": "3.940.0", - "@aws-sdk/client-cloudformation": "3.1019.0", - "@aws-sdk/client-cloudwatch": "3.940.0", - "@aws-sdk/client-cloudwatch-logs": "3.940.0", - "@aws-sdk/client-dynamodb": "3.940.0", - "@aws-sdk/client-iam": "3.1029.0", + "@aws-sdk/client-athena": "3.1032.0", + "@aws-sdk/client-bedrock-runtime": "3.1032.0", + "@aws-sdk/client-cloudformation": "3.1032.0", + "@aws-sdk/client-cloudwatch": "3.1032.0", + "@aws-sdk/client-cloudwatch-logs": "3.1032.0", + "@aws-sdk/client-dynamodb": "3.1032.0", + "@aws-sdk/client-iam": "3.1032.0", "@aws-sdk/client-identitystore": "3.1032.0", "@aws-sdk/client-organizations": "3.1032.0", - "@aws-sdk/client-rds-data": "3.940.0", - "@aws-sdk/client-s3": "^3.779.0", - "@aws-sdk/client-secrets-manager": "3.940.0", - "@aws-sdk/client-sesv2": "3.940.0", - "@aws-sdk/client-sqs": "3.947.0", + "@aws-sdk/client-rds-data": "3.1032.0", + "@aws-sdk/client-s3": "3.1032.0", + "@aws-sdk/client-secrets-manager": "3.1032.0", + "@aws-sdk/client-sesv2": "3.1032.0", + "@aws-sdk/client-sqs": "3.1032.0", "@aws-sdk/client-sso-admin": "3.1032.0", - "@aws-sdk/client-sts": "3.1029.0", - "@aws-sdk/lib-dynamodb": "3.940.0", - "@aws-sdk/s3-request-presigner": "^3.779.0", + "@aws-sdk/client-sts": "3.1032.0", + "@aws-sdk/lib-dynamodb": "3.1032.0", + "@aws-sdk/s3-request-presigner": "3.1032.0", "@azure/communication-email": "1.0.0", "@azure/storage-blob": "12.27.0", "@better-auth/sso": "1.3.12", @@ -120,7 +120,6 @@ "@modelcontextprotocol/sdk": 
"1.29.0", "@monaco-editor/react": "4.7.0", "@opentelemetry/api": "^1.9.0", - "@opentelemetry/exporter-jaeger": "2.1.0", "@opentelemetry/exporter-trace-otlp-http": "^0.200.0", "@opentelemetry/resources": "^2.0.0", "@opentelemetry/sdk-node": "^0.200.0", @@ -147,8 +146,8 @@ "@radix-ui/react-toggle": "^1.1.2", "@radix-ui/react-tooltip": "1.2.8", "@radix-ui/react-visually-hidden": "1.2.4", - "@react-email/components": "^0.0.34", - "@react-email/render": "2.0.0", + "@react-email/components": "0.5.7", + "@react-email/render": "2.0.8", "@sim/audit": "workspace:*", "@sim/logger": "workspace:*", "@sim/realtime-protocol": "workspace:*", @@ -161,15 +160,11 @@ "@tanstack/react-query": "5.90.8", "@tanstack/react-query-devtools": "5.90.2", "@trigger.dev/sdk": "4.4.3", - "@types/react-window": "2.0.0", - "@types/three": "0.177.0", "ajv": "8.18.0", "better-auth": "1.3.12", "better-auth-harmony": "1.3.1", "binary-extensions": "^2.0.0", "browser-image-compression": "^2.0.2", - "chalk": "5.6.2", - "chart.js": "4.5.1", "cheerio": "1.1.2", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", @@ -182,8 +177,6 @@ "docx": "^9.6.1", "docx-preview": "^0.3.7", "drizzle-orm": "^0.45.2", - "encoding": "0.1.13", - "entities": "6.0.1", "es-toolkit": "1.45.1", "ffmpeg-static": "5.3.0", "fluent-ffmpeg": "2.1.3", @@ -192,6 +185,7 @@ "google-auth-library": "10.5.0", "gray-matter": "^4.0.3", "groq-sdk": "^0.15.0", + "hast-util-to-html": "9.0.5", "html-to-image": "1.11.13", "html-to-text": "^9.0.5", "idb-keyval": "6.2.2", @@ -208,7 +202,6 @@ "jwt-decode": "^4.0.0", "lucide-react": "^0.479.0", "mammoth": "^1.9.0", - "marked": "17.0.4", "mermaid": "11.14.0", "micromatch": "4.0.8", "monaco-editor": "0.55.1", @@ -239,16 +232,16 @@ "react-simple-code-editor": "^0.14.1", "react-window": "2.2.3", "reactflow": "^11.11.4", - "redis": "5.10.0", "rehype-autolink-headings": "^7.1.0", "rehype-slug": "^6.0.0", "remark-breaks": "^4.0.0", "remark-gfm": "4.0.1", + "remark-parse": "11.0.0", + "remark-rehype": 
"11.1.2", "resend": "^4.1.2", "rss-parser": "3.13.0", "safe-regex2": "5.1.0", "sharp": "0.34.3", - "soap": "1.8.0", "socket.io-client": "4.8.1", "ssh2": "^1.17.0", "streamdown": "2.5.0", @@ -256,14 +249,13 @@ "svix": "1.88.0", "tailwind-merge": "^2.6.0", "tailwindcss-animate": "^1.0.7", - "thread-stream": "4.0.0", "three": "0.177.0", "twilio": "5.9.0", + "unified": "11.0.5", "unpdf": "1.4.0", - "uuid": "^11.1.0", "xlsx": "https://cdn.sheetjs.com/xlsx-0.20.3/xlsx-0.20.3.tgz", "zod": "4.3.6", - "zustand": "^4.5.7", + "zustand": "^5.0.13", }, "devDependencies": { "@sim/testing": "workspace:*", @@ -282,16 +274,15 @@ "@types/prismjs": "^1.26.5", "@types/react": "^19", "@types/react-dom": "^19", + "@types/react-window": "2.0.0", "@types/ssh2": "^1.15.5", + "@types/three": "0.177.0", "@vitejs/plugin-react": "^4.3.4", "@vitest/coverage-v8": "^3.0.8", "autoprefixer": "10.4.21", - "concurrently": "^9.1.0", - "critters": "0.0.25", - "dotenv": "^16.4.7", "jsdom": "^26.0.0", "postcss": "^8", - "react-email": "^4.0.13", + "react-email": "4.3.2", "tailwindcss": "^3.4.1", "typescript": "^5.7.3", "vite-tsconfig-paths": "^5.1.4", @@ -567,39 +558,39 @@ "@aws-crypto/util": ["@aws-crypto/util@5.2.0", "", { "dependencies": { "@aws-sdk/types": "^3.222.0", "@smithy/util-utf8": "^2.0.0", "tslib": "^2.6.2" } }, "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ=="], - "@aws-sdk/client-athena": ["@aws-sdk/client-athena@3.1024.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.973.26", "@aws-sdk/credential-provider-node": "^3.972.29", "@aws-sdk/middleware-host-header": "^3.972.8", "@aws-sdk/middleware-logger": "^3.972.8", "@aws-sdk/middleware-recursion-detection": "^3.972.9", "@aws-sdk/middleware-user-agent": "^3.972.28", "@aws-sdk/region-config-resolver": "^3.972.10", "@aws-sdk/types": "^3.973.6", "@aws-sdk/util-endpoints": "^3.996.5", "@aws-sdk/util-user-agent-browser": 
"^3.972.8", "@aws-sdk/util-user-agent-node": "^3.973.14", "@smithy/config-resolver": "^4.4.13", "@smithy/core": "^3.23.13", "@smithy/fetch-http-handler": "^5.3.15", "@smithy/hash-node": "^4.2.12", "@smithy/invalid-dependency": "^4.2.12", "@smithy/middleware-content-length": "^4.2.12", "@smithy/middleware-endpoint": "^4.4.28", "@smithy/middleware-retry": "^4.4.46", "@smithy/middleware-serde": "^4.2.16", "@smithy/middleware-stack": "^4.2.12", "@smithy/node-config-provider": "^4.3.12", "@smithy/node-http-handler": "^4.5.1", "@smithy/protocol-http": "^5.3.12", "@smithy/smithy-client": "^4.12.8", "@smithy/types": "^4.13.1", "@smithy/url-parser": "^4.2.12", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.44", "@smithy/util-defaults-mode-node": "^4.2.48", "@smithy/util-endpoints": "^3.3.3", "@smithy/util-middleware": "^4.2.12", "@smithy/util-retry": "^4.2.13", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-kKnsdeh58Se7GL+9HX56KWtjS55W3OuzZwGVXq20PXgY2N53d6+NI9I1w+X0cZJo2pz3JijiJ+3S76YYCBoprw=="], + "@aws-sdk/client-athena": ["@aws-sdk/client-athena@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", 
"@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-/3RrC4J644U1ZlqcGyGCRf2cyCH/xWs2B6PewlKWeyTq2uWSRtY+v5CkEQ51fRm2Y5wfhuxoU9FO1jKIKm9fSA=="], - "@aws-sdk/client-bedrock-runtime": ["@aws-sdk/client-bedrock-runtime@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-node": "3.940.0", "@aws-sdk/eventstream-handler-node": "3.936.0", "@aws-sdk/middleware-eventstream": "3.936.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/middleware-websocket": "3.936.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/token-providers": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/eventstream-serde-browser": "^4.2.5", "@smithy/eventstream-serde-config-resolver": "^4.3.5", "@smithy/eventstream-serde-node": "^4.2.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": 
"^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-stream": "^4.5.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-Gs6UUQP1zt8vahOxJ3BADcb3B+2KldUNA3bKa+KdK58de7N7tLJFJfZuXhFGGtwyNPh1aw6phtdP6dauq3OLWA=="], + "@aws-sdk/client-bedrock-runtime": ["@aws-sdk/client-bedrock-runtime@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/eventstream-handler-node": "^3.972.14", "@aws-sdk/middleware-eventstream": "^3.972.10", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/middleware-websocket": "^3.972.16", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/token-providers": "3.1032.0", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/eventstream-serde-browser": "^4.2.14", "@smithy/eventstream-serde-config-resolver": "^4.3.14", 
"@smithy/eventstream-serde-node": "^4.2.14", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-stream": "^4.5.23", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-fSRz/48As9c3DeS+9ZWd7kk9171pJntCCuehHBDeprD9CPF+C+ATaVNJ5SOLE5RIBR2IHOVTwjAgJt/nkS/6Yg=="], - "@aws-sdk/client-cloudformation": ["@aws-sdk/client-cloudformation@3.1019.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.973.25", "@aws-sdk/credential-provider-node": "^3.972.27", "@aws-sdk/middleware-host-header": "^3.972.8", "@aws-sdk/middleware-logger": "^3.972.8", "@aws-sdk/middleware-recursion-detection": "^3.972.9", "@aws-sdk/middleware-user-agent": "^3.972.26", "@aws-sdk/region-config-resolver": "^3.972.10", "@aws-sdk/types": "^3.973.6", "@aws-sdk/util-endpoints": "^3.996.5", "@aws-sdk/util-user-agent-browser": "^3.972.8", "@aws-sdk/util-user-agent-node": "^3.973.12", "@smithy/config-resolver": "^4.4.13", "@smithy/core": "^3.23.12", "@smithy/fetch-http-handler": "^5.3.15", "@smithy/hash-node": "^4.2.12", "@smithy/invalid-dependency": "^4.2.12", "@smithy/middleware-content-length": "^4.2.12", 
"@smithy/middleware-endpoint": "^4.4.27", "@smithy/middleware-retry": "^4.4.44", "@smithy/middleware-serde": "^4.2.15", "@smithy/middleware-stack": "^4.2.12", "@smithy/node-config-provider": "^4.3.12", "@smithy/node-http-handler": "^4.5.0", "@smithy/protocol-http": "^5.3.12", "@smithy/smithy-client": "^4.12.7", "@smithy/types": "^4.13.1", "@smithy/url-parser": "^4.2.12", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.43", "@smithy/util-defaults-mode-node": "^4.2.47", "@smithy/util-endpoints": "^3.3.3", "@smithy/util-middleware": "^4.2.12", "@smithy/util-retry": "^4.2.12", "@smithy/util-utf8": "^4.2.2", "@smithy/util-waiter": "^4.2.13", "tslib": "^2.6.2" } }, "sha512-RNBtkQQ5IUqTdxaAe7ADwlJ/1qqW5kONLD1Mxr7PUWteEQwYR9ZJYscDul2qNkCWhu/vMKhk+qwJKPkdu2TNzA=="], + "@aws-sdk/client-cloudformation": ["@aws-sdk/client-cloudformation@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", 
"@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "@smithy/util-waiter": "^4.2.16", "tslib": "^2.6.2" } }, "sha512-GfukIY7PEPup/tntZWizG3x3eOtfcw/hH6eeWZqkMoElRYZGXmbhWIBHX5OiMtuC50lfgQh3eS68NtUyS7suUg=="], - "@aws-sdk/client-cloudwatch": ["@aws-sdk/client-cloudwatch@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-node": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-compression": "^4.3.12", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", 
"@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "@smithy/util-waiter": "^4.2.5", "tslib": "^2.6.2" } }, "sha512-C35xpPntRAGdEg3X5iKpSUCBaP3yxYNo1U95qipN/X1e0/TYIDWHwGt8Z1ntRafK19jp5oVzhRQ+PD1JAPSEzA=="], + "@aws-sdk/client-cloudwatch": ["@aws-sdk/client-cloudwatch@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-compression": "^4.3.44", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", 
"@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "@smithy/util-waiter": "^4.2.16", "tslib": "^2.6.2" } }, "sha512-SfgfDRzT2hM7PWXzNNf5uN276xu0UIeAY89p5lfCELechkuP5YRFQUP6RH1uortUpot6PsMKZv8tEaqXvEV7eQ=="], - "@aws-sdk/client-cloudwatch-logs": ["@aws-sdk/client-cloudwatch-logs@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-node": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/eventstream-serde-browser": "^4.2.5", "@smithy/eventstream-serde-config-resolver": "^4.3.5", "@smithy/eventstream-serde-node": "^4.2.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", 
"@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-7dEIO3D98IxA9IhqixPJbzQsBkk4TchHHpFdd0JOhlSlihWhiwbf3ijUePJVXYJxcpRRtMmAMtDRLDzCSO+ZHg=="], + "@aws-sdk/client-cloudwatch-logs": ["@aws-sdk/client-cloudwatch-logs@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/eventstream-serde-browser": "^4.2.14", "@smithy/eventstream-serde-config-resolver": "^4.3.14", "@smithy/eventstream-serde-node": "^4.2.14", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, 
"sha512-pzvsXRUrlq5q0HTmpEUF07koRw1cikeWY4M5brPQimMBZx5VahiIVyacNwD1tr40rKwo72SyFDToBWSnXFVYKA=="], - "@aws-sdk/client-dynamodb": ["@aws-sdk/client-dynamodb@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-node": "3.940.0", "@aws-sdk/middleware-endpoint-discovery": "3.936.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "@smithy/util-waiter": "^4.2.5", "tslib": "^2.6.2" } }, "sha512-u2sXsNJazJbuHeWICvsj6RvNyJh3isedEfPvB21jK/kxcriK+dE/izlKC2cyxUjERCmku0zTFNzY9FhrLbYHjQ=="], + "@aws-sdk/client-dynamodb": ["@aws-sdk/client-dynamodb@3.1032.0", "", { "dependencies": { 
"@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/dynamodb-codec": "^3.973.1", "@aws-sdk/middleware-endpoint-discovery": "^3.972.11", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "@smithy/util-waiter": "^4.2.16", "tslib": "^2.6.2" } }, "sha512-kkXiZBNdWCQAg/8opqAu10TxzdpqMkcGrNAT2ScdfWhCpzYZ2pmSpP8W7BOlA32jYIWnYrEdb808UZsNWYBPAA=="], - "@aws-sdk/client-iam": ["@aws-sdk/client-iam@3.1029.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.973.27", "@aws-sdk/credential-provider-node": "^3.972.30", 
"@aws-sdk/middleware-host-header": "^3.972.9", "@aws-sdk/middleware-logger": "^3.972.9", "@aws-sdk/middleware-recursion-detection": "^3.972.10", "@aws-sdk/middleware-user-agent": "^3.972.29", "@aws-sdk/region-config-resolver": "^3.972.11", "@aws-sdk/types": "^3.973.7", "@aws-sdk/util-endpoints": "^3.996.6", "@aws-sdk/util-user-agent-browser": "^3.972.9", "@aws-sdk/util-user-agent-node": "^3.973.15", "@smithy/config-resolver": "^4.4.14", "@smithy/core": "^3.23.14", "@smithy/fetch-http-handler": "^5.3.16", "@smithy/hash-node": "^4.2.13", "@smithy/invalid-dependency": "^4.2.13", "@smithy/middleware-content-length": "^4.2.13", "@smithy/middleware-endpoint": "^4.4.29", "@smithy/middleware-retry": "^4.5.0", "@smithy/middleware-serde": "^4.2.17", "@smithy/middleware-stack": "^4.2.13", "@smithy/node-config-provider": "^4.3.13", "@smithy/node-http-handler": "^4.5.2", "@smithy/protocol-http": "^5.3.13", "@smithy/smithy-client": "^4.12.9", "@smithy/types": "^4.14.0", "@smithy/url-parser": "^4.2.13", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.45", "@smithy/util-defaults-mode-node": "^4.2.49", "@smithy/util-endpoints": "^3.3.4", "@smithy/util-middleware": "^4.2.13", "@smithy/util-retry": "^4.3.0", "@smithy/util-utf8": "^4.2.2", "@smithy/util-waiter": "^4.2.15", "tslib": "^2.6.2" } }, "sha512-v/5wWvrX3fveCP5UQ4qTCvvD9KCQ3dpnY6uEOCGpkAigli+xzEixl8xNQDCRi9G3KyrhvGaeE2SEfuuoCHX+gw=="], + "@aws-sdk/client-iam": ["@aws-sdk/client-iam@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", 
"@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "@smithy/util-waiter": "^4.2.16", "tslib": "^2.6.2" } }, "sha512-dzLygZx+PIUJ1Iob2l6a3ToqRtF1FQzF+Ps8lPeFaJSibslUt12hmBGUJ7uIVvoXhGzRRsRwtXTCH++XZpVYag=="], "@aws-sdk/client-identitystore": ["@aws-sdk/client-identitystore@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": 
"^3.23.15", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-trBf/GudoOot6vGwgYFfimKC4ExtoC42kXvkoy4g7y6d0VWUpp5oIMRO7wp2w/hv4Y2bI4hDFpW8fhivR9LiEQ=="], "@aws-sdk/client-organizations": ["@aws-sdk/client-organizations@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", 
"@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-t8nnhltAkEXzkUg2BmwbxOBbZAA8wR04ajblWfYQWas7YxI/lmJsT/7nPsuIb6H0uEbuDOfN9Z8eS1CJrZTdZQ=="], - "@aws-sdk/client-rds-data": ["@aws-sdk/client-rds-data@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-node": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", 
"@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-68NH61MvS48CVPfzBNCPdCG4KnNjM+Uj/3DSw7rT9PJvdML9ARS4M2Uqco9POPw+Aj20KBumsEUd6FMVcYBXAA=="], + "@aws-sdk/client-rds-data": ["@aws-sdk/client-rds-data@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", 
"@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-/6TH33L7VF2mssAKhC5y5QT7RBUGgeVjYtAqLruXKihvnWSr/RGq/smeC+LvDC6omEEj3ib4W7n+VBWC5BFm/Q=="], - "@aws-sdk/client-s3": ["@aws-sdk/client-s3@3.1038.0", "", { "dependencies": { "@aws-crypto/sha1-browser": "5.2.0", "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.6", "@aws-sdk/credential-provider-node": "^3.972.37", "@aws-sdk/middleware-bucket-endpoint": "^3.972.10", "@aws-sdk/middleware-expect-continue": "^3.972.10", "@aws-sdk/middleware-flexible-checksums": "^3.974.14", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-location-constraint": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-sdk-s3": "^3.972.35", "@aws-sdk/middleware-ssec": "^3.972.10", "@aws-sdk/middleware-user-agent": "^3.972.36", "@aws-sdk/region-config-resolver": "^3.972.13", "@aws-sdk/signature-v4-multi-region": "^3.996.23", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.8", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.22", "@smithy/config-resolver": "^4.4.17", "@smithy/core": "^3.23.17", "@smithy/eventstream-serde-browser": "^4.2.14", "@smithy/eventstream-serde-config-resolver": "^4.3.14", "@smithy/eventstream-serde-node": "^4.2.14", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-blob-browser": "^4.2.15", "@smithy/hash-node": "^4.2.14", "@smithy/hash-stream-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/md5-js": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.32", "@smithy/middleware-retry": "^4.5.6", "@smithy/middleware-serde": "^4.2.20", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.6.1", "@smithy/protocol-http": 
"^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.49", "@smithy/util-defaults-mode-node": "^4.2.54", "@smithy/util-endpoints": "^3.4.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.5", "@smithy/util-stream": "^4.5.25", "@smithy/util-utf8": "^4.2.2", "@smithy/util-waiter": "^4.3.0", "tslib": "^2.6.2" } }, "sha512-k60qm50bWkaqNfCJe1z28WaqgpztE0wbWVMZw6ZJcTOGfrWFhsJeLCEqtkH8w00iEozKx9GQwdQXz4G0sMGdKA=="], + "@aws-sdk/client-s3": ["@aws-sdk/client-s3@3.1032.0", "", { "dependencies": { "@aws-crypto/sha1-browser": "5.2.0", "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-bucket-endpoint": "^3.972.10", "@aws-sdk/middleware-expect-continue": "^3.972.10", "@aws-sdk/middleware-flexible-checksums": "^3.974.9", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-location-constraint": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-sdk-s3": "^3.972.30", "@aws-sdk/middleware-ssec": "^3.972.10", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/signature-v4-multi-region": "^3.996.18", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/eventstream-serde-browser": "^4.2.14", "@smithy/eventstream-serde-config-resolver": "^4.3.14", "@smithy/eventstream-serde-node": "^4.2.14", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-blob-browser": "^4.2.15", "@smithy/hash-node": 
"^4.2.14", "@smithy/hash-stream-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/md5-js": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-stream": "^4.5.23", "@smithy/util-utf8": "^4.2.2", "@smithy/util-waiter": "^4.2.16", "tslib": "^2.6.2" } }, "sha512-A1wjVhV3IgsZ5td2l4AWgK03EjZ+ldwbiorxuO1hPf7RHJtSdr6oq/gKzyUwP7Tm7ma/M2xS/tplg5C8XB8RWg=="], - "@aws-sdk/client-secrets-manager": ["@aws-sdk/client-secrets-manager@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-node": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", 
"@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-fpxSRsGyuXmyNqEwdGJUDWVgN0v8xR7tr32Quls3K+HnYlnBGFmISu5Pcc+BfwmrZHnPaVpPc+S3PUzTnFpOJg=="], + "@aws-sdk/client-secrets-manager": ["@aws-sdk/client-secrets-manager@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": 
"^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-gdSaBSaghzbCoAeCVbnbBkF1z5IN37+kWhWwHREbc6ulBn2gk+rJGu4jyPzeZGmpKHkICqosjlhB3jJkozWucQ=="], - "@aws-sdk/client-sesv2": ["@aws-sdk/client-sesv2@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-node": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/signature-v4-multi-region": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", 
"@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-jDQ4x2HwB2/UXBS7CTeSDiIb+sVsYGDyxTeXdrRAtqNdGv8kC54fbwokDiJ/mnMyB2gyXWw57BqeDJNkZuLmsw=="], + "@aws-sdk/client-sesv2": ["@aws-sdk/client-sesv2@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/signature-v4-multi-region": "^3.996.18", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, 
"sha512-9DoEx6vqUwtPI+CxZctvMmSjpPYkzugD8oDZqiyE52Ep7J+0otmFgXVAFDNdp5kIjIJnzYgVl6e1RAHKJQcz5g=="], - "@aws-sdk/client-sqs": ["@aws-sdk/client-sqs@3.947.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.947.0", "@aws-sdk/credential-provider-node": "3.947.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-sdk-sqs": "3.946.0", "@aws-sdk/middleware-user-agent": "3.947.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.947.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.7", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/md5-js": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.14", "@smithy/middleware-retry": "^4.4.14", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.10", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.13", "@smithy/util-defaults-mode-node": "^4.2.16", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-8tzFyYGAAnQg+G9eB5zAe0oEo+MJMZ3YEk+8EL4uf2zG5wKxJvTBJZr6U9I1CEXYUde374OyLMyKng+sWyN+wg=="], + "@aws-sdk/client-sqs": ["@aws-sdk/client-sqs@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", 
"@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-sdk-sqs": "^3.972.20", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/md5-js": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-n102sARTLi53Da0JT/2Kvg/bQ4bv+JqA+YQ8OlaM4CgsPn61sMv0x9PxdF6s/KbgZ2HMwYBszNzuvUttN+Beqg=="], "@aws-sdk/client-sso": ["@aws-sdk/client-sso@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", 
"@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-SdqJGWVhmIURvCSgkDditHRO+ozubwZk9aCX9MK8qxyOndhobCndW1ozl3hX9psvMAo9Q4bppjuqy/GHWpjB+A=="], "@aws-sdk/client-sso-admin": ["@aws-sdk/client-sso-admin@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": 
"^4.4.16", "@smithy/core": "^3.23.15", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-nQZibW7Uwflbn7wC3CnA1hXIo34f2oxvPsfmAnzqyOhSHL6v1LNwElQywmZStiFjmATIlJVQKvCaj+/MAKscNw=="], - "@aws-sdk/client-sts": ["@aws-sdk/client-sts@3.1029.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.973.27", "@aws-sdk/credential-provider-node": "^3.972.30", "@aws-sdk/middleware-host-header": "^3.972.9", "@aws-sdk/middleware-logger": "^3.972.9", "@aws-sdk/middleware-recursion-detection": "^3.972.10", "@aws-sdk/middleware-user-agent": "^3.972.29", "@aws-sdk/region-config-resolver": "^3.972.11", "@aws-sdk/types": "^3.973.7", "@aws-sdk/util-endpoints": "^3.996.6", "@aws-sdk/util-user-agent-browser": "^3.972.9", "@aws-sdk/util-user-agent-node": "^3.973.15", "@smithy/config-resolver": "^4.4.14", "@smithy/core": "^3.23.14", "@smithy/fetch-http-handler": "^5.3.16", "@smithy/hash-node": "^4.2.13", "@smithy/invalid-dependency": "^4.2.13", "@smithy/middleware-content-length": "^4.2.13", "@smithy/middleware-endpoint": "^4.4.29", "@smithy/middleware-retry": "^4.5.0", 
"@smithy/middleware-serde": "^4.2.17", "@smithy/middleware-stack": "^4.2.13", "@smithy/node-config-provider": "^4.3.13", "@smithy/node-http-handler": "^4.5.2", "@smithy/protocol-http": "^5.3.13", "@smithy/smithy-client": "^4.12.9", "@smithy/types": "^4.14.0", "@smithy/url-parser": "^4.2.13", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.45", "@smithy/util-defaults-mode-node": "^4.2.49", "@smithy/util-endpoints": "^3.3.4", "@smithy/util-middleware": "^4.2.13", "@smithy/util-retry": "^4.3.0", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-9C2WAs0ECcQvaQWRBetVGjxlvNpVpNWTwIuf3oA106JOtb2EjxJ2s4JQQUPCiCH1qP9HzZ3Zf9MDEEJox0HT4Q=="], + "@aws-sdk/client-sts": ["@aws-sdk/client-sts@3.1032.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.1", "@aws-sdk/credential-provider-node": "^3.972.32", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.31", "@aws-sdk/region-config-resolver": "^3.972.12", "@aws-sdk/signature-v4-multi-region": "^3.996.18", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.7", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.17", "@smithy/config-resolver": "^4.4.16", "@smithy/core": "^3.23.15", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/middleware-retry": "^4.5.3", "@smithy/middleware-serde": "^4.2.18", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.5.3", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": 
"^4.12.11", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.47", "@smithy/util-defaults-mode-node": "^4.2.52", "@smithy/util-endpoints": "^3.4.1", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-FCLc5VWb+yz1xb/Jv0sXFGqIIs+bHZQWBKbPQKCuypF3wU/7UFygXuSXo9uJfwISKNGVHJwp+0136f8mqmzRcA=="], "@aws-sdk/core": ["@aws-sdk/core@3.974.6", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@aws-sdk/xml-builder": "^3.972.20", "@smithy/core": "^3.23.17", "@smithy/node-config-provider": "^4.3.14", "@smithy/property-provider": "^4.2.14", "@smithy/protocol-http": "^5.3.14", "@smithy/signature-v4": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.5", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-8Vu7zGxu+39ChR/s5J7nXBw3a2kMHAi0OfKT8ohgTVjX0qYed/8mIfdBb638oBmKrWCwwKjYAM5J/4gMJ8nAJA=="], @@ -621,17 +612,19 @@ "@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.972.36", "", { "dependencies": { "@aws-sdk/core": "^3.974.6", "@aws-sdk/nested-clients": "^3.997.4", "@aws-sdk/types": "^3.973.8", "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-uzrURO7frJhHQVVNR5zBJcCYeMYflmXcWBK1+MiBym2Dfjh6nXATrMixrmGZi+97Q7ETZ+y/4lUwAy0Nfnznjw=="], - "@aws-sdk/endpoint-cache": ["@aws-sdk/endpoint-cache@3.893.0", "", { "dependencies": { "mnemonist": "0.38.3", "tslib": "^2.6.2" } }, "sha512-KSwTfyLZyNLszz5f/yoLC+LC+CRKpeJii/+zVAy7JUOQsKhSykiRUPYUx7o2Sdc4oJfqqUl26A/jSttKYnYtAA=="], + "@aws-sdk/dynamodb-codec": ["@aws-sdk/dynamodb-codec@3.973.8", "", { "dependencies": { 
"@aws-sdk/core": "^3.974.8", "@smithy/core": "^3.23.17", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "tslib": "^2.6.2" } }, "sha512-dYQ/cQqHZd23hcl8oEGwPphTqyGnmvf2HrVmz4J90Q5Bv89oJjlwcBcifiiTvApqsVpx7Pr0IebMpkYwWJvZlQ=="], + + "@aws-sdk/endpoint-cache": ["@aws-sdk/endpoint-cache@3.972.5", "", { "dependencies": { "mnemonist": "0.38.3", "tslib": "^2.6.2" } }, "sha512-itVdge0NozgtgmtbZ25FVwWU3vGlE7x7feE/aOEJNkQfEpbkrF8Rj1QmnK+2blFfYE1xWt/iU+6/jUp/pv1+MA=="], - "@aws-sdk/eventstream-handler-node": ["@aws-sdk/eventstream-handler-node@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/eventstream-codec": "^4.2.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-4zIbhdRmol2KosIHmU31ATvNP0tkJhDlRj9GuawVJoEnMvJA1pd2U3SRdiOImJU3j8pT46VeS4YMmYxfjGHByg=="], + "@aws-sdk/eventstream-handler-node": ["@aws-sdk/eventstream-handler-node@3.972.14", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@smithy/eventstream-codec": "^4.2.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-m4X56gxG76/CKfxNVbOFuYwnAZcHgS6HOH8lgp15HoGHIAVTcZfZrXvcYzJFOMLEJgVn+JHBu6EiNV+xSNXXFg=="], - "@aws-sdk/lib-dynamodb": ["@aws-sdk/lib-dynamodb@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/util-dynamodb": "3.940.0", "@smithy/core": "^3.18.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" }, "peerDependencies": { "@aws-sdk/client-dynamodb": "^3.940.0" } }, "sha512-5ApYAix2wvJuMszj1lrpg8lm4ipoZMFO8crxtzsdAvxM8TV5bKSRQQ2GA3CMIODrBuSzpXvWueHHrfkx05ZAQw=="], + "@aws-sdk/lib-dynamodb": ["@aws-sdk/lib-dynamodb@3.1032.0", "", { "dependencies": { "@aws-sdk/core": "^3.974.1", "@aws-sdk/util-dynamodb": "^3.996.2", "@smithy/core": "^3.23.15", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "peerDependencies": { "@aws-sdk/client-dynamodb": "^3.1032.0" } }, "sha512-rYGhqP1H0Fy4r1yvWTmEAx0qqy1Zd9OzI8pPkXo6KSEDjZ4EwU+6QN1V+KLX3XTU6FQouF5LTvqLtl/CW4gxyQ=="], 
"@aws-sdk/middleware-bucket-endpoint": ["@aws-sdk/middleware-bucket-endpoint@3.972.10", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-arn-parser": "^3.972.3", "@smithy/node-config-provider": "^4.3.14", "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "@smithy/util-config-provider": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-Vbc2frZH7wXlMNd+ZZSXUEs/l1Sv8Jj4zUnIfwrYF5lwaLdXHZ9xx4U3rjUcaye3HRhFVc+E5DbBxpRAbB16BA=="], - "@aws-sdk/middleware-endpoint-discovery": ["@aws-sdk/middleware-endpoint-discovery@3.936.0", "", { "dependencies": { "@aws-sdk/endpoint-cache": "3.893.0", "@aws-sdk/types": "3.936.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-wNJZ8PDw0eQK2x4z1q8JqiDvw9l9xd36EoklVT2CIBt8FnqGdrMGjAx93RRbH3G6Fmvwoe+D3VJXbWHBlhD0Bw=="], + "@aws-sdk/middleware-endpoint-discovery": ["@aws-sdk/middleware-endpoint-discovery@3.972.11", "", { "dependencies": { "@aws-sdk/endpoint-cache": "^3.972.5", "@aws-sdk/types": "^3.973.8", "@smithy/node-config-provider": "^4.3.14", "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-vXARCZVFQHdsd6qPPZyC/hh+5x2XsCYKqUQDCqnUlpGpChMpDojOOacQWdLJ+FFXKN8X3cmLOGrtgx/zysCKqQ=="], - "@aws-sdk/middleware-eventstream": ["@aws-sdk/middleware-eventstream@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-XQSH8gzLkk8CDUDxyt4Rdm9owTpRIPdtg2yw9Y2Wl5iSI55YQSiC3x8nM3c4Y4WqReJprunFPK225ZUDoYCfZA=="], + "@aws-sdk/middleware-eventstream": ["@aws-sdk/middleware-eventstream@3.972.10", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-QUqLs7Af1II9X4fCRAu+EGHG3KHyOp4RkuLhRKoA3NuFlh6TL8i+zXBl8w2LUxqm44B/Kom45hgSlwA1SpTsXQ=="], "@aws-sdk/middleware-expect-continue": 
["@aws-sdk/middleware-expect-continue@3.972.10", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-2Yn0f1Qiq/DjxYR3wfI3LokXnjOhFM7Ssn4LTdFDIxRMCE6I32MAsVnhPX1cUZsuVA9tiZtwwhlSLAtFGxAZlQ=="], @@ -647,29 +640,29 @@ "@aws-sdk/middleware-sdk-s3": ["@aws-sdk/middleware-sdk-s3@3.972.35", "", { "dependencies": { "@aws-sdk/core": "^3.974.6", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-arn-parser": "^3.972.3", "@smithy/core": "^3.23.17", "@smithy/node-config-provider": "^4.3.14", "@smithy/protocol-http": "^5.3.14", "@smithy/signature-v4": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/util-config-provider": "^4.2.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-stream": "^4.5.25", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-lLppaNTAz+wNgLdi4FtHzrlwrGF0ODTnBWHBaFg85SKs0eJ+M+tP5ifrA8f/0lNd+Ak3MC1NGC6RavV3ny4HTg=="], - "@aws-sdk/middleware-sdk-sqs": ["@aws-sdk/middleware-sdk-sqs@3.946.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/smithy-client": "^4.9.10", "@smithy/types": "^4.9.0", "@smithy/util-hex-encoding": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-+KedlcXUqA1Bdafvw264SWvwyHYvFxn47y831tEKc85fp5VF5LGE9uMlU13hsWySftLmDd/ZFwSQI6RN2zSpAg=="], + "@aws-sdk/middleware-sdk-sqs": ["@aws-sdk/middleware-sdk-sqs@3.972.22", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/util-hex-encoding": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-DtR3mEiOUJcnEX/QuXmvbJto6xvQzp2ftnHb29c0aQYdmmzbKf0gsu9ovx1i/yy4ZR6m0rttTucS0iiP32dlGA=="], "@aws-sdk/middleware-ssec": ["@aws-sdk/middleware-ssec@3.972.10", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, 
"sha512-Gli9A0u8EVVb+5bFDGS/QbSVg28w/wpEidg1ggVcSj65BDTdGR6punsOcVjqdiu1i42WHWo51MCvARPIIz9juw=="], "@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.972.36", "", { "dependencies": { "@aws-sdk/core": "^3.974.6", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.8", "@smithy/core": "^3.23.17", "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "@smithy/util-retry": "^4.3.5", "tslib": "^2.6.2" } }, "sha512-O2beToxguBvrZFFZ+fFgPbbae8MvyIBjQ6lImee4APHEXXNAD5ZJ2ayLF1mb7rsKw86TM81y5czg82bZncjSjg=="], - "@aws-sdk/middleware-websocket": ["@aws-sdk/middleware-websocket@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/util-format-url": "3.936.0", "@smithy/eventstream-codec": "^4.2.5", "@smithy/eventstream-serde-browser": "^4.2.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/types": "^4.9.0", "@smithy/util-hex-encoding": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-bPe3rqeugyj/MmjP0yBSZox2v1Wa8Dv39KN+RxVbQroLO8VUitBo6xyZ0oZebhZ5sASwSg58aDcMlX0uFLQnTA=="], + "@aws-sdk/middleware-websocket": ["@aws-sdk/middleware-websocket@3.972.16", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-format-url": "^3.972.10", "@smithy/eventstream-codec": "^4.2.14", "@smithy/eventstream-serde-browser": "^4.2.14", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/protocol-http": "^5.3.14", "@smithy/signature-v4": "^5.3.14", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-hex-encoding": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-86+S9oCyRVGzoMRpQhxkArp7kD2K75GPmaNevd9B6EyNhWoNvnCZZ3WbgN4j7ZT+jvtvBCGZvI2XHsWZJ+BRIg=="], - "@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": 
"3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-x0mdv6DkjXqXEcQj3URbCltEzW6hoy/1uIL+i8gExP6YKrnhiZ7SzuB4gPls2UOpK5UqLiqXjhRLfBb1C9i4Dw=="], + "@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.997.4", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.6", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.36", "@aws-sdk/region-config-resolver": "^3.972.13", "@aws-sdk/signature-v4-multi-region": "^3.996.23", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.8", "@aws-sdk/util-user-agent-browser": "^3.972.10", 
"@aws-sdk/util-user-agent-node": "^3.973.22", "@smithy/config-resolver": "^4.4.17", "@smithy/core": "^3.23.17", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.32", "@smithy/middleware-retry": "^4.5.6", "@smithy/middleware-serde": "^4.2.20", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.6.1", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.49", "@smithy/util-defaults-mode-node": "^4.2.54", "@smithy/util-endpoints": "^3.4.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.5", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-4Sf+WY1lMJzXlw5MiyCMe/UzdILCwvuaHThbqMXS6dfh9gZy3No360I42RXquOI/ULUOhWy2HCyU0Fp20fQGPQ=="], "@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.972.13", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@smithy/config-resolver": "^4.4.17", "@smithy/node-config-provider": "^4.3.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-CvJ2ZIjK/jVD/lbOpowBVElJyC1YxLTIJ13yM0AEo0t2v7swOzGjSA6lJGH+DwZXQhcjUjoYwc8bVYCX5MDr1A=="], - "@aws-sdk/s3-request-presigner": ["@aws-sdk/s3-request-presigner@3.1038.0", "", { "dependencies": { "@aws-sdk/signature-v4-multi-region": "^3.996.23", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-format-url": "^3.972.10", "@smithy/middleware-endpoint": "^4.4.32", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-2PNCm+2Mx8v2GKRREKMS3PavahzRhmMMJjuJxUpLneQV4w3oMs2bpme62oU6l+hip1pyeyPimWHeabjhaURocw=="], + 
"@aws-sdk/s3-request-presigner": ["@aws-sdk/s3-request-presigner@3.1032.0", "", { "dependencies": { "@aws-sdk/signature-v4-multi-region": "^3.996.18", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-format-url": "^3.972.10", "@smithy/middleware-endpoint": "^4.4.30", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-LFaI5JQhiOmJDjKK02ir9oERU9AmxdyEvzv332oPDzAzWeNH06sZ1WsF3xRBBE5tbEH2jIc79N8EqDCY0s5kKQ=="], "@aws-sdk/signature-v4-multi-region": ["@aws-sdk/signature-v4-multi-region@3.996.23", "", { "dependencies": { "@aws-sdk/middleware-sdk-s3": "^3.972.35", "@aws-sdk/types": "^3.973.8", "@smithy/protocol-http": "^5.3.14", "@smithy/signature-v4": "^5.3.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-wBbys3Y53Ikly556vyADurKpYQHXS7Jjaskbz+Ga9PZCz7PB/9f3VdKbDlz7dqIzn+xwz7L/a6TR4iXcOi8IRw=="], - "@aws-sdk/token-providers": ["@aws-sdk/token-providers@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-k5qbRe/ZFjW9oWEdzLIa2twRVIEx7p/9rutofyrRysrtEnYh3HAWCngAnwbgKMoiwa806UzcTRx0TjyEpnKcCg=="], + "@aws-sdk/token-providers": ["@aws-sdk/token-providers@3.1032.0", "", { "dependencies": { "@aws-sdk/core": "^3.974.1", "@aws-sdk/nested-clients": "^3.996.21", "@aws-sdk/types": "^3.973.8", "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-n+PU8Z+gll7p3wDrH+Wo6fkt8sPrVnq30YYM6Ryga95oJlEneNMEbDHj0iqjMX3V7gaGdJo/hJWyPo4lscP+mA=="], "@aws-sdk/types": ["@aws-sdk/types@3.973.8", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-gjlAdtHMbtR9X5iIhVUvbVcy55KnznpC6bkDUWW9z915bi0ckdUr5cjf16Kp6xq0bP5HBD2xzgbL9F9Quv5vUw=="], "@aws-sdk/util-arn-parser": 
["@aws-sdk/util-arn-parser@3.972.3", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-HzSD8PMFrvgi2Kserxuff5VitNq2sgf3w9qxmskKDiDTThWfVteJxuCS9JXiPIPtmCrp+7N9asfIaVhBFORllA=="], - "@aws-sdk/util-dynamodb": ["@aws-sdk/util-dynamodb@3.940.0", "", { "dependencies": { "tslib": "^2.6.2" }, "peerDependencies": { "@aws-sdk/client-dynamodb": "^3.940.0" } }, "sha512-T8UTYtCYSPxktnk68fKBdWztnqdTQItJwi/8N9lsvp20alJ15wCQsvQR+GKB5p4TCKxOPyNEirkcrNlf5TKppA=="], + "@aws-sdk/util-dynamodb": ["@aws-sdk/util-dynamodb@3.996.2", "", { "dependencies": { "tslib": "^2.6.2" }, "peerDependencies": { "@aws-sdk/client-dynamodb": "^3.1003.0" } }, "sha512-ddpwaZmjBzcApYN7lgtAXjk+u+GO8fiPsxzuc59UqP+zqdxI1gsenPvkyiHiF9LnYnyRGijz6oN2JylnN561qQ=="], "@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.996.8", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-endpoints": "^3.4.2", "tslib": "^2.6.2" } }, "sha512-oOZHcRDihk5iEe5V25NVWg45b3qEA8OpHWVdU/XQh8Zj4heVPAJqWvMphQnU7LkufmUo10EpvFPZuQMiFLJK3g=="], @@ -1113,9 +1106,7 @@ "@opentelemetry/context-async-hooks": ["@opentelemetry/context-async-hooks@2.0.0", "", { "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-IEkJGzK1A9v3/EHjXh3s2IiFc6L4jfK+lNgKVgUjeUJQRRhnVFMIO3TAvKwonm9O1HebCuoOt98v8bZW7oVQHA=="], - "@opentelemetry/core": ["@opentelemetry/core@2.1.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-RMEtHsxJs/GiHHxYT58IY57UXAQTuUnZVco6ymDEqTNlJKTimM4qPUPVe8InNFyBjhHBEAx4k3Q8LtNayBsbUQ=="], - - "@opentelemetry/exporter-jaeger": ["@opentelemetry/exporter-jaeger@2.1.0", "", { "dependencies": { "@opentelemetry/core": "2.1.0", "@opentelemetry/sdk-trace-base": "2.1.0", "@opentelemetry/semantic-conventions": "^1.29.0", "jaeger-client": "^3.15.0" }, "peerDependencies": { "@opentelemetry/api": "^1.0.0" } }, 
"sha512-qtUMsp8061pQn6ZN9dngH6okiiF0NlHYBLWprzLeeCmNN7i5UHM+V8GmxvUH4L/zXlNBsySq7p3fZHIIbmK9xg=="], + "@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], "@opentelemetry/exporter-logs-otlp-grpc": ["@opentelemetry/exporter-logs-otlp-grpc@0.200.0", "", { "dependencies": { "@grpc/grpc-js": "^1.7.1", "@opentelemetry/core": "2.0.0", "@opentelemetry/otlp-exporter-base": "0.200.0", "@opentelemetry/otlp-grpc-exporter-base": "0.200.0", "@opentelemetry/otlp-transformer": "0.200.0", "@opentelemetry/sdk-logs": "0.200.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+3MDfa5YQPGM3WXxW9kqGD85Q7s9wlEMVNhXXG7tYFLnIeaseUt9YtCeFhEDFzfEktacdFpOtXmJuNW8cHbU5A=="], @@ -1169,8 +1160,6 @@ "@orama/orama": ["@orama/orama@3.1.18", "", {}, "sha512-a61ljmRVVyG5MC/698C8/FfFDw5a8LOIvyOLW5fztgUXqUpc1jOfQzOitSCbge657OgXXThmY3Tk8fpiDb4UcA=="], - "@paralleldrive/cuid2": ["@paralleldrive/cuid2@2.3.1", "", { "dependencies": { "@noble/hashes": "^1.1.5" } }, "sha512-XO7cAxhnTZl0Yggq6jOgjiOHhbgcO4NqFqwSmQpjK3b6TEE6Uj/jfSk6wzYyemh3+I0sHirKSetjQwn5cZktFw=="], - "@pdf-lib/standard-fonts": ["@pdf-lib/standard-fonts@1.0.0", "", { "dependencies": { "pako": "^1.0.6" } }, "sha512-hU30BK9IUN/su0Mn9VdlVKsWBS6GyhVfqjwl1FjZN4TxP6cCw0jP2w7V3Hf5uX7M0AZJ16vey9yE0ny7Sa59ZA=="], "@pdf-lib/upng": ["@pdf-lib/upng@1.0.1", "", { "dependencies": { "pako": "^1.0.10" } }, "sha512-dQK2FUMQtowVP00mtIksrlZhdFXQZPC+taih1q4CvPZ5vqdxR/LKBaFg0oAfzd1GlHZXXSPdQfzQnt+ViGvEIQ=="], @@ -1331,17 +1320,17 @@ "@radix-ui/rect": ["@radix-ui/rect@1.1.1", "", {}, "sha512-HPwpGIzkl28mWyZqG52jiqDJ12waP11Pa1lGoiyUkIEuMLBP0oeK/C89esbXrxsky5we7dfd8U58nm0SgAWpVw=="], - "@react-email/body": ["@react-email/body@0.0.11", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, 
"sha512-ZSD2SxVSgUjHGrB0Wi+4tu3MEpB4fYSbezsFNEJk2xCWDBkFiOeEsjTmR5dvi+CxTK691hQTQlHv0XWuP7ENTg=="], + "@react-email/body": ["@react-email/body@0.1.0", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-o1bcSAmDYNNHECbkeyceCVPGmVsYvT+O3sSO/Ct7apKUu3JphTi31hu+0Nwqr/pgV5QFqdoT5vdS3SW5DJFHgQ=="], - "@react-email/button": ["@react-email/button@0.0.19", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-HYHrhyVGt7rdM/ls6FuuD6XE7fa7bjZTJqB2byn6/oGsfiEZaogY77OtoLL/mrQHjHjZiJadtAMSik9XLcm7+A=="], + "@react-email/button": ["@react-email/button@0.2.0", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-8i+v6cMxr2emz4ihCrRiYJPp2/sdYsNNsBzXStlcA+/B9Umpm5Jj3WJKYpgTPM+aeyiqlG/MMI1AucnBm4f1oQ=="], - "@react-email/code-block": ["@react-email/code-block@0.0.11", "", { "dependencies": { "prismjs": "1.29.0" }, "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-4D43p+LIMjDzm66gTDrZch0Flkip5je91mAT7iGs6+SbPyalHgIA+lFQoQwhz/VzHHLxuD0LV6gwmU/WUQ2WEg=="], + "@react-email/code-block": ["@react-email/code-block@0.1.0", "", { "dependencies": { "prismjs": "^1.30.0" }, "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-jSpHFsgqnQXxDIssE4gvmdtFncaFQz5D6e22BnVjcCPk/udK+0A9jRwGFEG8JD2si9ZXBmU4WsuqQEczuZn4ww=="], "@react-email/code-inline": ["@react-email/code-inline@0.0.5", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-MmAsOzdJpzsnY2cZoPHFPk6uDO/Ncpb4Kh1hAt9UZc1xOW3fIzpe1Pi9y9p6wwUmpaeeDalJxAxH6/fnTquinA=="], "@react-email/column": ["@react-email/column@0.0.13", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-Lqq17l7ShzJG/d3b1w/+lVO+gp2FM05ZUo/nW0rjxB8xBICXOVv6PqjDnn3FXKssvhO5qAV20lHM6S+spRhEwQ=="], - "@react-email/components": ["@react-email/components@0.0.34", "", { "dependencies": { "@react-email/body": "0.0.11", "@react-email/button": "0.0.19", "@react-email/code-block": "0.0.11", 
"@react-email/code-inline": "0.0.5", "@react-email/column": "0.0.13", "@react-email/container": "0.0.15", "@react-email/font": "0.0.9", "@react-email/head": "0.0.12", "@react-email/heading": "0.0.15", "@react-email/hr": "0.0.11", "@react-email/html": "0.0.11", "@react-email/img": "0.0.11", "@react-email/link": "0.0.12", "@react-email/markdown": "0.0.14", "@react-email/preview": "0.0.12", "@react-email/render": "1.0.5", "@react-email/row": "0.0.12", "@react-email/section": "0.0.16", "@react-email/tailwind": "1.0.4", "@react-email/text": "0.1.0" }, "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-9aUJJ4Yu5Cd5++2GHwdkmOHCghi0vPP/aZwMCGNNTovBTDCI3mc8YIUrDR7JfscrdkPK4s/E9AoD5lX6d/zITA=="], + "@react-email/components": ["@react-email/components@0.5.7", "", { "dependencies": { "@react-email/body": "0.1.0", "@react-email/button": "0.2.0", "@react-email/code-block": "0.1.0", "@react-email/code-inline": "0.0.5", "@react-email/column": "0.0.13", "@react-email/container": "0.0.15", "@react-email/font": "0.0.9", "@react-email/head": "0.0.12", "@react-email/heading": "0.0.15", "@react-email/hr": "0.0.11", "@react-email/html": "0.0.11", "@react-email/img": "0.0.11", "@react-email/link": "0.0.12", "@react-email/markdown": "0.0.16", "@react-email/preview": "0.0.13", "@react-email/render": "1.4.0", "@react-email/row": "0.0.12", "@react-email/section": "0.0.16", "@react-email/tailwind": "1.2.2", "@react-email/text": "0.1.5" }, "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-ECyVoyDcev2FSQ7C0buXaIJ0+6MRDXNUbCOZwBRrlLdCCRjap2b4+MHrYSTXFzo5kqfjjRoyo/2PbJXFQni67g=="], "@react-email/container": ["@react-email/container@0.0.15", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-Qo2IQo0ru2kZq47REmHW3iXjAQaKu4tpeq/M8m1zHIVwKduL2vYOBQWbC2oDnMtWPmkBjej6XxgtZByxM6cCFg=="], @@ -1359,19 +1348,19 @@ "@react-email/link": ["@react-email/link@0.0.12", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" 
} }, "sha512-vF+xxQk2fGS1CN7UPQDbzvcBGfffr+GjTPNiWM38fhBfsLv6A/YUfaqxWlmL7zLzVmo0K2cvvV9wxlSyNba1aQ=="], - "@react-email/markdown": ["@react-email/markdown@0.0.14", "", { "dependencies": { "md-to-react-email": "5.0.5" }, "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-5IsobCyPkb4XwnQO8uFfGcNOxnsg3311GRXhJ3uKv51P7Jxme4ycC/MITnwIZ10w2zx7HIyTiqVzTj4XbuIHbg=="], + "@react-email/markdown": ["@react-email/markdown@0.0.16", "", { "dependencies": { "marked": "^15.0.12" }, "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-KSUHmoBMYhvc6iGwlIDkm0DRGbGQ824iNjLMCJsBVUoKHGQYs7F/N3b1tnS1YzRUX+GwHIexSsHuIUEi1m+8OQ=="], - "@react-email/preview": ["@react-email/preview@0.0.12", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-g/H5fa9PQPDK6WUEG7iTlC19sAktI23qyoiJtMLqQiXFCfWeQMhqjLGKeLSKkfzszqmfJCjZtpSiKtBoOdxp3Q=="], + "@react-email/preview": ["@react-email/preview@0.0.13", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-F7j9FJ0JN/A4d7yr+aw28p4uX7VLWs7hTHtLo7WRyw4G+Lit6Zucq4UWKRxJC8lpsUdzVmG7aBJnKOT+urqs/w=="], - "@react-email/render": ["@react-email/render@2.0.0", "", { "dependencies": { "html-to-text": "^9.0.5", "prettier": "^3.5.3" }, "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-rdjNj6iVzv8kRKDPFas+47nnoe6B40+nwukuXwY4FCwM7XBg6tmYr+chQryCuavUj2J65MMf6fztk1bxOUiSVA=="], + "@react-email/render": ["@react-email/render@2.0.8", "", { "dependencies": { "html-to-text": "^9.0.5", "prettier": "^3.5.3" }, "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-5udvVr3U/WuGJZfLdLBOhkzrqRWd2Q5ZYmF7ppcy7FzWcwgshdqLMNqJOXcVzAXJXg/2bm7D+WGJzTtZOZMQnQ=="], "@react-email/row": ["@react-email/row@0.0.12", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, 
"sha512-HkCdnEjvK3o+n0y0tZKXYhIXUNPDx+2vq1dJTmqappVHXS5tXS6W5JOPZr5j+eoZ8gY3PShI2LWj5rWF7ZEtIQ=="], "@react-email/section": ["@react-email/section@0.0.16", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-FjqF9xQ8FoeUZYKSdt8sMIKvoT9XF8BrzhT3xiFKdEMwYNbsDflcjfErJe3jb7Wj/es/lKTbV5QR1dnLzGpL3w=="], - "@react-email/tailwind": ["@react-email/tailwind@1.0.4", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-tJdcusncdqgvTUYZIuhNC6LYTfL9vNTSQpwWdTCQhQ1lsrNCEE4OKCSdzSV3S9F32pi0i0xQ+YPJHKIzGjdTSA=="], + "@react-email/tailwind": ["@react-email/tailwind@1.2.2", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-heO9Khaqxm6Ulm6p7HQ9h01oiiLRrZuuEQuYds/O7Iyp3c58sMVHZGIxiRXO/kSs857NZQycpjewEVKF3jhNTw=="], - "@react-email/text": ["@react-email/text@0.1.0", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-LG+gEuxpoIiOojkv40iktP8UVjkJVZ+ksEEuf7zRvrcwLcVuzYyirlWdkGr4Vu/AhsD4FDRoxDWlWvLTx+WHUg=="], + "@react-email/text": ["@react-email/text@0.1.5", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-o5PNHFSE085VMXayxH+SJ1LSOtGsTv+RpNKnTiJDrJUwoBu77G3PlKOsZZQHCNyD28WsQpl9v2WcJLbQudqwPg=="], "@reactflow/background": ["@reactflow/background@11.3.14", "", { "dependencies": { "@reactflow/core": "11.11.4", "classcat": "^5.0.3", "zustand": "^4.4.1" }, "peerDependencies": { "react": ">=17", "react-dom": ">=17" } }, "sha512-Gewd7blEVT5Lh6jqrvOgd4G6Qk17eGKQfsDXgyRSqM+CTwDqRldG2LsWN4sNeno6sbqVIC2fZ+rAUBFA9ZEUDA=="], @@ -1903,8 +1892,6 @@ "ajv-formats": ["ajv-formats@3.0.1", "", { "dependencies": { "ajv": "^8.0.0" } }, "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ=="], - "ansi-color": ["ansi-color@0.2.2", "", {}, "sha512-qPx7iZZDHITYrrfzaUFXQpIcF2xYifcQHQflP1pFz8yY3lfU6GgCHb0+hJD7nimYKO7f2iaYYwBpZ+GaNcAhcA=="], - "ansi-escapes": ["ansi-escapes@4.3.2", "", { "dependencies": { "type-fest": "^0.21.3" 
} }, "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ=="], "ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], @@ -1929,8 +1916,6 @@ "array-flatten": ["array-flatten@1.1.1", "", {}, "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg=="], - "asap": ["asap@2.0.6", "", {}, "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA=="], - "asn1": ["asn1@0.2.6", "", { "dependencies": { "safer-buffer": "~2.1.0" } }, "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ=="], "asn1js": ["asn1js@3.0.10", "", { "dependencies": { "pvtsutils": "^1.3.6", "pvutils": "^1.1.5", "tslib": "^2.8.1" } }, "sha512-S2s3aOytiKdFRdulw2qPE51MzjzVOisppcVv7jVFR+Kw0kxwvFrDcYA0h7Ndqbmj0HkMIXYWaoj7fli8kgx1eg=="], @@ -1957,8 +1942,6 @@ "axios": ["axios@1.15.2", "", { "dependencies": { "follow-redirects": "^1.15.11", "form-data": "^4.0.5", "proxy-from-env": "^2.1.0" } }, "sha512-wLrXxPtcrPTsNlJmKjkPnNPK2Ihe0hn0wGSaTEiHRPxwjvJwT3hKmXF4dpqxmPO9SoNb2FsYXj/xEo0gHN+D5A=="], - "axios-ntlm": ["axios-ntlm@1.4.6", "", { "dependencies": { "axios": "^1.12.2", "des.js": "^1.1.0", "dev-null": "^0.1.1", "js-md4": "^0.3.2" } }, "sha512-4nR5cbVEBfPMTFkd77FEDpDuaR205JKibmrkaQyNwGcCx0szWNpRZaL0jZyMx4+mVY2PXHjRHuJafv9Oipl0Kg=="], - "b4a": ["b4a@1.8.0", "", { "peerDependencies": { "react-native-b4a": "*" }, "optionalPeers": ["react-native-b4a"] }, "sha512-qRuSmNSkGQaHwNbM7J78Wwy+ghLEYF1zNrSeMxj4Kgw6y33O3mXcQ6Ie9fRvfU/YnxWkOchPXbaLb73TkIsfdg=="], "bail": ["bail@2.0.2", "", {}, "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw=="], @@ -2033,8 +2016,6 @@ "bufferutil": ["bufferutil@4.1.0", "", { "dependencies": { "node-gyp-build": "^4.3.0" } }, 
"sha512-ZMANVnAixE6AWWnPzlW2KpUrxhm9woycYvPOo67jWHyFowASTEd9s+QN1EIMsSDtwhIxN4sWE1jotpuDUIgyIw=="], - "bufrw": ["bufrw@1.4.0", "", { "dependencies": { "ansi-color": "^0.2.1", "error": "^7.0.0", "hexer": "^1.5.0", "xtend": "^4.0.0" } }, "sha512-sWm8iPbqvL9+5SiYxXH73UOkyEbGQg7kyHQmReF89WJHQJw2eV4P/yZ0E+b71cczJ4pPobVhXxgQcmfSTgGHxQ=="], - "buildcheck": ["buildcheck@0.0.7", "", {}, "sha512-lHblz4ahamxpTmnsk+MNTRWsjYKv965MwOrSJyeD588rR3Jcu7swE+0wN5F+PbL5cjgu/9ObkhfzEPuofEMwLA=="], "bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="], @@ -2145,8 +2126,6 @@ "concat-stream": ["concat-stream@2.0.0", "", { "dependencies": { "buffer-from": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.0.2", "typedarray": "^0.0.6" } }, "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A=="], - "concurrently": ["concurrently@9.2.1", "", { "dependencies": { "chalk": "4.1.2", "rxjs": "7.8.2", "shell-quote": "1.8.3", "supports-color": "8.1.1", "tree-kill": "1.2.2", "yargs": "17.7.2" }, "bin": { "conc": "dist/bin/concurrently.js", "concurrently": "dist/bin/concurrently.js" } }, "sha512-fsfrO0MxV64Znoy8/l1vVIjjHa29SZyyqPgQBwhiDcaW8wJc2W3XWVOGx4M3oJBnv/zdUZIIp1gDeS98GzP8Ng=="], - "confbox": ["confbox@0.1.8", "", {}, "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w=="], "consola": ["consola@3.4.2", "", {}, "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA=="], @@ -2173,8 +2152,6 @@ "cpu-features": ["cpu-features@0.0.10", "", { "dependencies": { "buildcheck": "~0.0.6", "nan": "^2.19.0" } }, "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA=="], - "critters": ["critters@0.0.25", "", { "dependencies": { "chalk": "^4.1.0", "css-select": "^5.1.0", "dom-serializer": "^2.0.0", "domhandler": "^5.0.2", "htmlparser2": "^8.0.2", "postcss": "^8.4.23", 
"postcss-media-query-parser": "^0.2.3" } }, "sha512-ROF/tjJyyRdM8/6W0VqoN5Ql05xAGnkf5b7f3sTEl1bI5jTQQf8O918RD/V9tEb9pRY/TKcvJekDbJtniHyPtQ=="], - "croner": ["croner@9.1.0", "", {}, "sha512-p9nwwR4qyT5W996vBZhdvBCnMhicY5ytZkR4D1Xj0wuTDEiMnjwR57Q3RXYY/s0EpX6Ay3vgIcfaR+ewGHsi+g=="], "cronstrue": ["cronstrue@3.3.0", "", { "bin": { "cronstrue": "bin/cli.js" } }, "sha512-iwJytzJph1hosXC09zY8F5ACDJKerr0h3/2mOxg9+5uuFObYlgK0m35uUPk4GCvhHc2abK7NfnR9oMqY0qZFAg=="], @@ -2325,8 +2302,6 @@ "dequal": ["dequal@2.0.3", "", {}, "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA=="], - "des.js": ["des.js@1.1.0", "", { "dependencies": { "inherits": "^2.0.1", "minimalistic-assert": "^1.0.0" } }, "sha512-r17GxjhUCjSRy8aiJpr8/UadFIzMzJGexI3Nmz4ADi9LYSFx4gTBp80+NaX/YsXWWLhpZ7v/v/ubEc/bCNfKwg=="], - "destr": ["destr@2.0.5", "", {}, "sha512-ugFTXCtDZunbzasqBxrK93Ik/DRYsO6S/fedkWEMKqt04xZ4csmnmwGDBAb07QWNaGMAmnTIemsYZCksjATwsA=="], "destroy": ["destroy@1.2.0", "", {}, "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg=="], @@ -2335,14 +2310,10 @@ "detect-node-es": ["detect-node-es@1.1.0", "", {}, "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ=="], - "dev-null": ["dev-null@0.1.1", "", {}, "sha512-nMNZG0zfMgmdv8S5O0TM5cpwNbGKRGPCxVsr0SmA3NZZy9CYBbuNLL0PD3Acx9e5LIUgwONXtM9kM6RlawPxEQ=="], - "devlop": ["devlop@1.1.0", "", { "dependencies": { "dequal": "^2.0.0" } }, "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA=="], "devtools-protocol": ["devtools-protocol@0.0.1464554", "", {}, "sha512-CAoP3lYfwAGQTaAXYvA6JZR0fjGUb7qec1qf4mToyoH2TZgUFeIqYcjh6f9jNuhHfuZiEdH+PONHYrLhRQX6aw=="], - "dezalgo": ["dezalgo@1.0.4", "", { "dependencies": { "asap": "^2.0.0", "wrappy": "1" } }, "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig=="], - "didyoumean": ["didyoumean@1.2.2", "", {}, 
"sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw=="], "dingbat-to-unicode": ["dingbat-to-unicode@1.0.1", "", {}, "sha512-98l0sW87ZT58pU4i61wa2OHwxbiYSbuxsCBozaVnYX2iCnr3bLM3fIes1/ej7h1YdOKuKt/MLs706TVnALA65w=="], @@ -2417,14 +2388,12 @@ "enhanced-resolve": ["enhanced-resolve@5.21.0", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.3.3" } }, "sha512-otxSQPw4lkOZWkHpB3zaEQs6gWYEsmX4xQF68ElXC/TWvGxGMSGOvoNbaLXm6/cS/fSfHtsEdw90y20PCd+sCA=="], - "entities": ["entities@6.0.1", "", {}, "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g=="], + "entities": ["entities@2.2.0", "", {}, "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A=="], "env-paths": ["env-paths@2.2.1", "", {}, "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A=="], "environment": ["environment@1.1.0", "", {}, "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q=="], - "error": ["error@7.0.2", "", { "dependencies": { "string-template": "~0.2.1", "xtend": "~4.0.0" } }, "sha512-UtVv4l5MhijsYUxPJo4390gzfZvAnTHreNnDjnTZaKIiZ/SemXxAhBkYSKtWa5RtBXbLP8tMgn/n0RUa/H7jXw=="], - "es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="], "es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="], @@ -2567,8 +2536,6 @@ "formdata-polyfill": ["formdata-polyfill@4.0.10", "", { "dependencies": { "fetch-blob": "^3.1.2" } }, "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g=="], - "formidable": ["formidable@3.5.4", "", { "dependencies": { "@paralleldrive/cuid2": "^2.2.2", "dezalgo": "^1.0.4", "once": "^1.4.0" } }, 
"sha512-YikH+7CUTOtP44ZTnUhR7Ic2UASBPOqmaRkRKxRbywPTe5VxF7RRCck4af9wutiZ/QKM5nME9Bie2fFaPz5Gug=="], - "forwarded": ["forwarded@0.2.0", "", {}, "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow=="], "fraction.js": ["fraction.js@4.3.7", "", {}, "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew=="], @@ -2687,8 +2654,6 @@ "hex-rgb": ["hex-rgb@4.3.0", "", {}, "sha512-Ox1pJVrDCyGHMG9CFg1tmrRUMRPRsAWYc/PinY0XzJU4K7y7vjNoLKIQ7BR5UJMCxNN8EM1MNDmHWA/B3aZUuw=="], - "hexer": ["hexer@1.5.0", "", { "dependencies": { "ansi-color": "^0.2.1", "minimist": "^1.1.0", "process": "^0.10.0", "xtend": "^4.0.0" }, "bin": { "hexer": "./cli.js" } }, "sha512-dyrPC8KzBzUJ19QTIo1gXNqIISRXQ0NwteW6OeQHRN4ZuZeHkdODfj0zHBdOlHbRY8GqbqK57C9oWSvQZizFsg=="], - "hono": ["hono@4.12.15", "", {}, "sha512-qM0jDhFEaCBb4TxoW7f53Qrpv9RBiayUHo0S52JudprkhvpjIrGoU1mnnr29Fvd1U335ZFPZQY1wlkqgfGXyLg=="], "html-encoding-sniffer": ["html-encoding-sniffer@4.0.0", "", { "dependencies": { "whatwg-encoding": "^3.1.1" } }, "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ=="], @@ -2703,7 +2668,7 @@ "html-void-elements": ["html-void-elements@3.0.0", "", {}, "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg=="], - "htmlparser2": ["htmlparser2@8.0.2", "", { "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.3", "domutils": "^3.0.1", "entities": "^4.4.0" } }, "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA=="], + "htmlparser2": ["htmlparser2@10.1.0", "", { "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.3", "domutils": "^3.2.2", "entities": "^7.0.1" } }, "sha512-VTZkM9GWRAtEpveh7MSF6SjjrpNVNNVJfFup7xTY3UpFtm67foy9HDVXneLtFVt4pMz5kZtgNcvCniNFb1hlEQ=="], "http-errors": ["http-errors@2.0.1", "", { "dependencies": { "depd": "~2.0.0", "inherits": "~2.0.4", "setprototypeof": 
"~1.2.0", "statuses": "~2.0.2", "toidentifier": "~1.0.1" } }, "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ=="], @@ -2723,7 +2688,7 @@ "husky": ["husky@9.1.7", "", { "bin": { "husky": "bin.js" } }, "sha512-5gs5ytaNjBrh5Ow3zrvdUUY+0VxIuWVL4i9irt6friV+BqdCfmV11CQTWMiBYWHbXhco+J1kHfTOUkePhCDvMA=="], - "iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], + "iconv-lite": ["iconv-lite@0.7.1", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-2Tth85cXwGFHfvRgZWszZSvdo+0Xsqmw8k8ZwxScfcBneNUraK+dxRxRm24nszx80Y0TVio8kKLt5sLE7ZCLlw=="], "idb-keyval": ["idb-keyval@6.2.2", "", {}, "sha512-yjD9nARJ/jb1g+CvD0tlhUHOrJ9Sy0P8T9MF3YaLlHnSRpwPfpTX0XIvpmw3gAJUmEu3FiICLBDPXVwyEvrleg=="], @@ -2821,16 +2786,12 @@ "jackspeak": ["jackspeak@4.2.3", "", { "dependencies": { "@isaacs/cliui": "^9.0.0" } }, "sha512-ykkVRwrYvFm1nb2AJfKKYPr0emF6IiXDYUaFx4Zn9ZuIH7MrzEZ3sD5RlqGXNRpHtvUHJyOnCEFxOlNDtGo7wg=="], - "jaeger-client": ["jaeger-client@3.19.0", "", { "dependencies": { "node-int64": "^0.4.0", "opentracing": "^0.14.4", "thriftrw": "^3.5.0", "uuid": "^8.3.2", "xorshift": "^1.1.1" } }, "sha512-M0c7cKHmdyEUtjemnJyx/y9uX16XHocL46yQvyqDlPdvAcwPDbHrIbKjQdBqtiE4apQ/9dmr+ZLJYYPGnurgpw=="], - "jiti": ["jiti@2.4.2", "", { "bin": { "jiti": "lib/jiti-cli.mjs" } }, "sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A=="], "jose": ["jose@6.0.11", "", {}, "sha512-QxG7EaliDARm1O1S8BGakqncGT9s25bKL1WSf6/oa17Tkqwi8D2ZNglqCF+DsYF88/rV66Q/Q2mFAy697E1DUg=="], "joycon": ["joycon@3.1.1", "", {}, "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw=="], - "js-md4": ["js-md4@0.3.2", "", {}, "sha512-/GDnfQYsltsjRswQhN9fhv3EMw2sCpUdrdxyWDOUK7eyD++r3gRhzgiQgc/x4MAv2i1iuQ4lxO5mvqM3vj4bwA=="], - "js-tiktoken": 
["js-tiktoken@1.0.21", "", { "dependencies": { "base64-js": "^1.5.1" } }, "sha512-biOj/6M5qdgx5TKjDnFT1ymSpM5tbd3ylwDtrQvFQSu0Z7bBYko2dF+W/aUkXUPuk6IVpRxk/3Q2sHOzGlS36g=="], "js-tokens": ["js-tokens@10.0.0", "", {}, "sha512-lM/UBzQmfJRo9ABXbPWemivdCW8V2G8FHaHdypQaIy523snUjog0W71ayWXTjiR+ixeMyVHN2XcpnTd/liPg/Q=="], @@ -2991,14 +2952,12 @@ "markdown-table": ["markdown-table@3.0.4", "", {}, "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw=="], - "marked": ["marked@17.0.4", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-NOmVMM+KAokHMvjWmC5N/ZOvgmSWuqJB8FoYI019j4ogb/PeRMKoKIjReZ2w3376kkA8dSJIP8uD993Kxc0iRQ=="], + "marked": ["marked@16.4.2", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-TI3V8YYWvkVf3KJe1dRkpnjs68JUPyEa5vjKrp1XEEJUAOaQc+Qj+L1qWbPd0SJuAdQkFU0h73sXXqwDYxsiDA=="], "marky": ["marky@1.3.0", "", {}, "sha512-ocnPZQLNpvbedwTy9kNrQEsknEfgvcLMvOtz3sFeWApDq1MXH1TqkCIx58xlpESsfwQOnuBO9beyQuNGzVvuhQ=="], "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="], - "md-to-react-email": ["md-to-react-email@5.0.5", "", { "dependencies": { "marked": "7.0.4" }, "peerDependencies": { "react": "^18.0 || ^19.0" } }, "sha512-OvAXqwq57uOk+WZqFFNCMZz8yDp8BD3WazW1wAKHUrPbbdr89K9DWS6JXY09vd9xNdPNeurI8DU/X4flcfaD8A=="], - "mdast-util-find-and-replace": ["mdast-util-find-and-replace@3.0.2", "", { "dependencies": { "@types/mdast": "^4.0.0", "escape-string-regexp": "^5.0.0", "unist-util-is": "^6.0.0", "unist-util-visit-parents": "^6.0.0" } }, "sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg=="], "mdast-util-from-markdown": ["mdast-util-from-markdown@2.0.3", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "mdast-util-to-string": "^4.0.0", "micromark": "^4.0.0", 
"micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-decode-string": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0", "unist-util-stringify-position": "^4.0.0" } }, "sha512-W4mAWTvSlKvf8L6J+VN9yLSqQ9AOAAvHuoDAmPkz4dHf553m5gVj2ejadHJhoJmcmxEnOv6Pa8XJhpxE93kb8Q=="], @@ -3223,8 +3182,6 @@ "node-gyp-build": ["node-gyp-build@4.8.4", "", { "bin": { "node-gyp-build": "bin.js", "node-gyp-build-optional": "optional.js", "node-gyp-build-test": "build-test.js" } }, "sha512-LA4ZjwlnUblHVgq0oBF3Jl/6h/Nvs5fzBLwdEF4nuxnFdsfajde4WfxtJr3CaiH+F6ewcIB/q4jQ4UzPyid+CQ=="], - "node-int64": ["node-int64@0.4.0", "", {}, "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw=="], - "node-releases": ["node-releases@2.0.38", "", {}, "sha512-3qT/88Y3FbH/Kx4szpQQ4HzUbVrHPKTLVpVocKiLfoYvw9XSGOX2FmD2d6DrXbVYyAQTF2HeF6My8jmzx7/CRw=="], "node-rsa": ["node-rsa@1.1.1", "", { "dependencies": { "asn1": "^0.2.4" } }, "sha512-Jd4cvbJMryN21r5HgxQOpMEqv+ooke/korixNNK3mGqfGJmy0M77WDDzo/05969+OkMy3XW1UuZsSmW9KQm7Fw=="], @@ -3279,8 +3236,6 @@ "openapi-typescript-helpers": ["openapi-typescript-helpers@0.0.15", "", {}, "sha512-opyTPaunsklCBpTK8JGef6mfPhLSnyy5a0IN9vKtx3+4aExf+KxEqYwIy3hqkedXIB97u357uLMJsOnm3GVjsw=="], - "opentracing": ["opentracing@0.14.7", "", {}, "sha512-vz9iS7MJ5+Bp1URw8Khvdyw1H/hGvzHWlKQ7eRrQojSCDL1/SrWfrY9QebLw97n2deyRtzHRC3MkQfVNUCo91Q=="], - "option": ["option@0.2.4", "", {}, "sha512-pkEqbDyl8ou5cpq+VsnQbe/WlEy5qS7xPzMS1U55OCG9KPvwFD46zDbxQIj3egJSFc3D+XhYOPUzz49zQAVy7A=="], "ora": ["ora@8.2.0", "", { "dependencies": { "chalk": "^5.3.0", "cli-cursor": "^5.0.0", "cli-spinners": "^2.9.2", "is-interactive": "^2.0.0", "is-unicode-supported": "^2.0.0", "log-symbols": "^6.0.0", "stdin-discarder": "^0.2.2", "string-width": "^7.2.0", "strip-ansi": "^7.1.0" } }, 
"sha512-weP+BZ8MVNnlCm8c0Qdc1WSWq4Qn7I+9CJGm7Qali6g44e/PUzbjNqJX5NJ9ljlNMosfJvg1fKEGILklK9cwnw=="], @@ -3395,8 +3350,6 @@ "postcss-load-config": ["postcss-load-config@6.0.1", "", { "dependencies": { "lilconfig": "^3.1.1" }, "peerDependencies": { "jiti": ">=1.21.0", "postcss": ">=8.0.9", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["jiti", "postcss", "tsx", "yaml"] }, "sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g=="], - "postcss-media-query-parser": ["postcss-media-query-parser@0.2.3", "", {}, "sha512-3sOlxmbKcSHMjlUXQZKQ06jOswE7oVkXPxmZdoB1r5l0q6gTFTQSHxNxOrCccElbW7dxNytifNEo8qidX2Vsig=="], - "postcss-nested": ["postcss-nested@6.2.0", "", { "dependencies": { "postcss-selector-parser": "^6.1.1" }, "peerDependencies": { "postcss": "^8.2.14" } }, "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ=="], "postcss-selector-parser": ["postcss-selector-parser@6.0.10", "", { "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" } }, "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w=="], @@ -3669,8 +3622,6 @@ "shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="], - "shell-quote": ["shell-quote@1.8.3", "", {}, "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw=="], - "shiki": ["shiki@4.0.0", "", { "dependencies": { "@shikijs/core": "4.0.0", "@shikijs/engine-javascript": "4.0.0", "@shikijs/engine-oniguruma": "4.0.0", "@shikijs/langs": "4.0.0", "@shikijs/themes": "4.0.0", "@shikijs/types": "4.0.0", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-rjKoiw30ZaFsM0xnPPwxco/Jftz/XXqZkcQZBTX4LGheDw8gCDEH87jdgaKDEG3FZO2bFOK27+sR/sDHhbBXfg=="], "shimmer": ["shimmer@1.2.1", "", {}, 
"sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw=="], @@ -3709,8 +3660,6 @@ "smart-buffer": ["smart-buffer@4.2.0", "", {}, "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg=="], - "soap": ["soap@1.8.0", "", { "dependencies": { "axios": "^1.13.6", "axios-ntlm": "^1.4.6", "debug": "^4.4.3", "follow-redirects": "^1.15.11", "formidable": "^3.5.4", "sax": "^1.5.0", "whatwg-mimetype": "4.0.0", "xml-crypto": "^6.1.2" } }, "sha512-WRIzZm4M13a9j1t8yMdZZtbbkxNatXAhvtO8UXc/LvdfZ/Op1MqZS6qsAbILLsLTk3oLM/PRw0XOG0U53dAZzg=="], - "socket.io": ["socket.io@4.8.3", "", { "dependencies": { "accepts": "~1.3.4", "base64id": "~2.0.0", "cors": "~2.8.5", "debug": "~4.4.1", "engine.io": "~6.6.0", "socket.io-adapter": "~2.5.2", "socket.io-parser": "~4.2.4" } }, "sha512-2Dd78bqzzjE6KPkD5fHZmDAKRNe3J15q+YHDrIsy9WEkqttc7GY+kT9OBLSMaPbQaEd0x1BjcmtMtXkfpc+T5A=="], "socket.io-adapter": ["socket.io-adapter@2.5.6", "", { "dependencies": { "debug": "~4.4.1", "ws": "~8.18.3" } }, "sha512-DkkO/dz7MGln0dHn5bmN3pPy+JmywNICWrJqVWiVOyvXjWQFIv9c2h24JrQLLFJ2aQVQf/Cvl1vblnd4r2apLQ=="], @@ -3763,8 +3712,6 @@ "string-argv": ["string-argv@0.3.2", "", {}, "sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q=="], - "string-template": ["string-template@0.2.1", "", {}, "sha512-Yptehjogou2xm4UJbxJ4CxgZx12HBfeystp0y3x7s4Dj32ltVVG1Gg8YhKjHZkHicuKpZX/ffilA8505VbUbpw=="], - "string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], "string-width-cjs": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], @@ -3807,7 
+3754,7 @@ "sucrase": ["sucrase@3.35.1", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.2", "commander": "^4.0.0", "lines-and-columns": "^1.1.6", "mz": "^2.7.0", "pirates": "^4.0.1", "tinyglobby": "^0.2.11", "ts-interface-checker": "^0.1.9" }, "bin": { "sucrase": "bin/sucrase", "sucrase-node": "bin/sucrase-node" } }, "sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw=="], - "supports-color": ["supports-color@8.1.1", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q=="], + "supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], "supports-preserve-symlinks-flag": ["supports-preserve-symlinks-flag@1.0.0", "", {}, "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w=="], @@ -3843,12 +3790,10 @@ "thenify-all": ["thenify-all@1.6.0", "", { "dependencies": { "thenify": ">= 3.1.0 < 4" } }, "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA=="], - "thread-stream": ["thread-stream@4.0.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-4iMVL6HAINXWf1ZKZjIPcz5wYaOdPhtO8ATvZ+Xqp3BTdaqtAwQkNmKORqcIo5YkQqGXq5cwfswDwMqqQNrpJA=="], + "thread-stream": ["thread-stream@3.1.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A=="], "three": ["three@0.177.0", "", {}, "sha512-EiXv5/qWAaGI+Vz2A+JfavwYCMdGjxVsrn3oBwllUoqYeaBO75J63ZfyaQKoiLrqNHoTlUc6PFgMXnS0kI45zg=="], - "thriftrw": ["thriftrw@3.11.4", "", { "dependencies": { "bufrw": "^1.2.1", "error": "7.0.2", "long": "^2.4.0" }, "bin": { "thrift2json": "thrift2json.js" } }, 
"sha512-UcuBd3eanB3T10nXWRRMwfwoaC6VMk7qe3/5YIWP2Jtw+EbHqJ0p1/K3x8ixiR5dozKSSfcg1W+0e33G1Di3XA=="], - "through": ["through@2.3.8", "", {}, "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg=="], "tiny-inflate": ["tiny-inflate@1.0.3", "", {}, "sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw=="], @@ -3883,8 +3828,6 @@ "tree-changes": ["tree-changes@0.11.3", "", { "dependencies": { "@gilbarbara/deep-equal": "^0.3.1", "is-lite": "^1.2.1" } }, "sha512-r14mvDZ6tqz8PRQmlFKjhUVngu4VZ9d92ON3tp0EGpFBE6PAHOq8Bx8m8ahbNoGE3uI/npjYcJiqVydyOiYXag=="], - "tree-kill": ["tree-kill@1.2.2", "", { "bin": { "tree-kill": "cli.js" } }, "sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A=="], - "trim-lines": ["trim-lines@3.0.1", "", {}, "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg=="], "trough": ["trough@2.2.0", "", {}, "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw=="], @@ -4069,12 +4012,8 @@ "xmlhttprequest-ssl": ["xmlhttprequest-ssl@2.1.2", "", {}, "sha512-TEU+nJVUUnA4CYJFLvK5X9AOeH4KvDvhIfm0vV1GaQRtchnG0hgK5p8hw/xjv8cunWYCsiPCSDzObPyhEwq3KQ=="], - "xorshift": ["xorshift@1.2.0", "", {}, "sha512-iYgNnGyeeJ4t6U11NpA/QiKy+PXn5Aa3Azg5qkwIFz1tBLllQrjjsk9yzD7IAK0naNU4JxdeDgqW9ov4u/hc4g=="], - "xpath": ["xpath@0.0.34", "", {}, "sha512-FxF6+rkr1rNSQrhUNYrAFJpRXNzlDoMxeXN5qI84939ylEv3qqPFKa85Oxr6tDaJKqwW6KKyo2v26TSv3k6LeA=="], - "xtend": ["xtend@4.0.2", "", {}, "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="], - "y18n": ["y18n@5.0.8", "", {}, "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="], "yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], @@ -4099,7 +4038,7 @@ "zod-validation-error": 
["zod-validation-error@1.5.0", "", { "peerDependencies": { "zod": "^3.18.0" } }, "sha512-/7eFkAI4qV0tcxMBB/3+d2c1P6jzzZYdYSlBuAklzMuCrJu5bzJfHS0yVAS87dRHVlhftd6RFJDIvv03JgkSbw=="], - "zustand": ["zustand@4.5.7", "", { "dependencies": { "use-sync-external-store": "^1.2.2" }, "peerDependencies": { "@types/react": ">=16.8", "immer": ">=9.0.6", "react": ">=16.8" }, "optionalPeers": ["@types/react", "immer", "react"] }, "sha512-CHOUy7mu3lbD6o6LJLfllpjkzhHXSBlX8B9+qPddUsIfeF5S/UZ5q0kmCsnRqT1UHFQZchNFDDzMbQsuesHWlw=="], + "zustand": ["zustand@5.0.13", "", { "peerDependencies": { "@types/react": ">=18.0.0", "immer": ">=9.0.6", "react": ">=18.0.0", "use-sync-external-store": ">=1.2.0" }, "optionalPeers": ["@types/react", "immer", "react", "use-sync-external-store"] }, "sha512-efI2tVaVQPqtOh114loML/Z80Y4NP3yc+Ff0fYiZJPauNeWZeIp/bRFD7I9bfmCOYBh/PHxlglQ9+wvlwnPikQ=="], "zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="], @@ -4115,184 +4054,6 @@ "@aws-crypto/util/@smithy/util-utf8": ["@smithy/util-utf8@2.3.0", "", { "dependencies": { "@smithy/util-buffer-from": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A=="], - "@aws-sdk/client-bedrock-runtime/@aws-sdk/core": ["@aws-sdk/core@3.940.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/xml-builder": "3.930.0", "@smithy/core": "^3.18.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-KsGD2FLaX5ngJao1mHxodIVU9VYd1E8810fcYiGwO1PFHDzf5BEkp6D9IdMeQwT8Q6JLYtiiT1Y/o3UCScnGoA=="], - - "@aws-sdk/client-bedrock-runtime/@aws-sdk/credential-provider-node": 
["@aws-sdk/credential-provider-node@3.940.0", "", { "dependencies": { "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-ini": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", "@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-M8NFAvgvO6xZjiti5kztFiAYmSmSlG3eUfr4ZHSfXYZUA/KUdZU/D6xJyaLnU8cYRWBludb6K9XPKKVwKfqm4g=="], - - "@aws-sdk/client-bedrock-runtime/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw=="], - - "@aws-sdk/client-bedrock-runtime/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw=="], - - "@aws-sdk/client-bedrock-runtime/@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws/lambda-invoke-store": "^0.2.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-l4aGbHpXM45YNgXggIux1HgsCVAvvBoqHPkqLnqMl9QVapfuSTjJHfDYDsx1Xxct6/m7qSMUzanBALhiaGO2fA=="], - - "@aws-sdk/client-bedrock-runtime/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@smithy/core": 
"^3.18.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-nJbLrUj6fY+l2W2rIB9P4Qvpiy0tnTdg/dmixRxrU1z3e8wBdspJlyE+AZN4fuVbeL6rrRrO/zxQC1bB3cw5IA=="], - - "@aws-sdk/client-bedrock-runtime/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/config-resolver": "^4.4.3", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw=="], - - "@aws-sdk/client-bedrock-runtime/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@aws-sdk/client-bedrock-runtime/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-endpoints": "^3.2.5", "tslib": "^2.6.2" } }, "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w=="], - - "@aws-sdk/client-bedrock-runtime/@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw=="], - - "@aws-sdk/client-bedrock-runtime/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.940.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, 
"sha512-dlD/F+L/jN26I8Zg5x0oDGJiA+/WEQmnSE27fi5ydvYnpfQLwThtQo9SsNS47XSR/SOULaaoC9qx929rZuo74A=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/core": ["@aws-sdk/core@3.940.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/xml-builder": "3.930.0", "@smithy/core": "^3.18.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-KsGD2FLaX5ngJao1mHxodIVU9VYd1E8810fcYiGwO1PFHDzf5BEkp6D9IdMeQwT8Q6JLYtiiT1Y/o3UCScnGoA=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.940.0", "", { "dependencies": { "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-ini": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", "@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-M8NFAvgvO6xZjiti5kztFiAYmSmSlG3eUfr4ZHSfXYZUA/KUdZU/D6xJyaLnU8cYRWBludb6K9XPKKVwKfqm4g=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", 
"tslib": "^2.6.2" } }, "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws/lambda-invoke-store": "^0.2.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-l4aGbHpXM45YNgXggIux1HgsCVAvvBoqHPkqLnqMl9QVapfuSTjJHfDYDsx1Xxct6/m7qSMUzanBALhiaGO2fA=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@smithy/core": "^3.18.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-nJbLrUj6fY+l2W2rIB9P4Qvpiy0tnTdg/dmixRxrU1z3e8wBdspJlyE+AZN4fuVbeL6rrRrO/zxQC1bB3cw5IA=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/config-resolver": "^4.4.3", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-endpoints": "^3.2.5", "tslib": "^2.6.2" } }, "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w=="], - - 
"@aws-sdk/client-cloudwatch/@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.940.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-dlD/F+L/jN26I8Zg5x0oDGJiA+/WEQmnSE27fi5ydvYnpfQLwThtQo9SsNS47XSR/SOULaaoC9qx929rZuo74A=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/core": ["@aws-sdk/core@3.940.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/xml-builder": "3.930.0", "@smithy/core": "^3.18.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-KsGD2FLaX5ngJao1mHxodIVU9VYd1E8810fcYiGwO1PFHDzf5BEkp6D9IdMeQwT8Q6JLYtiiT1Y/o3UCScnGoA=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.940.0", "", { "dependencies": { "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-ini": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", "@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", 
"@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-M8NFAvgvO6xZjiti5kztFiAYmSmSlG3eUfr4ZHSfXYZUA/KUdZU/D6xJyaLnU8cYRWBludb6K9XPKKVwKfqm4g=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws/lambda-invoke-store": "^0.2.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-l4aGbHpXM45YNgXggIux1HgsCVAvvBoqHPkqLnqMl9QVapfuSTjJHfDYDsx1Xxct6/m7qSMUzanBALhiaGO2fA=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@smithy/core": "^3.18.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-nJbLrUj6fY+l2W2rIB9P4Qvpiy0tnTdg/dmixRxrU1z3e8wBdspJlyE+AZN4fuVbeL6rrRrO/zxQC1bB3cw5IA=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/config-resolver": "^4.4.3", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } 
}, "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-endpoints": "^3.2.5", "tslib": "^2.6.2" } }, "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.940.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-dlD/F+L/jN26I8Zg5x0oDGJiA+/WEQmnSE27fi5ydvYnpfQLwThtQo9SsNS47XSR/SOULaaoC9qx929rZuo74A=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/core": ["@aws-sdk/core@3.940.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/xml-builder": "3.930.0", "@smithy/core": "^3.18.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-base64": "^4.3.0", 
"@smithy/util-middleware": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-KsGD2FLaX5ngJao1mHxodIVU9VYd1E8810fcYiGwO1PFHDzf5BEkp6D9IdMeQwT8Q6JLYtiiT1Y/o3UCScnGoA=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.940.0", "", { "dependencies": { "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-ini": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", "@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-M8NFAvgvO6xZjiti5kztFiAYmSmSlG3eUfr4ZHSfXYZUA/KUdZU/D6xJyaLnU8cYRWBludb6K9XPKKVwKfqm4g=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws/lambda-invoke-store": "^0.2.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-l4aGbHpXM45YNgXggIux1HgsCVAvvBoqHPkqLnqMl9QVapfuSTjJHfDYDsx1Xxct6/m7qSMUzanBALhiaGO2fA=="], - - 
"@aws-sdk/client-dynamodb/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@smithy/core": "^3.18.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-nJbLrUj6fY+l2W2rIB9P4Qvpiy0tnTdg/dmixRxrU1z3e8wBdspJlyE+AZN4fuVbeL6rrRrO/zxQC1bB3cw5IA=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/config-resolver": "^4.4.3", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-endpoints": "^3.2.5", "tslib": "^2.6.2" } }, "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.940.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/types": "3.936.0", 
"@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-dlD/F+L/jN26I8Zg5x0oDGJiA+/WEQmnSE27fi5ydvYnpfQLwThtQo9SsNS47XSR/SOULaaoC9qx929rZuo74A=="], - - "@aws-sdk/client-rds-data/@aws-sdk/core": ["@aws-sdk/core@3.940.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/xml-builder": "3.930.0", "@smithy/core": "^3.18.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-KsGD2FLaX5ngJao1mHxodIVU9VYd1E8810fcYiGwO1PFHDzf5BEkp6D9IdMeQwT8Q6JLYtiiT1Y/o3UCScnGoA=="], - - "@aws-sdk/client-rds-data/@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.940.0", "", { "dependencies": { "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-ini": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", "@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-M8NFAvgvO6xZjiti5kztFiAYmSmSlG3eUfr4ZHSfXYZUA/KUdZU/D6xJyaLnU8cYRWBludb6K9XPKKVwKfqm4g=="], - - "@aws-sdk/client-rds-data/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw=="], - - 
"@aws-sdk/client-rds-data/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw=="], - - "@aws-sdk/client-rds-data/@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws/lambda-invoke-store": "^0.2.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-l4aGbHpXM45YNgXggIux1HgsCVAvvBoqHPkqLnqMl9QVapfuSTjJHfDYDsx1Xxct6/m7qSMUzanBALhiaGO2fA=="], - - "@aws-sdk/client-rds-data/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@smithy/core": "^3.18.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-nJbLrUj6fY+l2W2rIB9P4Qvpiy0tnTdg/dmixRxrU1z3e8wBdspJlyE+AZN4fuVbeL6rrRrO/zxQC1bB3cw5IA=="], - - "@aws-sdk/client-rds-data/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/config-resolver": "^4.4.3", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw=="], - - "@aws-sdk/client-rds-data/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@aws-sdk/client-rds-data/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", 
"@smithy/util-endpoints": "^3.2.5", "tslib": "^2.6.2" } }, "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w=="], - - "@aws-sdk/client-rds-data/@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw=="], - - "@aws-sdk/client-rds-data/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.940.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-dlD/F+L/jN26I8Zg5x0oDGJiA+/WEQmnSE27fi5ydvYnpfQLwThtQo9SsNS47XSR/SOULaaoC9qx929rZuo74A=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/core": ["@aws-sdk/core@3.940.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/xml-builder": "3.930.0", "@smithy/core": "^3.18.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-KsGD2FLaX5ngJao1mHxodIVU9VYd1E8810fcYiGwO1PFHDzf5BEkp6D9IdMeQwT8Q6JLYtiiT1Y/o3UCScnGoA=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.940.0", "", { "dependencies": { "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-ini": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", "@aws-sdk/credential-provider-sso": "3.940.0", 
"@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-M8NFAvgvO6xZjiti5kztFiAYmSmSlG3eUfr4ZHSfXYZUA/KUdZU/D6xJyaLnU8cYRWBludb6K9XPKKVwKfqm4g=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws/lambda-invoke-store": "^0.2.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-l4aGbHpXM45YNgXggIux1HgsCVAvvBoqHPkqLnqMl9QVapfuSTjJHfDYDsx1Xxct6/m7qSMUzanBALhiaGO2fA=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@smithy/core": "^3.18.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-nJbLrUj6fY+l2W2rIB9P4Qvpiy0tnTdg/dmixRxrU1z3e8wBdspJlyE+AZN4fuVbeL6rrRrO/zxQC1bB3cw5IA=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.936.0", "", { 
"dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/config-resolver": "^4.4.3", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-endpoints": "^3.2.5", "tslib": "^2.6.2" } }, "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.940.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-dlD/F+L/jN26I8Zg5x0oDGJiA+/WEQmnSE27fi5ydvYnpfQLwThtQo9SsNS47XSR/SOULaaoC9qx929rZuo74A=="], - - "@aws-sdk/client-sesv2/@aws-sdk/core": ["@aws-sdk/core@3.940.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/xml-builder": "3.930.0", "@smithy/core": "^3.18.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/property-provider": "^4.2.5", 
"@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-KsGD2FLaX5ngJao1mHxodIVU9VYd1E8810fcYiGwO1PFHDzf5BEkp6D9IdMeQwT8Q6JLYtiiT1Y/o3UCScnGoA=="], - - "@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.940.0", "", { "dependencies": { "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-ini": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", "@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-M8NFAvgvO6xZjiti5kztFiAYmSmSlG3eUfr4ZHSfXYZUA/KUdZU/D6xJyaLnU8cYRWBludb6K9XPKKVwKfqm4g=="], - - "@aws-sdk/client-sesv2/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw=="], - - "@aws-sdk/client-sesv2/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw=="], - - "@aws-sdk/client-sesv2/@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws/lambda-invoke-store": "^0.2.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": 
"^4.9.0", "tslib": "^2.6.2" } }, "sha512-l4aGbHpXM45YNgXggIux1HgsCVAvvBoqHPkqLnqMl9QVapfuSTjJHfDYDsx1Xxct6/m7qSMUzanBALhiaGO2fA=="], - - "@aws-sdk/client-sesv2/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@smithy/core": "^3.18.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-nJbLrUj6fY+l2W2rIB9P4Qvpiy0tnTdg/dmixRxrU1z3e8wBdspJlyE+AZN4fuVbeL6rrRrO/zxQC1bB3cw5IA=="], - - "@aws-sdk/client-sesv2/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/config-resolver": "^4.4.3", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw=="], - - "@aws-sdk/client-sesv2/@aws-sdk/signature-v4-multi-region": ["@aws-sdk/signature-v4-multi-region@3.940.0", "", { "dependencies": { "@aws-sdk/middleware-sdk-s3": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-ugHZEoktD/bG6mdgmhzLDjMP2VrYRAUPRPF1DpCyiZexkH7DCU7XrSJyXMvkcf0DHV+URk0q2sLf/oqn1D2uYw=="], - - "@aws-sdk/client-sesv2/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@aws-sdk/client-sesv2/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-endpoints": "^3.2.5", "tslib": "^2.6.2" } }, "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w=="], - - 
"@aws-sdk/client-sesv2/@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw=="], - - "@aws-sdk/client-sesv2/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.940.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-dlD/F+L/jN26I8Zg5x0oDGJiA+/WEQmnSE27fi5ydvYnpfQLwThtQo9SsNS47XSR/SOULaaoC9qx929rZuo74A=="], - - "@aws-sdk/client-sqs/@aws-sdk/core": ["@aws-sdk/core@3.947.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/xml-builder": "3.930.0", "@smithy/core": "^3.18.7", "@smithy/node-config-provider": "^4.3.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.10", "@smithy/types": "^4.9.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-Khq4zHhuAkvCFuFbgcy3GrZTzfSX7ZIjIcW1zRDxXRLZKRtuhnZdonqTUfaWi5K42/4OmxkYNpsO7X7trQOeHw=="], - - "@aws-sdk/client-sqs/@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.947.0", "", { "dependencies": { "@aws-sdk/credential-provider-env": "3.947.0", "@aws-sdk/credential-provider-http": "3.947.0", "@aws-sdk/credential-provider-ini": "3.947.0", "@aws-sdk/credential-provider-process": "3.947.0", "@aws-sdk/credential-provider-sso": "3.947.0", "@aws-sdk/credential-provider-web-identity": "3.947.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", 
"@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-S0Zqebr71KyrT6J4uYPhwV65g4V5uDPHnd7dt2W34FcyPu+hVC7Hx4MFmsPyVLeT5cMCkkZvmY3kAoEzgUPJJg=="], - - "@aws-sdk/client-sqs/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw=="], - - "@aws-sdk/client-sqs/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw=="], - - "@aws-sdk/client-sqs/@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws/lambda-invoke-store": "^0.2.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-l4aGbHpXM45YNgXggIux1HgsCVAvvBoqHPkqLnqMl9QVapfuSTjJHfDYDsx1Xxct6/m7qSMUzanBALhiaGO2fA=="], - - "@aws-sdk/client-sqs/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.947.0", "", { "dependencies": { "@aws-sdk/core": "3.947.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@smithy/core": "^3.18.7", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-7rpKV8YNgCP2R4F9RjWZFcD2R+SO/0R4VHIbY9iZJdH2MzzJ8ZG7h8dZ2m8QkQd1fjx4wrFJGGPJUTYXPV3baA=="], - - "@aws-sdk/client-sqs/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/config-resolver": "^4.4.3", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw=="], 
- - "@aws-sdk/client-sqs/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@aws-sdk/client-sqs/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-endpoints": "^3.2.5", "tslib": "^2.6.2" } }, "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w=="], - - "@aws-sdk/client-sqs/@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw=="], - - "@aws-sdk/client-sqs/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.947.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.947.0", "@aws-sdk/types": "3.936.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-+vhHoDrdbb+zerV4noQk1DHaUMNzWFWPpPYjVTwW2186k5BEJIecAMChYkghRrBVJ3KPWP1+JnZwOd72F3d4rQ=="], - "@aws-sdk/client-sso/@aws-sdk/core": ["@aws-sdk/core@3.940.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/xml-builder": "3.930.0", "@smithy/core": "^3.18.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, 
"sha512-KsGD2FLaX5ngJao1mHxodIVU9VYd1E8810fcYiGwO1PFHDzf5BEkp6D9IdMeQwT8Q6JLYtiiT1Y/o3UCScnGoA=="], "@aws-sdk/client-sso/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw=="], @@ -4313,53 +4074,9 @@ "@aws-sdk/client-sso/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.940.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-dlD/F+L/jN26I8Zg5x0oDGJiA+/WEQmnSE27fi5ydvYnpfQLwThtQo9SsNS47XSR/SOULaaoC9qx929rZuo74A=="], - "@aws-sdk/credential-provider-ini/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.997.4", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.6", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.36", "@aws-sdk/region-config-resolver": "^3.972.13", "@aws-sdk/signature-v4-multi-region": "^3.996.23", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.8", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.22", "@smithy/config-resolver": "^4.4.17", "@smithy/core": "^3.23.17", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.32", "@smithy/middleware-retry": "^4.5.6", "@smithy/middleware-serde": "^4.2.20", "@smithy/middleware-stack": "^4.2.14", 
"@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.6.1", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.49", "@smithy/util-defaults-mode-node": "^4.2.54", "@smithy/util-endpoints": "^3.4.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.5", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-4Sf+WY1lMJzXlw5MiyCMe/UzdILCwvuaHThbqMXS6dfh9gZy3No360I42RXquOI/ULUOhWy2HCyU0Fp20fQGPQ=="], - - "@aws-sdk/credential-provider-login/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.997.4", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.6", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.36", "@aws-sdk/region-config-resolver": "^3.972.13", "@aws-sdk/signature-v4-multi-region": "^3.996.23", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.8", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.22", "@smithy/config-resolver": "^4.4.17", "@smithy/core": "^3.23.17", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.32", "@smithy/middleware-retry": "^4.5.6", "@smithy/middleware-serde": "^4.2.20", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.6.1", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", 
"@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.49", "@smithy/util-defaults-mode-node": "^4.2.54", "@smithy/util-endpoints": "^3.4.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.5", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-4Sf+WY1lMJzXlw5MiyCMe/UzdILCwvuaHThbqMXS6dfh9gZy3No360I42RXquOI/ULUOhWy2HCyU0Fp20fQGPQ=="], - - "@aws-sdk/credential-provider-sso/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.997.4", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.6", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.36", "@aws-sdk/region-config-resolver": "^3.972.13", "@aws-sdk/signature-v4-multi-region": "^3.996.23", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.8", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.22", "@smithy/config-resolver": "^4.4.17", "@smithy/core": "^3.23.17", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.32", "@smithy/middleware-retry": "^4.5.6", "@smithy/middleware-serde": "^4.2.20", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.6.1", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.49", "@smithy/util-defaults-mode-node": "^4.2.54", 
"@smithy/util-endpoints": "^3.4.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.5", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-4Sf+WY1lMJzXlw5MiyCMe/UzdILCwvuaHThbqMXS6dfh9gZy3No360I42RXquOI/ULUOhWy2HCyU0Fp20fQGPQ=="], - "@aws-sdk/credential-provider-sso/@aws-sdk/token-providers": ["@aws-sdk/token-providers@3.1038.0", "", { "dependencies": { "@aws-sdk/core": "^3.974.6", "@aws-sdk/nested-clients": "^3.997.4", "@aws-sdk/types": "^3.973.8", "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-Qniru+9oGGb/HNK/gGZWbV3jsD0k71ngE7qMQ/x6gYNYLd2EOwHCS6E2E6jfkaqO4i0d+nNKmfRy8bNcshKdGQ=="], - "@aws-sdk/credential-provider-web-identity/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.997.4", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.974.6", "@aws-sdk/middleware-host-header": "^3.972.10", "@aws-sdk/middleware-logger": "^3.972.10", "@aws-sdk/middleware-recursion-detection": "^3.972.11", "@aws-sdk/middleware-user-agent": "^3.972.36", "@aws-sdk/region-config-resolver": "^3.972.13", "@aws-sdk/signature-v4-multi-region": "^3.996.23", "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-endpoints": "^3.996.8", "@aws-sdk/util-user-agent-browser": "^3.972.10", "@aws-sdk/util-user-agent-node": "^3.973.22", "@smithy/config-resolver": "^4.4.17", "@smithy/core": "^3.23.17", "@smithy/fetch-http-handler": "^5.3.17", "@smithy/hash-node": "^4.2.14", "@smithy/invalid-dependency": "^4.2.14", "@smithy/middleware-content-length": "^4.2.14", "@smithy/middleware-endpoint": "^4.4.32", "@smithy/middleware-retry": "^4.5.6", "@smithy/middleware-serde": "^4.2.20", "@smithy/middleware-stack": "^4.2.14", "@smithy/node-config-provider": "^4.3.14", "@smithy/node-http-handler": "^4.6.1", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/url-parser": 
"^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.49", "@smithy/util-defaults-mode-node": "^4.2.54", "@smithy/util-endpoints": "^3.4.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-retry": "^4.3.5", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-4Sf+WY1lMJzXlw5MiyCMe/UzdILCwvuaHThbqMXS6dfh9gZy3No360I42RXquOI/ULUOhWy2HCyU0Fp20fQGPQ=="], - - "@aws-sdk/eventstream-handler-node/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@aws-sdk/lib-dynamodb/@aws-sdk/core": ["@aws-sdk/core@3.940.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/xml-builder": "3.930.0", "@smithy/core": "^3.18.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-KsGD2FLaX5ngJao1mHxodIVU9VYd1E8810fcYiGwO1PFHDzf5BEkp6D9IdMeQwT8Q6JLYtiiT1Y/o3UCScnGoA=="], - - "@aws-sdk/middleware-endpoint-discovery/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@aws-sdk/middleware-eventstream/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@aws-sdk/middleware-sdk-sqs/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": 
"^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@aws-sdk/middleware-websocket/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@aws-sdk/middleware-websocket/@aws-sdk/util-format-url": ["@aws-sdk/util-format-url@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/querystring-builder": "^4.2.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-MS5eSEtDUFIAMHrJaMERiHAvDPdfxc/T869ZjDNFAIiZhyc037REw0aoTNeimNXDNy2txRNZJaAUn/kE4RwN+g=="], - - "@aws-sdk/nested-clients/@aws-sdk/core": ["@aws-sdk/core@3.940.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/xml-builder": "3.930.0", "@smithy/core": "^3.18.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-KsGD2FLaX5ngJao1mHxodIVU9VYd1E8810fcYiGwO1PFHDzf5BEkp6D9IdMeQwT8Q6JLYtiiT1Y/o3UCScnGoA=="], - - "@aws-sdk/nested-clients/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw=="], - - "@aws-sdk/nested-clients/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw=="], - - 
"@aws-sdk/nested-clients/@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws/lambda-invoke-store": "^0.2.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-l4aGbHpXM45YNgXggIux1HgsCVAvvBoqHPkqLnqMl9QVapfuSTjJHfDYDsx1Xxct6/m7qSMUzanBALhiaGO2fA=="], - - "@aws-sdk/nested-clients/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@smithy/core": "^3.18.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-nJbLrUj6fY+l2W2rIB9P4Qvpiy0tnTdg/dmixRxrU1z3e8wBdspJlyE+AZN4fuVbeL6rrRrO/zxQC1bB3cw5IA=="], - - "@aws-sdk/nested-clients/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/config-resolver": "^4.4.3", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw=="], - - "@aws-sdk/nested-clients/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@aws-sdk/nested-clients/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-endpoints": "^3.2.5", "tslib": "^2.6.2" } }, "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w=="], - - "@aws-sdk/nested-clients/@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", 
"@smithy/types": "^4.9.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw=="], - - "@aws-sdk/nested-clients/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.940.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-dlD/F+L/jN26I8Zg5x0oDGJiA+/WEQmnSE27fi5ydvYnpfQLwThtQo9SsNS47XSR/SOULaaoC9qx929rZuo74A=="], - - "@aws-sdk/token-providers/@aws-sdk/core": ["@aws-sdk/core@3.940.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/xml-builder": "3.930.0", "@smithy/core": "^3.18.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-KsGD2FLaX5ngJao1mHxodIVU9VYd1E8810fcYiGwO1PFHDzf5BEkp6D9IdMeQwT8Q6JLYtiiT1Y/o3UCScnGoA=="], - - "@aws-sdk/token-providers/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], + "@aws-sdk/dynamodb-codec/@aws-sdk/core": ["@aws-sdk/core@3.974.8", "", { "dependencies": { "@aws-sdk/types": "^3.973.8", "@aws-sdk/xml-builder": "^3.972.22", "@smithy/core": "^3.23.17", "@smithy/node-config-provider": "^4.3.14", "@smithy/property-provider": "^4.2.14", "@smithy/protocol-http": "^5.3.14", "@smithy/signature-v4": "^5.3.14", "@smithy/smithy-client": "^4.12.13", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-middleware": "^4.2.14", 
"@smithy/util-retry": "^4.3.6", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-njR2qoG6ZuB0kvAS2FyICsFZJ6gmCcf2X/7JcD14sUvGDm26wiZ5BrA6LOiUxKFEF+IVe7kdroxyE00YlkiYsw=="], "@azure/communication-email/tslib": ["tslib@1.14.1", "", {}, "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="], @@ -4389,8 +4106,6 @@ "@esbuild-kit/core-utils/esbuild": ["esbuild@0.18.20", "", { "optionalDependencies": { "@esbuild/android-arm": "0.18.20", "@esbuild/android-arm64": "0.18.20", "@esbuild/android-x64": "0.18.20", "@esbuild/darwin-arm64": "0.18.20", "@esbuild/darwin-x64": "0.18.20", "@esbuild/freebsd-arm64": "0.18.20", "@esbuild/freebsd-x64": "0.18.20", "@esbuild/linux-arm": "0.18.20", "@esbuild/linux-arm64": "0.18.20", "@esbuild/linux-ia32": "0.18.20", "@esbuild/linux-loong64": "0.18.20", "@esbuild/linux-mips64el": "0.18.20", "@esbuild/linux-ppc64": "0.18.20", "@esbuild/linux-riscv64": "0.18.20", "@esbuild/linux-s390x": "0.18.20", "@esbuild/linux-x64": "0.18.20", "@esbuild/netbsd-x64": "0.18.20", "@esbuild/openbsd-x64": "0.18.20", "@esbuild/sunos-x64": "0.18.20", "@esbuild/win32-arm64": "0.18.20", "@esbuild/win32-ia32": "0.18.20", "@esbuild/win32-x64": "0.18.20" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA=="], - "@inquirer/external-editor/iconv-lite": ["iconv-lite@0.7.1", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-2Tth85cXwGFHfvRgZWszZSvdo+0Xsqmw8k8ZwxScfcBneNUraK+dxRxRm24nszx80Y0TVio8kKLt5sLE7ZCLlw=="], - "@langchain/core/ansi-styles": ["ansi-styles@5.2.0", "", {}, "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA=="], "@langchain/core/uuid": ["uuid@10.0.0", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ=="], @@ -4407,82 +4122,36 @@ 
"@octokit/plugin-rest-endpoint-methods/@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="], - "@opentelemetry/exporter-jaeger/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.1.0", "", { "dependencies": { "@opentelemetry/core": "2.1.0", "@opentelemetry/resources": "2.1.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-uTX9FBlVQm4S2gVQO1sb5qyBLq/FPjbp+tmGoxu4tIgtYGmBYB44+KX/725RFDe30yBSaA9Ml9fqphe1hbUyLQ=="], - - "@opentelemetry/exporter-logs-otlp-grpc/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - - "@opentelemetry/exporter-logs-otlp-http/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - - "@opentelemetry/exporter-logs-otlp-proto/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - "@opentelemetry/exporter-logs-otlp-proto/@opentelemetry/resources": ["@opentelemetry/resources@2.0.0", "", { "dependencies": { "@opentelemetry/core": "2.0.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, 
"sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg=="], - "@opentelemetry/exporter-metrics-otlp-grpc/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - "@opentelemetry/exporter-metrics-otlp-grpc/@opentelemetry/resources": ["@opentelemetry/resources@2.0.0", "", { "dependencies": { "@opentelemetry/core": "2.0.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg=="], - "@opentelemetry/exporter-metrics-otlp-http/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - "@opentelemetry/exporter-metrics-otlp-http/@opentelemetry/resources": ["@opentelemetry/resources@2.0.0", "", { "dependencies": { "@opentelemetry/core": "2.0.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg=="], - "@opentelemetry/exporter-metrics-otlp-proto/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - "@opentelemetry/exporter-metrics-otlp-proto/@opentelemetry/resources": ["@opentelemetry/resources@2.0.0", "", { 
"dependencies": { "@opentelemetry/core": "2.0.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg=="], - "@opentelemetry/exporter-prometheus/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - "@opentelemetry/exporter-prometheus/@opentelemetry/resources": ["@opentelemetry/resources@2.0.0", "", { "dependencies": { "@opentelemetry/core": "2.0.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg=="], - "@opentelemetry/exporter-trace-otlp-grpc/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - "@opentelemetry/exporter-trace-otlp-grpc/@opentelemetry/resources": ["@opentelemetry/resources@2.0.0", "", { "dependencies": { "@opentelemetry/core": "2.0.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg=="], - "@opentelemetry/exporter-trace-otlp-http/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, 
"sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - "@opentelemetry/exporter-trace-otlp-http/@opentelemetry/resources": ["@opentelemetry/resources@2.0.0", "", { "dependencies": { "@opentelemetry/core": "2.0.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg=="], - "@opentelemetry/exporter-trace-otlp-proto/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - "@opentelemetry/exporter-trace-otlp-proto/@opentelemetry/resources": ["@opentelemetry/resources@2.0.0", "", { "dependencies": { "@opentelemetry/core": "2.0.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg=="], - "@opentelemetry/exporter-zipkin/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - "@opentelemetry/exporter-zipkin/@opentelemetry/resources": ["@opentelemetry/resources@2.0.0", "", { "dependencies": { "@opentelemetry/core": "2.0.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg=="], - "@opentelemetry/otlp-exporter-base/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { 
"dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - - "@opentelemetry/otlp-grpc-exporter-base/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - - "@opentelemetry/otlp-transformer/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - "@opentelemetry/otlp-transformer/@opentelemetry/resources": ["@opentelemetry/resources@2.0.0", "", { "dependencies": { "@opentelemetry/core": "2.0.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg=="], - "@opentelemetry/propagator-b3/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - - "@opentelemetry/propagator-jaeger/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - "@opentelemetry/resources/@opentelemetry/core": 
["@opentelemetry/core@2.7.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-DT12SXVwV2eoJrGf4nnsvZojxxeQo+LlNAsoYGRRObPWTeN6APiqZ2+nqDCQDvQX40eLi1AePONS0onoASp3yQ=="], - "@opentelemetry/sdk-logs/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - "@opentelemetry/sdk-logs/@opentelemetry/resources": ["@opentelemetry/resources@2.0.0", "", { "dependencies": { "@opentelemetry/core": "2.0.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg=="], - "@opentelemetry/sdk-metrics/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - "@opentelemetry/sdk-metrics/@opentelemetry/resources": ["@opentelemetry/resources@2.0.0", "", { "dependencies": { "@opentelemetry/core": "2.0.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg=="], - "@opentelemetry/sdk-node/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - 
"@opentelemetry/sdk-node/@opentelemetry/resources": ["@opentelemetry/resources@2.0.0", "", { "dependencies": { "@opentelemetry/core": "2.0.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg=="], - "@opentelemetry/sdk-trace-base/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - "@opentelemetry/sdk-trace-base/@opentelemetry/resources": ["@opentelemetry/resources@2.0.0", "", { "dependencies": { "@opentelemetry/core": "2.0.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg=="], - "@opentelemetry/sdk-trace-node/@opentelemetry/core": ["@opentelemetry/core@2.0.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ=="], - - "@paralleldrive/cuid2/@noble/hashes": ["@noble/hashes@1.8.0", "", {}, "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A=="], - "@puppeteer/browsers/tar-fs": ["tar-fs@3.1.2", "", { "dependencies": { "pump": "^3.0.0", "tar-stream": "^3.1.5" }, "optionalDependencies": { "bare-fs": "^4.0.1", "bare-path": "^3.0.0" } }, "sha512-QGxxTxxyleAdyM3kpFs14ymbYmNFrfY+pHj7Z8FgtbZ7w2//VAgLMac7sT6nRpIHjppXO2AwwEOg0bPFVRcmXw=="], "@radix-ui/react-alert-dialog/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": 
"1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="], @@ -4517,9 +4186,21 @@ "@radix-ui/react-visually-hidden/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.4", "", { "dependencies": { "@radix-ui/react-slot": "1.2.4" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg=="], - "@react-email/code-block/prismjs": ["prismjs@1.29.0", "", {}, "sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q=="], + "@react-email/components/@react-email/render": ["@react-email/render@1.4.0", "", { "dependencies": { "html-to-text": "^9.0.5", "prettier": "^3.5.3", "react-promise-suspense": "^0.3.4" }, "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-ZtJ3noggIvW1ZAryoui95KJENKdCzLmN5F7hyZY1F/17B1vwzuxHB7YkuCg0QqHjDivc5axqYEYdIOw4JIQdUw=="], - "@react-email/components/@react-email/render": ["@react-email/render@1.0.5", "", { "dependencies": { "html-to-text": "9.0.5", "prettier": "3.4.2", "react-promise-suspense": "0.3.4" }, "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-CA69HYXPk21HhtAXATIr+9JJwpDNmAFCvdMUjWmeoD1+KhJ9NAxusMRxKNeibdZdslmq3edaeOKGbdQ9qjK8LQ=="], + "@react-email/markdown/marked": ["marked@15.0.12", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-8dD6FusOQSrpv9Z1rdNMdlSgQOIP880DHqnohobOmYLElGEqAL/JvxvuxZO16r4HtjTlfPRDC1hbvxC9dPN2nA=="], + + "@reactflow/background/zustand": ["zustand@4.5.7", 
"", { "dependencies": { "use-sync-external-store": "^1.2.2" }, "peerDependencies": { "@types/react": ">=16.8", "immer": ">=9.0.6", "react": ">=16.8" }, "optionalPeers": ["@types/react", "immer", "react"] }, "sha512-CHOUy7mu3lbD6o6LJLfllpjkzhHXSBlX8B9+qPddUsIfeF5S/UZ5q0kmCsnRqT1UHFQZchNFDDzMbQsuesHWlw=="], + + "@reactflow/controls/zustand": ["zustand@4.5.7", "", { "dependencies": { "use-sync-external-store": "^1.2.2" }, "peerDependencies": { "@types/react": ">=16.8", "immer": ">=9.0.6", "react": ">=16.8" }, "optionalPeers": ["@types/react", "immer", "react"] }, "sha512-CHOUy7mu3lbD6o6LJLfllpjkzhHXSBlX8B9+qPddUsIfeF5S/UZ5q0kmCsnRqT1UHFQZchNFDDzMbQsuesHWlw=="], + + "@reactflow/core/zustand": ["zustand@4.5.7", "", { "dependencies": { "use-sync-external-store": "^1.2.2" }, "peerDependencies": { "@types/react": ">=16.8", "immer": ">=9.0.6", "react": ">=16.8" }, "optionalPeers": ["@types/react", "immer", "react"] }, "sha512-CHOUy7mu3lbD6o6LJLfllpjkzhHXSBlX8B9+qPddUsIfeF5S/UZ5q0kmCsnRqT1UHFQZchNFDDzMbQsuesHWlw=="], + + "@reactflow/minimap/zustand": ["zustand@4.5.7", "", { "dependencies": { "use-sync-external-store": "^1.2.2" }, "peerDependencies": { "@types/react": ">=16.8", "immer": ">=9.0.6", "react": ">=16.8" }, "optionalPeers": ["@types/react", "immer", "react"] }, "sha512-CHOUy7mu3lbD6o6LJLfllpjkzhHXSBlX8B9+qPddUsIfeF5S/UZ5q0kmCsnRqT1UHFQZchNFDDzMbQsuesHWlw=="], + + "@reactflow/node-resizer/zustand": ["zustand@4.5.7", "", { "dependencies": { "use-sync-external-store": "^1.2.2" }, "peerDependencies": { "@types/react": ">=16.8", "immer": ">=9.0.6", "react": ">=16.8" }, "optionalPeers": ["@types/react", "immer", "react"] }, "sha512-CHOUy7mu3lbD6o6LJLfllpjkzhHXSBlX8B9+qPddUsIfeF5S/UZ5q0kmCsnRqT1UHFQZchNFDDzMbQsuesHWlw=="], + + "@reactflow/node-toolbar/zustand": ["zustand@4.5.7", "", { "dependencies": { "use-sync-external-store": "^1.2.2" }, "peerDependencies": { "@types/react": ">=16.8", "immer": ">=9.0.6", "react": ">=16.8" }, "optionalPeers": ["@types/react", "immer", 
"react"] }, "sha512-CHOUy7mu3lbD6o6LJLfllpjkzhHXSBlX8B9+qPddUsIfeF5S/UZ5q0kmCsnRqT1UHFQZchNFDDzMbQsuesHWlw=="], "@shuding/opentype.js/fflate": ["fflate@0.7.4", "", {}, "sha512-5u2V/CDW15QM1XbbgS+0DfPxVB+jUKhWEKuuFuHncbk3tEEqzmoXL+2KyOFuKGqOnmdIy0/davWF1CkuwtibCw=="], @@ -4597,6 +4278,8 @@ "@types/node-fetch/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], + "@types/nodemailer/@aws-sdk/client-sesv2": ["@aws-sdk/client-sesv2@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-node": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/signature-v4-multi-region": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", 
"@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-jDQ4x2HwB2/UXBS7CTeSDiIb+sVsYGDyxTeXdrRAtqNdGv8kC54fbwokDiJ/mnMyB2gyXWw57BqeDJNkZuLmsw=="], + "@types/nodemailer/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="], "@types/papaparse/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="], @@ -4625,16 +4308,12 @@ "bl/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], - "body-parser/iconv-lite": ["iconv-lite@0.7.1", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-2Tth85cXwGFHfvRgZWszZSvdo+0Xsqmw8k8ZwxScfcBneNUraK+dxRxRm24nszx80Y0TVio8kKLt5sLE7ZCLlw=="], - "c12/chokidar": ["chokidar@4.0.3", "", { "dependencies": { "readdirp": "^4.0.1" } }, "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="], "c12/confbox": ["confbox@0.2.4", "", {}, "sha512-ysOGlgTFbN2/Y6Cg3Iye8YKulHw+R2fNXHrgSmXISQdMnomY6eNDprVdW9R5xBguEqI954+S6709UyiO7B+6OQ=="], "c12/pkg-types": ["pkg-types@2.3.1", "", { "dependencies": { "confbox": "^0.2.4", "exsolve": "^1.0.8", "pathe": "^2.0.3" } }, "sha512-y+ichcgc2LrADuhLNAx8DFjVfgz91pRxfZdI3UDhxHvcVEZsenLO+7XaU5vOp0u/7V/wZ+plyuQxtrDlZJ+yeg=="], - "cheerio/htmlparser2": ["htmlparser2@10.1.0", "", { "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.3", "domutils": "^3.2.2", "entities": "^7.0.1" } }, "sha512-VTZkM9GWRAtEpveh7MSF6SjjrpNVNNVJfFup7xTY3UpFtm67foy9HDVXneLtFVt4pMz5kZtgNcvCniNFb1hlEQ=="], 
- "chrome-launcher/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], "chromium-bidi/zod": ["zod@3.23.8", "", {}, "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g=="], @@ -4647,16 +4326,14 @@ "concat-stream/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], - "concurrently/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], - - "critters/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], - "cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], "cytoscape-fcose/cose-base": ["cose-base@2.2.0", "", { "dependencies": { "layout-base": "^2.0.0" } }, "sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g=="], "d3-dsv/commander": ["commander@7.2.0", "", {}, "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw=="], + "d3-dsv/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], + "d3-sankey/d3-array": ["d3-array@2.12.1", "", { "dependencies": { "internmap": "^1.0.0" } }, 
"sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ=="], "d3-sankey/d3-shape": ["d3-shape@1.3.7", "", { "dependencies": { "d3-path": "1" } }, "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw=="], @@ -4669,6 +4346,10 @@ "e2b/glob": ["glob@11.1.0", "", { "dependencies": { "foreground-child": "^3.3.1", "jackspeak": "^4.1.1", "minimatch": "^10.1.1", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^2.0.0" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-vuNwKSaKiqm7g0THUBu2x7ckSs3XJLXE+2ssL7/MfTGPLLcrJQ/4Uq1CjPTtO5cCIiRxqvN6Twy1qOwhL0Xjcw=="], + "encoding/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], + + "encoding-sniffer/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], + "engine.io/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], "engine.io/ws": ["ws@8.18.3", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg=="], @@ -4723,14 +4404,12 @@ "groq-sdk/node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="], - "hexer/process": ["process@0.10.1", "", {}, 
"sha512-dyIett8dgGIZ/TXKUzeYExt7WA6ldDzys9vTDU/cCA9L17Ypme+KzS+NjQCjpn9xsvi/shbMC+yP/BcFMBz0NA=="], + "html-to-text/htmlparser2": ["htmlparser2@8.0.2", "", { "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.3", "domutils": "^3.0.1", "entities": "^4.4.0" } }, "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA=="], - "htmlparser2/entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="], + "htmlparser2/entities": ["entities@7.0.1", "", {}, "sha512-TWrgLOFUQTH994YUyl1yT4uyavY5nNB5muff+RtWaqNVCAK408b5ZnnbNAUEWLTCpum9w6arT70i1XdQ4UeOPA=="], "http-response-object/@types/node": ["@types/node@10.17.60", "", {}, "sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw=="], - "imapflow/iconv-lite": ["iconv-lite@0.7.1", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-2Tth85cXwGFHfvRgZWszZSvdo+0Xsqmw8k8ZwxScfcBneNUraK+dxRxRm24nszx80Y0TVio8kKLt5sLE7ZCLlw=="], - "imapflow/nodemailer": ["nodemailer@7.0.12", "", {}, "sha512-H+rnK5bX2Pi/6ms3sN4/jRQvYSMltV6vqup/0SFOrxYYY/qoNvhXPlYq3e+Pm9RFJRwrMGbMIwi81M4dxpomhA=="], "imapflow/pino": ["pino@10.1.0", "", { "dependencies": { "@pinojs/redact": "^0.4.0", "atomic-sleep": "^1.0.0", "on-exit-leak-free": "^2.1.0", "pino-abstract-transport": "^2.0.0", "pino-std-serializers": "^7.0.0", "process-warning": "^5.0.0", "quick-format-unescaped": "^4.0.3", "real-require": "^0.2.0", "safe-stable-stringify": "^2.3.1", "sonic-boom": "^4.0.1", "thread-stream": "^3.0.0" }, "bin": { "pino": "bin.js" } }, "sha512-0zZC2ygfdqvqK8zJIr1e+wT1T/L+LF6qvqvbzEQ6tiMAoTqEVK9a1K3YRu8HEUvGEvNqZyPJTtb2sNIoTkB83w=="], @@ -4741,10 +4420,6 @@ "isomorphic-unfetch/node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, 
"sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="], - "istanbul-lib-report/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - - "jaeger-client/uuid": ["uuid@8.3.2", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="], - "json-schema-to-typescript/js-yaml": ["js-yaml@4.1.0", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="], "katex/commander": ["commander@8.3.0", "", {}, "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww=="], @@ -4753,6 +4428,8 @@ "langsmith/uuid": ["uuid@10.0.0", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ=="], + "libmime/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], + "linebreak/base64-js": ["base64-js@0.0.8", "", {}, "sha512-3XSA2cR/h/73EzlXXdU6YNycmYI7+kicTxks4eJg2g39biHR84slg2+des+p7iHYhbRg/udIS4TD53WabcOUkw=="], "lint-staged/commander": ["commander@13.1.0", "", {}, "sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw=="], @@ -4773,18 +4450,16 @@ "mammoth/argparse": ["argparse@1.0.10", "", { "dependencies": { "sprintf-js": "~1.0.2" } }, "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg=="], - "md-to-react-email/marked": ["marked@7.0.4", "", { "bin": { "marked": "bin/marked.js" } }, 
"sha512-t8eP0dXRJMtMvBojtkcsA7n48BkauktUKzfkPSCq85ZMTJ0v76Rke4DYz01omYpPTUh4p/f7HePgRo3ebG8+QQ=="], - "mdast-util-find-and-replace/escape-string-regexp": ["escape-string-regexp@5.0.0", "", {}, "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw=="], - "mermaid/marked": ["marked@16.4.2", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-TI3V8YYWvkVf3KJe1dRkpnjs68JUPyEa5vjKrp1XEEJUAOaQc+Qj+L1qWbPd0SJuAdQkFU0h73sXXqwDYxsiDA=="], - "micromatch/picomatch": ["picomatch@2.3.2", "", {}, "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA=="], "monaco-editor/dompurify": ["dompurify@3.2.7", "", { "optionalDependencies": { "@types/trusted-types": "^2.0.7" } }, "sha512-WhL/YuveyGXJaerVlMYGWhvQswa7myDG17P7Vu65EWC05o8vfeNbvNf4d/BOvH99+ZW+LlQsc1GDKMa1vNK6dw=="], "monaco-editor/marked": ["marked@14.0.0", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-uIj4+faQ+MgHgwUW1l2PsPglZLOLOT1uErt06dAPtx2kjteLAkbsd/0FiYg/MGS+i7ZKLb7w2WClxHkzOOuryQ=="], + "mysql2/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], + "neo4j-driver-bolt-connection/string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="], "next/postcss": ["postcss@8.4.31", "", { "dependencies": { "nanoid": "^3.3.6", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" } }, "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ=="], @@ -4815,9 +4490,9 @@ "parse-entities/@types/unist": ["@types/unist@2.0.11", "", {}, "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA=="], - "pdf-lib/tslib": ["tslib@1.14.1", "", {}, 
"sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="], + "parse5/entities": ["entities@6.0.1", "", {}, "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g=="], - "pino/thread-stream": ["thread-stream@3.1.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A=="], + "pdf-lib/tslib": ["tslib@1.14.1", "", {}, "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="], "pino-pretty/pino-abstract-transport": ["pino-abstract-transport@3.0.0", "", { "dependencies": { "split2": "^4.0.0" } }, "sha512-wlfUczU+n7Hy/Ha5j9a/gZNy7We5+cXp8YL+X+PG8S0KXxw7n/JXA3c46Y0zQznIJ83URJiwy7Lh56WLokNuxg=="], @@ -4843,8 +4518,6 @@ "puppeteer-core/devtools-protocol": ["devtools-protocol@0.0.1312386", "", {}, "sha512-DPnhUXvmvKT2dFA/j7B+riVLUt9Q6RKJlcppojL5CoRywJJKLDYnRlw0gTFKfgDPHP5E04UoB71SxoJlVZy8FA=="], - "raw-body/iconv-lite": ["iconv-lite@0.7.1", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-2Tth85cXwGFHfvRgZWszZSvdo+0Xsqmw8k8ZwxScfcBneNUraK+dxRxRm24nszx80Y0TVio8kKLt5sLE7ZCLlw=="], - "rc/strip-json-comments": ["strip-json-comments@2.0.1", "", {}, "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ=="], "react-email/chokidar": ["chokidar@4.0.3", "", { "dependencies": { "readdirp": "^4.0.1" } }, "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="], @@ -4869,8 +4542,6 @@ "restore-cursor/signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="], - "rss-parser/entities": ["entities@2.2.0", "", {}, "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A=="], - "sim/@types/node": ["@types/node@24.2.1", "", { "dependencies": { 
"undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="], "sim/lucide-react": ["lucide-react@0.479.0", "", { "peerDependencies": { "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-aBhNnveRhorBOK7uA4gDjgaf+YlHMdMhQ/3cupk6exM10hWlEU+2QtWYOfhXhjAsmdb6LeKR+NZnow4UxRRiTQ=="], @@ -4897,6 +4568,8 @@ "source-map-support/source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="], + "streamdown/marked": ["marked@17.0.4", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-NOmVMM+KAokHMvjWmC5N/ZOvgmSWuqJB8FoYI019j4ogb/PeRMKoKIjReZ2w3376kkA8dSJIP8uD993Kxc0iRQ=="], + "string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], "string-width-cjs/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], @@ -4915,8 +4588,6 @@ "test-exclude/glob": ["glob@10.5.0", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg=="], - "thriftrw/long": ["long@2.4.0", "", {}, "sha512-ijUtjmO/n2A5PaosNG9ZGDsQ3vxJg7ZW8vsY8Kp0f2yIZWhSJvjmegV7t+9RPQKxKrvj8yKGehhS+po14hPLGQ=="], - "tsx/esbuild": ["esbuild@0.27.7", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.27.7", "@esbuild/android-arm": "0.27.7", "@esbuild/android-arm64": "0.27.7", "@esbuild/android-x64": "0.27.7", "@esbuild/darwin-arm64": "0.27.7", "@esbuild/darwin-x64": "0.27.7", "@esbuild/freebsd-arm64": "0.27.7", "@esbuild/freebsd-x64": "0.27.7", "@esbuild/linux-arm": "0.27.7", "@esbuild/linux-arm64": "0.27.7", 
"@esbuild/linux-ia32": "0.27.7", "@esbuild/linux-loong64": "0.27.7", "@esbuild/linux-mips64el": "0.27.7", "@esbuild/linux-ppc64": "0.27.7", "@esbuild/linux-riscv64": "0.27.7", "@esbuild/linux-s390x": "0.27.7", "@esbuild/linux-x64": "0.27.7", "@esbuild/netbsd-arm64": "0.27.7", "@esbuild/netbsd-x64": "0.27.7", "@esbuild/openbsd-arm64": "0.27.7", "@esbuild/openbsd-x64": "0.27.7", "@esbuild/openharmony-arm64": "0.27.7", "@esbuild/sunos-x64": "0.27.7", "@esbuild/win32-arm64": "0.27.7", "@esbuild/win32-ia32": "0.27.7", "@esbuild/win32-x64": "0.27.7" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w=="], "tsyringe/tslib": ["tslib@1.14.1", "", {}, "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="], @@ -4937,6 +4608,8 @@ "vite/esbuild": ["esbuild@0.27.7", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.27.7", "@esbuild/android-arm": "0.27.7", "@esbuild/android-arm64": "0.27.7", "@esbuild/android-x64": "0.27.7", "@esbuild/darwin-arm64": "0.27.7", "@esbuild/darwin-x64": "0.27.7", "@esbuild/freebsd-arm64": "0.27.7", "@esbuild/freebsd-x64": "0.27.7", "@esbuild/linux-arm": "0.27.7", "@esbuild/linux-arm64": "0.27.7", "@esbuild/linux-ia32": "0.27.7", "@esbuild/linux-loong64": "0.27.7", "@esbuild/linux-mips64el": "0.27.7", "@esbuild/linux-ppc64": "0.27.7", "@esbuild/linux-riscv64": "0.27.7", "@esbuild/linux-s390x": "0.27.7", "@esbuild/linux-x64": "0.27.7", "@esbuild/netbsd-arm64": "0.27.7", "@esbuild/netbsd-x64": "0.27.7", "@esbuild/openbsd-arm64": "0.27.7", "@esbuild/openbsd-x64": "0.27.7", "@esbuild/openharmony-arm64": "0.27.7", "@esbuild/sunos-x64": "0.27.7", "@esbuild/win32-arm64": "0.27.7", "@esbuild/win32-ia32": "0.27.7", "@esbuild/win32-x64": "0.27.7" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w=="], + "whatwg-encoding/iconv-lite": 
["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], + "xml-crypto/xpath": ["xpath@0.0.33", "", {}, "sha512-NNXnzrkDrAzalLhIUc01jO2mOzXGXh1JwPgkihcLLzw98c0WgYDmmjSh1Kl3wzaxSVWMuA+fe0WTWOBDWCBmNA=="], "xml2js/xmlbuilder": ["xmlbuilder@11.0.1", "", {}, "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA=="], @@ -4951,129 +4624,9 @@ "@aws-crypto/util/@smithy/util-utf8/@smithy/util-buffer-from": ["@smithy/util-buffer-from@2.2.0", "", { "dependencies": { "@smithy/is-array-buffer": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA=="], - "@aws-sdk/client-bedrock-runtime/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.930.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA=="], - - "@aws-sdk/client-bedrock-runtime/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-/G3l5/wbZYP2XEQiOoIkRJmlv15f1P3MSd1a0gz27lHEMrOJOGq66rF1Ca4OJLzapWt3Fy9BPrZAepoAX11kMw=="], - - "@aws-sdk/client-bedrock-runtime/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/node-http-handler": "^4.4.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-stream": "^4.5.6", 
"tslib": "^2.6.2" } }, "sha512-dOrc03DHElNBD6N9Okt4U0zhrG4Wix5QUBSZPr5VN8SvmjD9dkrrxOkkJaMCl/bzrW7kbQEp7LuBdbxArMmOZQ=="], - - "@aws-sdk/client-bedrock-runtime/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-login": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", "@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-gn7PJQEzb/cnInNFTOaDoCN/hOKqMejNmLof1W5VW95Qk0TPO52lH8R4RmJPnRrwFMswOWswTOpR1roKNLIrcw=="], - - "@aws-sdk/client-bedrock-runtime/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-pILBzt5/TYCqRsJb7vZlxmRIe0/T+FZPeml417EK75060ajDGnVJjHcuVdLVIeKoTKm9gmJc9l45gon6PbHyUQ=="], - - "@aws-sdk/client-bedrock-runtime/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.940.0", "", { "dependencies": { "@aws-sdk/client-sso": "3.940.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/token-providers": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-q6JMHIkBlDCOMnA3RAzf8cGfup+8ukhhb50fNpghMs1SNBGhanmaMbZSgLigBRsPQW7fOk2l8jnzdVLS+BB9Uw=="], - - 
"@aws-sdk/client-bedrock-runtime/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-9QLTIkDJHHaYL0nyymO41H8g3ui1yz6Y3GmAN1gYQa6plXisuFBnGAbmKVj7zNvjWaOKdF0dV3dd3AFKEDoJ/w=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.930.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-/G3l5/wbZYP2XEQiOoIkRJmlv15f1P3MSd1a0gz27lHEMrOJOGq66rF1Ca4OJLzapWt3Fy9BPrZAepoAX11kMw=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/node-http-handler": "^4.4.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-stream": "^4.5.6", "tslib": "^2.6.2" } }, "sha512-dOrc03DHElNBD6N9Okt4U0zhrG4Wix5QUBSZPr5VN8SvmjD9dkrrxOkkJaMCl/bzrW7kbQEp7LuBdbxArMmOZQ=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini": 
["@aws-sdk/credential-provider-ini@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-login": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", "@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-gn7PJQEzb/cnInNFTOaDoCN/hOKqMejNmLof1W5VW95Qk0TPO52lH8R4RmJPnRrwFMswOWswTOpR1roKNLIrcw=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-pILBzt5/TYCqRsJb7vZlxmRIe0/T+FZPeml417EK75060ajDGnVJjHcuVdLVIeKoTKm9gmJc9l45gon6PbHyUQ=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.940.0", "", { "dependencies": { "@aws-sdk/client-sso": "3.940.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/token-providers": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-q6JMHIkBlDCOMnA3RAzf8cGfup+8ukhhb50fNpghMs1SNBGhanmaMbZSgLigBRsPQW7fOk2l8jnzdVLS+BB9Uw=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": 
"3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-9QLTIkDJHHaYL0nyymO41H8g3ui1yz6Y3GmAN1gYQa6plXisuFBnGAbmKVj7zNvjWaOKdF0dV3dd3AFKEDoJ/w=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.930.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-/G3l5/wbZYP2XEQiOoIkRJmlv15f1P3MSd1a0gz27lHEMrOJOGq66rF1Ca4OJLzapWt3Fy9BPrZAepoAX11kMw=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/node-http-handler": "^4.4.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-stream": "^4.5.6", "tslib": "^2.6.2" } }, "sha512-dOrc03DHElNBD6N9Okt4U0zhrG4Wix5QUBSZPr5VN8SvmjD9dkrrxOkkJaMCl/bzrW7kbQEp7LuBdbxArMmOZQ=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-login": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", 
"@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-gn7PJQEzb/cnInNFTOaDoCN/hOKqMejNmLof1W5VW95Qk0TPO52lH8R4RmJPnRrwFMswOWswTOpR1roKNLIrcw=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-pILBzt5/TYCqRsJb7vZlxmRIe0/T+FZPeml417EK75060ajDGnVJjHcuVdLVIeKoTKm9gmJc9l45gon6PbHyUQ=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.940.0", "", { "dependencies": { "@aws-sdk/client-sso": "3.940.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/token-providers": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-q6JMHIkBlDCOMnA3RAzf8cGfup+8ukhhb50fNpghMs1SNBGhanmaMbZSgLigBRsPQW7fOk2l8jnzdVLS+BB9Uw=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-9QLTIkDJHHaYL0nyymO41H8g3ui1yz6Y3GmAN1gYQa6plXisuFBnGAbmKVj7zNvjWaOKdF0dV3dd3AFKEDoJ/w=="], - - 
"@aws-sdk/client-dynamodb/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.930.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-/G3l5/wbZYP2XEQiOoIkRJmlv15f1P3MSd1a0gz27lHEMrOJOGq66rF1Ca4OJLzapWt3Fy9BPrZAepoAX11kMw=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/node-http-handler": "^4.4.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-stream": "^4.5.6", "tslib": "^2.6.2" } }, "sha512-dOrc03DHElNBD6N9Okt4U0zhrG4Wix5QUBSZPr5VN8SvmjD9dkrrxOkkJaMCl/bzrW7kbQEp7LuBdbxArMmOZQ=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-login": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", "@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", 
"@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-gn7PJQEzb/cnInNFTOaDoCN/hOKqMejNmLof1W5VW95Qk0TPO52lH8R4RmJPnRrwFMswOWswTOpR1roKNLIrcw=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-pILBzt5/TYCqRsJb7vZlxmRIe0/T+FZPeml417EK75060ajDGnVJjHcuVdLVIeKoTKm9gmJc9l45gon6PbHyUQ=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.940.0", "", { "dependencies": { "@aws-sdk/client-sso": "3.940.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/token-providers": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-q6JMHIkBlDCOMnA3RAzf8cGfup+8ukhhb50fNpghMs1SNBGhanmaMbZSgLigBRsPQW7fOk2l8jnzdVLS+BB9Uw=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-9QLTIkDJHHaYL0nyymO41H8g3ui1yz6Y3GmAN1gYQa6plXisuFBnGAbmKVj7zNvjWaOKdF0dV3dd3AFKEDoJ/w=="], - - "@aws-sdk/client-rds-data/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.930.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA=="], - - 
"@aws-sdk/client-rds-data/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-/G3l5/wbZYP2XEQiOoIkRJmlv15f1P3MSd1a0gz27lHEMrOJOGq66rF1Ca4OJLzapWt3Fy9BPrZAepoAX11kMw=="], - - "@aws-sdk/client-rds-data/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/node-http-handler": "^4.4.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-stream": "^4.5.6", "tslib": "^2.6.2" } }, "sha512-dOrc03DHElNBD6N9Okt4U0zhrG4Wix5QUBSZPr5VN8SvmjD9dkrrxOkkJaMCl/bzrW7kbQEp7LuBdbxArMmOZQ=="], - - "@aws-sdk/client-rds-data/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-login": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", "@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-gn7PJQEzb/cnInNFTOaDoCN/hOKqMejNmLof1W5VW95Qk0TPO52lH8R4RmJPnRrwFMswOWswTOpR1roKNLIrcw=="], - - "@aws-sdk/client-rds-data/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.940.0", 
"", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-pILBzt5/TYCqRsJb7vZlxmRIe0/T+FZPeml417EK75060ajDGnVJjHcuVdLVIeKoTKm9gmJc9l45gon6PbHyUQ=="], - - "@aws-sdk/client-rds-data/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.940.0", "", { "dependencies": { "@aws-sdk/client-sso": "3.940.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/token-providers": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-q6JMHIkBlDCOMnA3RAzf8cGfup+8ukhhb50fNpghMs1SNBGhanmaMbZSgLigBRsPQW7fOk2l8jnzdVLS+BB9Uw=="], - - "@aws-sdk/client-rds-data/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-9QLTIkDJHHaYL0nyymO41H8g3ui1yz6Y3GmAN1gYQa6plXisuFBnGAbmKVj7zNvjWaOKdF0dV3dd3AFKEDoJ/w=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.930.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, 
"sha512-/G3l5/wbZYP2XEQiOoIkRJmlv15f1P3MSd1a0gz27lHEMrOJOGq66rF1Ca4OJLzapWt3Fy9BPrZAepoAX11kMw=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/node-http-handler": "^4.4.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-stream": "^4.5.6", "tslib": "^2.6.2" } }, "sha512-dOrc03DHElNBD6N9Okt4U0zhrG4Wix5QUBSZPr5VN8SvmjD9dkrrxOkkJaMCl/bzrW7kbQEp7LuBdbxArMmOZQ=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-login": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", "@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-gn7PJQEzb/cnInNFTOaDoCN/hOKqMejNmLof1W5VW95Qk0TPO52lH8R4RmJPnRrwFMswOWswTOpR1roKNLIrcw=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, 
"sha512-pILBzt5/TYCqRsJb7vZlxmRIe0/T+FZPeml417EK75060ajDGnVJjHcuVdLVIeKoTKm9gmJc9l45gon6PbHyUQ=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.940.0", "", { "dependencies": { "@aws-sdk/client-sso": "3.940.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/token-providers": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-q6JMHIkBlDCOMnA3RAzf8cGfup+8ukhhb50fNpghMs1SNBGhanmaMbZSgLigBRsPQW7fOk2l8jnzdVLS+BB9Uw=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-9QLTIkDJHHaYL0nyymO41H8g3ui1yz6Y3GmAN1gYQa6plXisuFBnGAbmKVj7zNvjWaOKdF0dV3dd3AFKEDoJ/w=="], - - "@aws-sdk/client-sesv2/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.930.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA=="], - - "@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-/G3l5/wbZYP2XEQiOoIkRJmlv15f1P3MSd1a0gz27lHEMrOJOGq66rF1Ca4OJLzapWt3Fy9BPrZAepoAX11kMw=="], - - "@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-http": 
["@aws-sdk/credential-provider-http@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/node-http-handler": "^4.4.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-stream": "^4.5.6", "tslib": "^2.6.2" } }, "sha512-dOrc03DHElNBD6N9Okt4U0zhrG4Wix5QUBSZPr5VN8SvmjD9dkrrxOkkJaMCl/bzrW7kbQEp7LuBdbxArMmOZQ=="], - - "@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-login": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", "@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-gn7PJQEzb/cnInNFTOaDoCN/hOKqMejNmLof1W5VW95Qk0TPO52lH8R4RmJPnRrwFMswOWswTOpR1roKNLIrcw=="], - - "@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-pILBzt5/TYCqRsJb7vZlxmRIe0/T+FZPeml417EK75060ajDGnVJjHcuVdLVIeKoTKm9gmJc9l45gon6PbHyUQ=="], - - "@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.940.0", "", { "dependencies": { "@aws-sdk/client-sso": "3.940.0", 
"@aws-sdk/core": "3.940.0", "@aws-sdk/token-providers": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-q6JMHIkBlDCOMnA3RAzf8cGfup+8ukhhb50fNpghMs1SNBGhanmaMbZSgLigBRsPQW7fOk2l8jnzdVLS+BB9Uw=="], - - "@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-9QLTIkDJHHaYL0nyymO41H8g3ui1yz6Y3GmAN1gYQa6plXisuFBnGAbmKVj7zNvjWaOKdF0dV3dd3AFKEDoJ/w=="], - - "@aws-sdk/client-sesv2/@aws-sdk/signature-v4-multi-region/@aws-sdk/middleware-sdk-s3": ["@aws-sdk/middleware-sdk-s3@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-arn-parser": "3.893.0", "@smithy/core": "^3.18.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-config-provider": "^4.2.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-stream": "^4.5.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-JYkLjgS1wLoKHJ40G63+afM1ehmsPsjcmrHirKh8+kSCx4ip7+nL1e/twV4Zicxr8RJi9Y0Ahq5mDvneilDDKQ=="], - - "@aws-sdk/client-sqs/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.930.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA=="], - - "@aws-sdk/client-sqs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.947.0", "", { 
"dependencies": { "@aws-sdk/core": "3.947.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-VR2V6dRELmzwAsCpK4GqxUi6UW5WNhAXS9F9AzWi5jvijwJo3nH92YNJUP4quMpgFZxJHEWyXLWgPjh9u0zYOA=="], - - "@aws-sdk/client-sqs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.947.0", "", { "dependencies": { "@aws-sdk/core": "3.947.0", "@aws-sdk/types": "3.936.0", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/node-http-handler": "^4.4.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.10", "@smithy/types": "^4.9.0", "@smithy/util-stream": "^4.5.6", "tslib": "^2.6.2" } }, "sha512-inF09lh9SlHj63Vmr5d+LmwPXZc2IbK8lAruhOr3KLsZAIHEgHgGPXWDC2ukTEMzg0pkexQ6FOhXXad6klK4RA=="], - - "@aws-sdk/client-sqs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.947.0", "", { "dependencies": { "@aws-sdk/core": "3.947.0", "@aws-sdk/credential-provider-env": "3.947.0", "@aws-sdk/credential-provider-http": "3.947.0", "@aws-sdk/credential-provider-login": "3.947.0", "@aws-sdk/credential-provider-process": "3.947.0", "@aws-sdk/credential-provider-sso": "3.947.0", "@aws-sdk/credential-provider-web-identity": "3.947.0", "@aws-sdk/nested-clients": "3.947.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-A2ZUgJUJZERjSzvCi2NR/hBVbVkTXPD0SdKcR/aITb30XwF+n3T963b+pJl90qhOspoy7h0IVYNR7u5Nr9tJdQ=="], - - "@aws-sdk/client-sqs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.947.0", "", { "dependencies": { "@aws-sdk/core": "3.947.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": 
"^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-WpanFbHe08SP1hAJNeDdBDVz9SGgMu/gc0XJ9u3uNpW99nKZjDpvPRAdW7WLA4K6essMjxWkguIGNOpij6Do2Q=="], - - "@aws-sdk/client-sqs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.947.0", "", { "dependencies": { "@aws-sdk/client-sso": "3.947.0", "@aws-sdk/core": "3.947.0", "@aws-sdk/token-providers": "3.947.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-NktnVHTGaUMaozxycYrepvb3yfFquHTQ53lt6hBEVjYBzK3C4tVz0siUpr+5RMGLSiZ5bLBp2UjJPgwx4i4waQ=="], - - "@aws-sdk/client-sqs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.947.0", "", { "dependencies": { "@aws-sdk/core": "3.947.0", "@aws-sdk/nested-clients": "3.947.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-gokm/e/YHiHLrZgLq4j8tNAn8RJDPbIcglFRKgy08q8DmAqHQ8MXAKW3eS0QjAuRXU9mcMmUo1NrX6FRNBCCPw=="], - "@aws-sdk/client-sso/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.930.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA=="], - "@aws-sdk/lib-dynamodb/@aws-sdk/core/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], - - "@aws-sdk/lib-dynamodb/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.930.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, 
"sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA=="], - - "@aws-sdk/nested-clients/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.930.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA=="], - - "@aws-sdk/token-providers/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.930.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA=="], + "@aws-sdk/dynamodb-codec/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.22", "", { "dependencies": { "@nodable/entities": "2.1.0", "@smithy/types": "^4.14.1", "fast-xml-parser": "5.7.2", "tslib": "^2.6.2" } }, "sha512-PMYKKtJd70IsSG0yHrdAbxBr+ZWBKLvzFZfD3/urxgf6hXVMzuU5M+3MJ5G67RpOmLBu1fAUN65SbWuKUCOlAA=="], "@babel/helper-compilation-targets/lru-cache/yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="], @@ -5141,8 +4694,6 @@ "@octokit/plugin-rest-endpoint-methods/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="], - "@opentelemetry/exporter-jaeger/@opentelemetry/sdk-trace-base/@opentelemetry/resources": ["@opentelemetry/resources@2.1.0", "", { "dependencies": { "@opentelemetry/core": "2.1.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-1CJjf3LCvoefUOgegxi8h6r4B/wLSzInyhGP2UmIBYNlo4Qk5CZ73e1eEyWmfXvFtm1ybkmfb2DqWvspsYLrWw=="], - "@puppeteer/browsers/tar-fs/tar-stream": ["tar-stream@3.1.8", "", { "dependencies": { "b4a": "^1.6.4", "bare-fs": "^4.5.5", "fast-fifo": 
"^1.2.0", "streamx": "^2.15.0" } }, "sha512-U6QpVRyCGHva435KoNWy9PRoi2IFYCgtEhq9nmrPPpbRacPs9IH4aJ3gbrFC8dPcXvdSZ4XXfXT5Fshbp2MtlQ=="], "@radix-ui/react-label/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.4", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA=="], @@ -5153,8 +4704,6 @@ "@radix-ui/react-visually-hidden/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.4", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA=="], - "@react-email/components/@react-email/render/prettier": ["prettier@3.4.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ=="], - "@sim/realtime/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="], "@sim/security/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="], @@ -5197,6 +4746,30 @@ "@types/node-fetch/@types/node/undici-types": ["undici-types@7.19.2", "", {}, "sha512-qYVnV5OEm2AW8cJMCpdV20CDyaN3g0AjDlOGf1OW4iaDEx8MwdtChUp4zu4H0VP3nDRF/8RKWH+IPp9uW0YGZg=="], + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/core": ["@aws-sdk/core@3.940.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/xml-builder": "3.930.0", "@smithy/core": "^3.18.5", 
"@smithy/node-config-provider": "^4.3.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-KsGD2FLaX5ngJao1mHxodIVU9VYd1E8810fcYiGwO1PFHDzf5BEkp6D9IdMeQwT8Q6JLYtiiT1Y/o3UCScnGoA=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.940.0", "", { "dependencies": { "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-ini": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", "@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-M8NFAvgvO6xZjiti5kztFiAYmSmSlG3eUfr4ZHSfXYZUA/KUdZU/D6xJyaLnU8cYRWBludb6K9XPKKVwKfqm4g=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/middleware-recursion-detection": 
["@aws-sdk/middleware-recursion-detection@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws/lambda-invoke-store": "^0.2.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-l4aGbHpXM45YNgXggIux1HgsCVAvvBoqHPkqLnqMl9QVapfuSTjJHfDYDsx1Xxct6/m7qSMUzanBALhiaGO2fA=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@smithy/core": "^3.18.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-nJbLrUj6fY+l2W2rIB9P4Qvpiy0tnTdg/dmixRxrU1z3e8wBdspJlyE+AZN4fuVbeL6rrRrO/zxQC1bB3cw5IA=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/config-resolver": "^4.4.3", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/signature-v4-multi-region": ["@aws-sdk/signature-v4-multi-region@3.940.0", "", { "dependencies": { "@aws-sdk/middleware-sdk-s3": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-ugHZEoktD/bG6mdgmhzLDjMP2VrYRAUPRPF1DpCyiZexkH7DCU7XrSJyXMvkcf0DHV+URk0q2sLf/oqn1D2uYw=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/types": ["@aws-sdk/types@3.936.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.936.0", 
"", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-endpoints": "^3.2.5", "tslib": "^2.6.2" } }, "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/types": "^4.9.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.940.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-dlD/F+L/jN26I8Zg5x0oDGJiA+/WEQmnSE27fi5ydvYnpfQLwThtQo9SsNS47XSR/SOULaaoC9qx929rZuo74A=="], + "@types/nodemailer/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="], "@types/papaparse/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="], @@ -5215,8 +4788,6 @@ "c12/chokidar/readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="], - "cheerio/htmlparser2/entities": ["entities@7.0.1", "", {}, "sha512-TWrgLOFUQTH994YUyl1yT4uyavY5nNB5muff+RtWaqNVCAK408b5ZnnbNAUEWLTCpum9w6arT70i1XdQ4UeOPA=="], - "chrome-launcher/@types/node/undici-types": ["undici-types@7.19.2", "", {}, "sha512-qYVnV5OEm2AW8cJMCpdV20CDyaN3g0AjDlOGf1OW4iaDEx8MwdtChUp4zu4H0VP3nDRF/8RKWH+IPp9uW0YGZg=="], 
"cli-truncate/string-width/emoji-regex": ["emoji-regex@9.2.2", "", {}, "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg=="], @@ -5227,10 +4798,6 @@ "concat-stream/readable-stream/string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="], - "concurrently/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - - "critters/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - "cytoscape-fcose/cose-base/layout-base": ["layout-base@2.0.1", "", {}, "sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg=="], "d3-sankey/d3-shape/d3-path": ["d3-path@1.0.9", "", {}, "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg=="], @@ -5341,9 +4908,7 @@ "groq-sdk/node-fetch/whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="], - "imapflow/pino/thread-stream": ["thread-stream@3.1.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A=="], - - "inquirer/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], + "html-to-text/htmlparser2/entities": ["entities@4.5.0", "", {}, 
"sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="], "inquirer/ora/is-interactive": ["is-interactive@1.0.0", "", {}, "sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w=="], @@ -5353,8 +4918,6 @@ "isomorphic-unfetch/node-fetch/whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="], - "langsmith/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - "lint-staged/listr2/cli-truncate": ["cli-truncate@4.0.0", "", { "dependencies": { "slice-ansi": "^5.0.0", "string-width": "^7.0.0" } }, "sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA=="], "lint-staged/listr2/log-update": ["log-update@6.1.0", "", { "dependencies": { "ansi-escapes": "^7.0.0", "cli-cursor": "^5.0.0", "slice-ansi": "^7.1.0", "strip-ansi": "^7.1.0", "wrap-ansi": "^9.0.0" } }, "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w=="], @@ -5485,8 +5048,6 @@ "sim/tailwindcss/postcss-selector-parser": ["postcss-selector-parser@6.1.2", "", { "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" } }, "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg=="], - "simstudio/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - "tar-stream/readable-stream/string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="], 
"test-exclude/glob/jackspeak": ["jackspeak@3.4.3", "", { "dependencies": { "@isaacs/cliui": "^8.0.2" }, "optionalDependencies": { "@pkgjs/parseargs": "^0.11.0" } }, "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw=="], @@ -5605,56 +5166,8 @@ "@aws-crypto/util/@smithy/util-utf8/@smithy/util-buffer-from/@smithy/is-array-buffer": ["@smithy/is-array-buffer@2.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA=="], - "@aws-sdk/client-bedrock-runtime/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="], - - "@aws-sdk/client-bedrock-runtime/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/credential-provider-login": ["@aws-sdk/credential-provider-login@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-fOKC3VZkwa9T2l2VFKWRtfHQPQuISqqNl35ZhcXjWKVwRwl/o7THPMkqI4XwgT2noGa7LLYVbWMwnsgSsBqglg=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="], - - "@aws-sdk/client-cloudwatch-logs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/credential-provider-login": ["@aws-sdk/credential-provider-login@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", 
"@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-fOKC3VZkwa9T2l2VFKWRtfHQPQuISqqNl35ZhcXjWKVwRwl/o7THPMkqI4XwgT2noGa7LLYVbWMwnsgSsBqglg=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="], - - "@aws-sdk/client-cloudwatch/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/credential-provider-login": ["@aws-sdk/credential-provider-login@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-fOKC3VZkwa9T2l2VFKWRtfHQPQuISqqNl35ZhcXjWKVwRwl/o7THPMkqI4XwgT2noGa7LLYVbWMwnsgSsBqglg=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="], - - "@aws-sdk/client-dynamodb/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/credential-provider-login": ["@aws-sdk/credential-provider-login@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, 
"sha512-fOKC3VZkwa9T2l2VFKWRtfHQPQuISqqNl35ZhcXjWKVwRwl/o7THPMkqI4XwgT2noGa7LLYVbWMwnsgSsBqglg=="], - - "@aws-sdk/client-rds-data/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="], - - "@aws-sdk/client-rds-data/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/credential-provider-login": ["@aws-sdk/credential-provider-login@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-fOKC3VZkwa9T2l2VFKWRtfHQPQuISqqNl35ZhcXjWKVwRwl/o7THPMkqI4XwgT2noGa7LLYVbWMwnsgSsBqglg=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="], - - "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/credential-provider-login": ["@aws-sdk/credential-provider-login@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-fOKC3VZkwa9T2l2VFKWRtfHQPQuISqqNl35ZhcXjWKVwRwl/o7THPMkqI4XwgT2noGa7LLYVbWMwnsgSsBqglg=="], - - "@aws-sdk/client-sesv2/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, 
"bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="], - - "@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/credential-provider-login": ["@aws-sdk/credential-provider-login@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-fOKC3VZkwa9T2l2VFKWRtfHQPQuISqqNl35ZhcXjWKVwRwl/o7THPMkqI4XwgT2noGa7LLYVbWMwnsgSsBqglg=="], - - "@aws-sdk/client-sesv2/@aws-sdk/signature-v4-multi-region/@aws-sdk/middleware-sdk-s3/@aws-sdk/util-arn-parser": ["@aws-sdk/util-arn-parser@3.893.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-u8H4f2Zsi19DGnwj5FSZzDMhytYF/bCh37vAtBsn3cNDL3YG578X5oc+wSX54pM3tOxS+NY7tvOAo52SW7koUA=="], - - "@aws-sdk/client-sqs/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="], - - "@aws-sdk/client-sqs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/credential-provider-login": ["@aws-sdk/credential-provider-login@3.947.0", "", { "dependencies": { "@aws-sdk/core": "3.947.0", "@aws-sdk/nested-clients": "3.947.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-u7M3hazcB7aJiVwosNdJRbIJDzbwQ861NTtl6S0HmvWpixaVb7iyhJZWg8/plyUznboZGBm7JVEdxtxv3u0bTA=="], - - "@aws-sdk/client-sqs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/nested-clients": 
["@aws-sdk/nested-clients@3.947.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.947.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.947.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.947.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.7", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.14", "@smithy/middleware-retry": "^4.4.14", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.10", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.13", "@smithy/util-defaults-mode-node": "^4.2.16", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-DjRJEYNnHUTu9kGPPQDTSXquwSEd6myKR4ssI4FaYLFhdT3ldWpj73yYt807H3tdmhS7vPmdVqchSJnjurUQAw=="], - - "@aws-sdk/client-sqs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso/@aws-sdk/client-sso": ["@aws-sdk/client-sso@3.947.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.947.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", 
"@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.947.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.947.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.7", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.14", "@smithy/middleware-retry": "^4.4.14", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.10", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.13", "@smithy/util-defaults-mode-node": "^4.2.16", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-sDwcO8SP290WSErY1S8pz8hTafeghKmmWjNVks86jDK30wx62CfazOTeU70IpWgrUBEygyXk/zPogHsUMbW2Rg=="], - - "@aws-sdk/client-sqs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso/@aws-sdk/token-providers": ["@aws-sdk/token-providers@3.947.0", "", { "dependencies": { "@aws-sdk/core": "3.947.0", "@aws-sdk/nested-clients": "3.947.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-X/DyB8GuK44rsE89Tn5+s542B3PhGbXQSgV8lvqHDzvicwCt0tWny6790st6CPETrVVV2K3oJMfG5U3/jAmaZA=="], - - 
"@aws-sdk/client-sqs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.947.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.947.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.947.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.947.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.7", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.14", "@smithy/middleware-retry": "^4.4.14", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.10", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.13", "@smithy/util-defaults-mode-node": "^4.2.16", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-DjRJEYNnHUTu9kGPPQDTSXquwSEd6myKR4ssI4FaYLFhdT3ldWpj73yYt807H3tdmhS7vPmdVqchSJnjurUQAw=="], - "@aws-sdk/client-sso/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, 
"sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="], - "@aws-sdk/lib-dynamodb/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="], - - "@aws-sdk/nested-clients/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="], - - "@aws-sdk/token-providers/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="], - "@browserbasehq/sdk/node-fetch/whatwg-url/tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="], "@browserbasehq/sdk/node-fetch/whatwg-url/webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="], @@ -5677,6 +5190,22 @@ "@trigger.dev/core/socket.io/engine.io/ws": ["ws@8.17.1", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ=="], + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.930.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA=="], + + 
"@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-/G3l5/wbZYP2XEQiOoIkRJmlv15f1P3MSd1a0gz27lHEMrOJOGq66rF1Ca4OJLzapWt3Fy9BPrZAepoAX11kMw=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/node-http-handler": "^4.4.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/util-stream": "^4.5.6", "tslib": "^2.6.2" } }, "sha512-dOrc03DHElNBD6N9Okt4U0zhrG4Wix5QUBSZPr5VN8SvmjD9dkrrxOkkJaMCl/bzrW7kbQEp7LuBdbxArMmOZQ=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-env": "3.940.0", "@aws-sdk/credential-provider-http": "3.940.0", "@aws-sdk/credential-provider-login": "3.940.0", "@aws-sdk/credential-provider-process": "3.940.0", "@aws-sdk/credential-provider-sso": "3.940.0", "@aws-sdk/credential-provider-web-identity": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-gn7PJQEzb/cnInNFTOaDoCN/hOKqMejNmLof1W5VW95Qk0TPO52lH8R4RmJPnRrwFMswOWswTOpR1roKNLIrcw=="], + + 
"@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-pILBzt5/TYCqRsJb7vZlxmRIe0/T+FZPeml417EK75060ajDGnVJjHcuVdLVIeKoTKm9gmJc9l45gon6PbHyUQ=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.940.0", "", { "dependencies": { "@aws-sdk/client-sso": "3.940.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/token-providers": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-q6JMHIkBlDCOMnA3RAzf8cGfup+8ukhhb50fNpghMs1SNBGhanmaMbZSgLigBRsPQW7fOk2l8jnzdVLS+BB9Uw=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-9QLTIkDJHHaYL0nyymO41H8g3ui1yz6Y3GmAN1gYQa6plXisuFBnGAbmKVj7zNvjWaOKdF0dV3dd3AFKEDoJ/w=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/signature-v4-multi-region/@aws-sdk/middleware-sdk-s3": ["@aws-sdk/middleware-sdk-s3@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-arn-parser": "3.893.0", "@smithy/core": "^3.18.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.8", 
"@smithy/types": "^4.9.0", "@smithy/util-config-provider": "^4.2.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-stream": "^4.5.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-JYkLjgS1wLoKHJ40G63+afM1ehmsPsjcmrHirKh8+kSCx4ip7+nL1e/twV4Zicxr8RJi9Y0Ahq5mDvneilDDKQ=="], + "cli-truncate/string-width/strip-ansi/ansi-regex": ["ansi-regex@6.2.2", "", {}, "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg=="], "fetch-cookie/tough-cookie/tldts/tldts-core": ["tldts-core@7.0.29", "", {}, "sha512-W99NuU7b1DcG3uJ3v9k9VztCH3WialNbBkBft5wCs8V8mexu0XQqaZEYb9l9RNNzK8+3EJ9PKWB0/RUtTQ/o+Q=="], @@ -5753,14 +5282,24 @@ "test-exclude/glob/path-scurry/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], - "@aws-sdk/client-sqs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso/@aws-sdk/token-providers/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.947.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.947.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.947.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.947.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.7", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.14", "@smithy/middleware-retry": "^4.4.14", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", 
"@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.10", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.13", "@smithy/util-defaults-mode-node": "^4.2.16", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-DjRJEYNnHUTu9kGPPQDTSXquwSEd6myKR4ssI4FaYLFhdT3ldWpj73yYt807H3tdmhS7vPmdVqchSJnjurUQAw=="], - "@browserbasehq/stagehand/@anthropic-ai/sdk/node-fetch/whatwg-url/tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="], "@browserbasehq/stagehand/@anthropic-ai/sdk/node-fetch/whatwg-url/webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="], "@trigger.dev/core/socket.io/engine.io/@types/node/undici-types": ["undici-types@7.19.2", "", {}, "sha512-qYVnV5OEm2AW8cJMCpdV20CDyaN3g0AjDlOGf1OW4iaDEx8MwdtChUp4zu4H0VP3nDRF/8RKWH+IPp9uW0YGZg=="], + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/credential-provider-login": ["@aws-sdk/credential-provider-login@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", 
"tslib": "^2.6.2" } }, "sha512-fOKC3VZkwa9T2l2VFKWRtfHQPQuISqqNl35ZhcXjWKVwRwl/o7THPMkqI4XwgT2noGa7LLYVbWMwnsgSsBqglg=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-x0mdv6DkjXqXEcQj3URbCltEzW6hoy/1uIL+i8gExP6YKrnhiZ7SzuB4gPls2UOpK5UqLiqXjhRLfBb1C9i4Dw=="], + + 
"@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso/@aws-sdk/token-providers": ["@aws-sdk/token-providers@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/nested-clients": "3.940.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-k5qbRe/ZFjW9oWEdzLIa2twRVIEx7p/9rutofyrRysrtEnYh3HAWCngAnwbgKMoiwa806UzcTRx0TjyEpnKcCg=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", 
"@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-x0mdv6DkjXqXEcQj3URbCltEzW6hoy/1uIL+i8gExP6YKrnhiZ7SzuB4gPls2UOpK5UqLiqXjhRLfBb1C9i4Dw=="], + + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/signature-v4-multi-region/@aws-sdk/middleware-sdk-s3/@aws-sdk/util-arn-parser": ["@aws-sdk/util-arn-parser@3.893.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-u8H4f2Zsi19DGnwj5FSZzDMhytYF/bCh37vAtBsn3cNDL3YG578X5oc+wSX54pM3tOxS+NY7tvOAo52SW7koUA=="], + "lint-staged/listr2/cli-truncate/string-width/strip-ansi": ["strip-ansi@7.2.0", "", { "dependencies": { "ansi-regex": "^6.2.2" } }, "sha512-yDPMNjp4WyfYBkHnjIRLfca1i6KMyGCtsVgoKe/z1+6vukgaENdgGBZt+ZmKPc4gavvEZ5OgHfHdrazhgNyG7w=="], "lint-staged/listr2/log-update/cli-cursor/restore-cursor": ["restore-cursor@5.1.0", "", { "dependencies": { "onetime": "^7.0.0", "signal-exit": "^4.1.0" } }, "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA=="], @@ -5785,6 +5324,8 @@ "test-exclude/glob/jackspeak/@isaacs/cliui/wrap-ansi": ["wrap-ansi@8.1.0", "", { "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } }, "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="], + "@types/nodemailer/@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso/@aws-sdk/token-providers/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", 
"@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-x0mdv6DkjXqXEcQj3URbCltEzW6hoy/1uIL+i8gExP6YKrnhiZ7SzuB4gPls2UOpK5UqLiqXjhRLfBb1C9i4Dw=="], + "lint-staged/listr2/cli-truncate/string-width/strip-ansi/ansi-regex": ["ansi-regex@6.2.2", "", {}, "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg=="], "lint-staged/listr2/log-update/cli-cursor/restore-cursor/onetime": ["onetime@7.0.0", "", { "dependencies": { "mimic-function": "^5.0.0" } }, "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ=="], From 773720ec7fba69e70178ac4f80f123027a86de13 Mon Sep 17 00:00:00 2001 From: Waleed Date: Sat, 9 May 2026 10:36:57 -0700 Subject: [PATCH 28/33] fix(uploads): allow images/video/audio in mothership presigned route (#4534) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(uploads): allow image/video/audio attachments 
in mothership presigned route The mothership branch of the presigned upload route called validateFileType, which only permits SUPPORTED_DOCUMENT_EXTENSIONS — rejecting PNG screenshots and other media users have always been able to attach via the legacy /api/files/upload mothership branch. Introduce validateAttachmentFileType, backed by the union of document, code, image, audio, and video extensions, and wire it into the mothership branch. Co-Authored-By: Claude Opus 4.7 * fix(uploads): allow non-document execution outputs in presigned route The execution branch of the presigned upload route called validateFileType, which only permits documents — but workflow execution outputs are arbitrary by design (images, audio, video, code). The legacy /api/files/upload execution branch had no docs-only gate, so the staging refactor to presigned PUTs regressed parity. Switch execution to validateAttachmentFileType to match prior behavior. Co-Authored-By: Claude Opus 4.7 * test(uploads): add coverage for attachment validator and dedupe extensions - Dedupe SUPPORTED_ATTACHMENT_EXTENSIONS (webm appears in both audio and video lists) - Reuse SUPPORTED_ATTACHMENT_EXTENSIONS in /api/files/upload to avoid drift with the presigned route - Add unit tests for validateAttachmentFileType - Add presigned route tests covering mothership/execution/knowledge-base validator selection and permission gating * fix(uploads): restore SUPPORTED_IMAGE_EXTENSIONS import in upload route Build broke because biome auto-fix collapsed my multi-import edit and dropped SUPPORTED_IMAGE_EXTENSIONS, which is still referenced for the generic-MIME image fallback at line 298. 
--------- Co-authored-by: Claude Opus 4.7 --- .../sim/app/api/files/presigned/route.test.ts | 189 ++++++++++++++++++ apps/sim/app/api/files/presigned/route.ts | 6 +- apps/sim/app/api/files/upload/route.ts | 13 +- apps/sim/lib/uploads/utils/validation.test.ts | 73 +++++++ apps/sim/lib/uploads/utils/validation.ts | 31 +++ 5 files changed, 298 insertions(+), 14 deletions(-) create mode 100644 apps/sim/lib/uploads/utils/validation.test.ts diff --git a/apps/sim/app/api/files/presigned/route.test.ts b/apps/sim/app/api/files/presigned/route.test.ts index 6ae6a10ed5e..7c4893dc44d 100644 --- a/apps/sim/app/api/files/presigned/route.test.ts +++ b/apps/sim/app/api/files/presigned/route.test.ts @@ -17,10 +17,14 @@ const { mockIsUsingCloudStorage, mockGetStorageProvider, mockValidateFileType, + mockValidateAttachmentFileType, mockGenerateCopilotUploadUrl, mockIsImageFileType, mockGetStorageProviderUploads, mockIsUsingCloudStorageUploads, + mockGetUserEntityPermissions, + mockGenerateWorkspaceFileKey, + mockGenerateExecutionFileKey, } = vi.hoisted(() => ({ mockVerifyFileAccess: vi.fn().mockResolvedValue(true), mockVerifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true), @@ -30,6 +34,7 @@ const { mockIsUsingCloudStorage: vi.fn(), mockGetStorageProvider: vi.fn(), mockValidateFileType: vi.fn().mockReturnValue(null), + mockValidateAttachmentFileType: vi.fn().mockReturnValue(null), mockGenerateCopilotUploadUrl: vi.fn().mockResolvedValue({ url: 'https://example.com/presigned-url', key: 'copilot/test-key.txt', @@ -37,6 +42,14 @@ const { mockIsImageFileType: vi.fn().mockReturnValue(true), mockGetStorageProviderUploads: vi.fn(), mockIsUsingCloudStorageUploads: vi.fn(), + mockGetUserEntityPermissions: vi.fn().mockResolvedValue('admin'), + mockGenerateWorkspaceFileKey: vi.fn( + (workspaceId: string, fileName: string) => `workspace/${workspaceId}/${fileName}` + ), + mockGenerateExecutionFileKey: vi.fn( + (ctx: { workspaceId: string; workflowId: string; executionId: string }, fileName: 
string) => + `execution/${ctx.workspaceId}/${ctx.workflowId}/${ctx.executionId}/${fileName}` + ), })) vi.mock('@/app/api/files/authorization', () => ({ @@ -61,6 +74,19 @@ vi.mock('@/lib/uploads/core/storage-service', () => storageServiceMock) vi.mock('@/lib/uploads/utils/validation', () => ({ validateFileType: mockValidateFileType, + validateAttachmentFileType: mockValidateAttachmentFileType, +})) + +vi.mock('@/lib/workspaces/permissions/utils', () => ({ + getUserEntityPermissions: mockGetUserEntityPermissions, +})) + +vi.mock('@/lib/uploads/contexts/workspace/workspace-file-manager', () => ({ + generateWorkspaceFileKey: mockGenerateWorkspaceFileKey, +})) + +vi.mock('@/lib/uploads/contexts/execution/utils', () => ({ + generateExecutionFileKey: mockGenerateExecutionFileKey, })) vi.mock('@/lib/uploads/utils/file-utils', () => ({ @@ -139,6 +165,8 @@ function setupFileApiMocks( ) mockValidateFileType.mockReturnValue(null) + mockValidateAttachmentFileType.mockReturnValue(null) + mockGetUserEntityPermissions.mockResolvedValue('admin') mockGetStorageProviderUploads.mockReturnValue( storageProvider === 'blob' ? 'Azure Blob' : storageProvider === 's3' ? 
'S3' : 'Local' @@ -518,6 +546,167 @@ describe('/api/files/presigned', () => { }) }) + describe('mothership uploads', () => { + it('uses validateAttachmentFileType (not validateFileType) — accepts images', async () => { + setupFileApiMocks({ cloudEnabled: true, storageProvider: 's3' }) + + const request = new NextRequest( + 'http://localhost:3000/api/files/presigned?type=mothership&workspaceId=ws-1', + { + method: 'POST', + body: JSON.stringify({ + fileName: 'screenshot.png', + contentType: 'image/png', + fileSize: 4096, + }), + } + ) + + const response = await POST(request) + expect(response.status).toBe(200) + expect(mockValidateAttachmentFileType).toHaveBeenCalledWith('screenshot.png') + expect(mockValidateFileType).not.toHaveBeenCalled() + }) + + it('rejects unsupported types when validator returns an error', async () => { + setupFileApiMocks({ cloudEnabled: true, storageProvider: 's3' }) + mockValidateAttachmentFileType.mockReturnValue({ + code: 'UNSUPPORTED_FILE_TYPE', + message: 'Unsupported file type: exe.', + supportedTypes: [], + }) + + const request = new NextRequest( + 'http://localhost:3000/api/files/presigned?type=mothership&workspaceId=ws-1', + { + method: 'POST', + body: JSON.stringify({ + fileName: 'virus.exe', + contentType: 'application/octet-stream', + fileSize: 4096, + }), + } + ) + + const response = await POST(request) + const data = await response.json() + expect(response.status).toBe(400) + expect(data.code).toBe('VALIDATION_ERROR') + expect(data.error).toContain('exe') + }) + + it('returns 403 when user lacks workspace write permission', async () => { + setupFileApiMocks({ cloudEnabled: true, storageProvider: 's3' }) + mockGetUserEntityPermissions.mockResolvedValue('read') + + const request = new NextRequest( + 'http://localhost:3000/api/files/presigned?type=mothership&workspaceId=ws-1', + { + method: 'POST', + body: JSON.stringify({ + fileName: 'doc.pdf', + contentType: 'application/pdf', + fileSize: 4096, + }), + } + ) + + const response 
= await POST(request) + expect(response.status).toBe(403) + }) + }) + + describe('execution uploads', () => { + it('uses validateAttachmentFileType — accepts video', async () => { + setupFileApiMocks({ cloudEnabled: true, storageProvider: 's3' }) + + const request = new NextRequest( + 'http://localhost:3000/api/files/presigned?type=execution&workspaceId=ws-1&workflowId=wf-1&executionId=exec-1', + { + method: 'POST', + body: JSON.stringify({ + fileName: 'output.mp4', + contentType: 'video/mp4', + fileSize: 4096, + }), + } + ) + + const response = await POST(request) + expect(response.status).toBe(200) + expect(mockValidateAttachmentFileType).toHaveBeenCalledWith('output.mp4') + expect(mockValidateFileType).not.toHaveBeenCalled() + }) + + it('rejects when validator returns an error', async () => { + setupFileApiMocks({ cloudEnabled: true, storageProvider: 's3' }) + mockValidateAttachmentFileType.mockReturnValue({ + code: 'UNSUPPORTED_FILE_TYPE', + message: 'Unsupported file type: bin.', + supportedTypes: [], + }) + + const request = new NextRequest( + 'http://localhost:3000/api/files/presigned?type=execution&workspaceId=ws-1&workflowId=wf-1&executionId=exec-1', + { + method: 'POST', + body: JSON.stringify({ + fileName: 'blob.bin', + contentType: 'application/octet-stream', + fileSize: 4096, + }), + } + ) + + const response = await POST(request) + const data = await response.json() + expect(response.status).toBe(400) + expect(data.code).toBe('VALIDATION_ERROR') + }) + + it('returns 400 when missing workflowId/executionId', async () => { + setupFileApiMocks({ cloudEnabled: true, storageProvider: 's3' }) + + const request = new NextRequest( + 'http://localhost:3000/api/files/presigned?type=execution&workspaceId=ws-1', + { + method: 'POST', + body: JSON.stringify({ + fileName: 'output.mp4', + contentType: 'video/mp4', + fileSize: 4096, + }), + } + ) + + const response = await POST(request) + expect(response.status).toBe(400) + }) + }) + + describe('knowledge-base 
uploads', () => { + it('uses validateFileType (docs-only), not validateAttachmentFileType', async () => { + setupFileApiMocks({ cloudEnabled: true, storageProvider: 's3' }) + + const request = new NextRequest( + 'http://localhost:3000/api/files/presigned?type=knowledge-base', + { + method: 'POST', + body: JSON.stringify({ + fileName: 'doc.pdf', + contentType: 'application/pdf', + fileSize: 4096, + }), + } + ) + + const response = await POST(request) + expect(response.status).toBe(200) + expect(mockValidateFileType).toHaveBeenCalledWith('doc.pdf', 'application/pdf') + expect(mockValidateAttachmentFileType).not.toHaveBeenCalled() + }) + }) + describe('OPTIONS', () => { it('should handle CORS preflight requests', async () => { const response = await OPTIONS() diff --git a/apps/sim/app/api/files/presigned/route.ts b/apps/sim/app/api/files/presigned/route.ts index c8fb824b3c9..7c4bb01ec64 100644 --- a/apps/sim/app/api/files/presigned/route.ts +++ b/apps/sim/app/api/files/presigned/route.ts @@ -11,7 +11,7 @@ import { generateExecutionFileKey } from '@/lib/uploads/contexts/execution/utils import { generateWorkspaceFileKey } from '@/lib/uploads/contexts/workspace/workspace-file-manager' import { generatePresignedUploadUrl, hasCloudStorage } from '@/lib/uploads/core/storage-service' import { isImageFileType } from '@/lib/uploads/utils/file-utils' -import { validateFileType } from '@/lib/uploads/utils/validation' +import { validateAttachmentFileType, validateFileType } from '@/lib/uploads/utils/validation' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' import { createErrorResponse } from '@/app/api/files/utils' @@ -141,7 +141,7 @@ export const POST = withRouteHandler(async (request: NextRequest) => { ) } - const fileValidationError = validateFileType(fileName, contentType) + const fileValidationError = validateAttachmentFileType(fileName) if (fileValidationError) { throw new ValidationError(fileValidationError.message) } @@ -175,7 +175,7 @@ 
export const POST = withRouteHandler(async (request: NextRequest) => { ) } - const fileValidationError = validateFileType(fileName, contentType) + const fileValidationError = validateAttachmentFileType(fileName) if (fileValidationError) { throw new ValidationError(fileValidationError.message) } diff --git a/apps/sim/app/api/files/upload/route.ts b/apps/sim/app/api/files/upload/route.ts index 424935f5941..828cf83aa09 100644 --- a/apps/sim/app/api/files/upload/route.ts +++ b/apps/sim/app/api/files/upload/route.ts @@ -15,11 +15,8 @@ import type { StorageContext } from '@/lib/uploads/config' import { generateWorkspaceFileKey } from '@/lib/uploads/contexts/workspace/workspace-file-manager' import { isImageFileType, resolveFileType } from '@/lib/uploads/utils/file-utils' import { - SUPPORTED_AUDIO_EXTENSIONS, - SUPPORTED_CODE_EXTENSIONS, - SUPPORTED_DOCUMENT_EXTENSIONS, + SUPPORTED_ATTACHMENT_EXTENSIONS, SUPPORTED_IMAGE_EXTENSIONS, - SUPPORTED_VIDEO_EXTENSIONS, validateFileType, } from '@/lib/uploads/utils/validation' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' @@ -29,13 +26,7 @@ import { InvalidRequestError, } from '@/app/api/files/utils' -const ALLOWED_EXTENSIONS = new Set([ - ...SUPPORTED_DOCUMENT_EXTENSIONS, - ...SUPPORTED_CODE_EXTENSIONS, - ...SUPPORTED_IMAGE_EXTENSIONS, - ...SUPPORTED_AUDIO_EXTENSIONS, - ...SUPPORTED_VIDEO_EXTENSIONS, -]) +const ALLOWED_EXTENSIONS = new Set(SUPPORTED_ATTACHMENT_EXTENSIONS) function validateFileExtension(filename: string): boolean { const extension = filename.split('.').pop()?.toLowerCase() diff --git a/apps/sim/lib/uploads/utils/validation.test.ts b/apps/sim/lib/uploads/utils/validation.test.ts new file mode 100644 index 00000000000..f5db99cbd09 --- /dev/null +++ b/apps/sim/lib/uploads/utils/validation.test.ts @@ -0,0 +1,73 @@ +import { describe, expect, it } from 'vitest' +import { + SUPPORTED_ATTACHMENT_EXTENSIONS, + validateAttachmentFileType, +} from '@/lib/uploads/utils/validation' + 
+describe('validateAttachmentFileType', () => { + it('accepts image files (png, jpg, gif, webp, svg)', () => { + expect(validateAttachmentFileType('screenshot.png')).toBeNull() + expect(validateAttachmentFileType('photo.jpg')).toBeNull() + expect(validateAttachmentFileType('photo.JPEG')).toBeNull() + expect(validateAttachmentFileType('animation.gif')).toBeNull() + expect(validateAttachmentFileType('image.webp')).toBeNull() + expect(validateAttachmentFileType('icon.svg')).toBeNull() + }) + + it('accepts video files (mp4, mov, webm)', () => { + expect(validateAttachmentFileType('clip.mp4')).toBeNull() + expect(validateAttachmentFileType('clip.mov')).toBeNull() + expect(validateAttachmentFileType('clip.webm')).toBeNull() + }) + + it('accepts audio files (mp3, wav, m4a)', () => { + expect(validateAttachmentFileType('voice.mp3')).toBeNull() + expect(validateAttachmentFileType('voice.wav')).toBeNull() + expect(validateAttachmentFileType('voice.m4a')).toBeNull() + }) + + it('accepts document files (pdf, docx, csv, md)', () => { + expect(validateAttachmentFileType('report.pdf')).toBeNull() + expect(validateAttachmentFileType('letter.docx')).toBeNull() + expect(validateAttachmentFileType('data.csv')).toBeNull() + expect(validateAttachmentFileType('notes.md')).toBeNull() + }) + + it('accepts code files (ts, py, sh, json)', () => { + expect(validateAttachmentFileType('app.ts')).toBeNull() + expect(validateAttachmentFileType('main.py')).toBeNull() + expect(validateAttachmentFileType('script.sh')).toBeNull() + expect(validateAttachmentFileType('config.json')).toBeNull() + }) + + it('rejects executables and unknown extensions', () => { + expect(validateAttachmentFileType('virus.exe')?.code).toBe('UNSUPPORTED_FILE_TYPE') + expect(validateAttachmentFileType('installer.msi')?.code).toBe('UNSUPPORTED_FILE_TYPE') + expect(validateAttachmentFileType('archive.dmg')?.code).toBe('UNSUPPORTED_FILE_TYPE') + 
expect(validateAttachmentFileType('binary.bin')?.code).toBe('UNSUPPORTED_FILE_TYPE') + }) + + it('rejects files with no extension', () => { + const result = validateAttachmentFileType('README') + expect(result?.code).toBe('UNSUPPORTED_FILE_TYPE') + expect(result?.message).toContain('README') + }) + + it('rejects files with non-alphanumeric extensions', () => { + expect(validateAttachmentFileType('odd.<>')?.code).toBe('UNSUPPORTED_FILE_TYPE') + expect(validateAttachmentFileType('foo. ')?.code).toBe('UNSUPPORTED_FILE_TYPE') + }) + + it('does not contain duplicate extensions (e.g. webm)', () => { + const seen = new Set() + for (const ext of SUPPORTED_ATTACHMENT_EXTENSIONS) { + expect(seen.has(ext)).toBe(false) + seen.add(ext) + } + }) + + it('returns supportedTypes list in error', () => { + const result = validateAttachmentFileType('foo.exe') + expect(result?.supportedTypes).toEqual(expect.arrayContaining(['png', 'pdf', 'mp4', 'mp3'])) + }) +}) diff --git a/apps/sim/lib/uploads/utils/validation.ts b/apps/sim/lib/uploads/utils/validation.ts index af0a5581fba..4f46d67516a 100644 --- a/apps/sim/lib/uploads/utils/validation.ts +++ b/apps/sim/lib/uploads/utils/validation.ts @@ -219,6 +219,37 @@ export interface FileValidationError { supportedTypes: string[] } +export const SUPPORTED_ATTACHMENT_EXTENSIONS = Array.from( + new Set([ + ...SUPPORTED_DOCUMENT_EXTENSIONS, + ...SUPPORTED_CODE_EXTENSIONS, + ...SUPPORTED_IMAGE_EXTENSIONS, + ...SUPPORTED_AUDIO_EXTENSIONS, + ...SUPPORTED_VIDEO_EXTENSIONS, + ]) +) as readonly string[] + +/** + * Validate that a file's extension is allowed as a chat/mothership attachment. + * + * Permits documents, code, images, audio, and video — anything users would + * reasonably attach to a chat message. Rejects executables and unknown types. + */ +export function validateAttachmentFileType(fileName: string): FileValidationError | null { + const raw = extractExtension(fileName) + const extension = isAlphanumericExtension(raw) ? 
raw : '' + + if (!SUPPORTED_ATTACHMENT_EXTENSIONS.includes(extension)) { + return { + code: 'UNSUPPORTED_FILE_TYPE', + message: `Unsupported file type${extension ? `: ${extension}` : ` for "${fileName}"`}. Supported types include documents, code, images, audio, and video.`, + supportedTypes: [...SUPPORTED_ATTACHMENT_EXTENSIONS], + } + } + + return null +} + /** * Validate if a file type is supported for document processing */ From 4cc00b2ef9dc73bd042636c12e318e97fdbd842f Mon Sep 17 00:00:00 2001 From: Waleed Date: Sat, 9 May 2026 12:19:43 -0700 Subject: [PATCH 29/33] fix(security): enforce workspace scope on workflow middleware and validate shopify shop domain (#4535) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(security): enforce workspace scope on workflow middleware and validate shopify shop domain - validateWorkflowAccess now rejects workspace-scoped API keys whose workspaceId doesn't match the workflow's workspace, closing a boundary leak across /api/workflows/[id]/{log,paused,status} and /api/resume/[workflowId]/[executionId]/[contextId] - shopify authorize route now validates the resolved shop domain against shopifyShopDomainSchema before proceeding - adds middleware tests covering workspace/personal/session auth paths * fix(shopify): disallow trailing hyphen in shop subdomain regex * fix(shopify): align shop domain regex with shopify handle rules (3-60 lowercase, no edge hyphens) * fix(security): widen shopify subdomain regex to allow up to 63 chars Shopify and RFC 1123 allow labels up to 63 chars; the previous {1,58} quantifier capped the subdomain at 60 chars and rejected valid 61–63 char shops with a 400. 
Co-Authored-By: Claude Opus 4.7 --------- Co-authored-by: Claude Opus 4.7 --- .../app/api/auth/shopify/authorize/route.ts | 10 +- apps/sim/app/api/workflows/middleware.test.ts | 130 ++++++++++++++++++ apps/sim/app/api/workflows/middleware.ts | 9 ++ .../lib/api/contracts/oauth-connections.ts | 2 +- 4 files changed, 149 insertions(+), 2 deletions(-) create mode 100644 apps/sim/app/api/workflows/middleware.test.ts diff --git a/apps/sim/app/api/auth/shopify/authorize/route.ts b/apps/sim/app/api/auth/shopify/authorize/route.ts index 6bb1a94ffd9..43be71dfd17 100644 --- a/apps/sim/app/api/auth/shopify/authorize/route.ts +++ b/apps/sim/app/api/auth/shopify/authorize/route.ts @@ -1,7 +1,10 @@ import { createLogger } from '@sim/logger' import { generateId } from '@sim/utils/id' import { type NextRequest, NextResponse } from 'next/server' -import { shopifyAuthorizeQuerySchema } from '@/lib/api/contracts/oauth-connections' +import { + shopifyAuthorizeQuerySchema, + shopifyShopDomainSchema, +} from '@/lib/api/contracts/oauth-connections' import { getSession } from '@/lib/auth' import { env } from '@/lib/core/config/env' import { getBaseUrl } from '@/lib/core/utils/urls' @@ -161,6 +164,11 @@ export const GET = withRouteHandler(async (request: NextRequest) => { cleanShop = `${cleanShop.replace('.myshopify.com', '')}.myshopify.com` } + if (!shopifyShopDomainSchema.safeParse(cleanShop).success) { + logger.warn('Rejected invalid Shopify shop domain', { shop: shopDomain }) + return NextResponse.json({ error: 'Invalid Shopify shop domain' }, { status: 400 }) + } + const baseUrl = getBaseUrl() const redirectUri = `${baseUrl}/api/auth/oauth2/callback/shopify` diff --git a/apps/sim/app/api/workflows/middleware.test.ts b/apps/sim/app/api/workflows/middleware.test.ts new file mode 100644 index 00000000000..996466426da --- /dev/null +++ b/apps/sim/app/api/workflows/middleware.test.ts @@ -0,0 +1,130 @@ +/** + * Tests for workflow access middleware — focused on the workspace-scoped + * API 
key boundary check in the `requireDeployment=false` branch. + * + * @vitest-environment node + */ + +import { + hybridAuthMockFns, + workflowAuthzMock, + workflowAuthzMockFns, + workflowsUtilsMock, + workflowsUtilsMockFns, +} from '@sim/testing' +import { NextRequest } from 'next/server' +import { beforeEach, describe, expect, it, vi } from 'vitest' + +vi.mock('@/lib/workflows/utils', () => workflowsUtilsMock) +vi.mock('@sim/workflow-authz', () => workflowAuthzMock) +vi.mock('@/lib/api-key/service', () => ({ + authenticateApiKeyFromHeader: vi.fn(), + updateApiKeyLastUsed: vi.fn(), +})) + +import { validateWorkflowAccess } from '@/app/api/workflows/middleware' + +function makeRequest() { + return new NextRequest(new URL('https://example.com/api/workflows/wf-1/log')) +} + +describe('validateWorkflowAccess (requireDeployment=false)', () => { + beforeEach(() => { + vi.clearAllMocks() + workflowsUtilsMockFns.mockGetWorkflowById.mockResolvedValue({ + id: 'wf-1', + workspaceId: 'ws-A', + isDeployed: true, + }) + workflowAuthzMockFns.mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({ + allowed: true, + status: 200, + workflow: { id: 'wf-1', workspaceId: 'ws-A' }, + }) + }) + + it('rejects a workspace-scoped API key issued for a different workspace', async () => { + hybridAuthMockFns.mockCheckHybridAuth.mockResolvedValueOnce({ + success: true, + userId: 'user-1', + authType: 'api_key', + apiKeyType: 'workspace', + workspaceId: 'ws-B', + }) + + const result = await validateWorkflowAccess(makeRequest(), 'wf-1', false) + + expect(result.error).toEqual({ + message: 'API key is not authorized for this workspace', + status: 403, + }) + expect(workflowAuthzMockFns.mockAuthorizeWorkflowByWorkspacePermission).not.toHaveBeenCalled() + }) + + it('allows a workspace-scoped API key issued for the matching workspace', async () => { + hybridAuthMockFns.mockCheckHybridAuth.mockResolvedValueOnce({ + success: true, + userId: 'user-1', + authType: 'api_key', + apiKeyType: 
'workspace', + workspaceId: 'ws-A', + }) + + const result = await validateWorkflowAccess(makeRequest(), 'wf-1', false) + + expect(result.error).toBeUndefined() + expect(result.workflow).toBeDefined() + expect(result.auth?.workspaceId).toBe('ws-A') + expect(workflowAuthzMockFns.mockAuthorizeWorkflowByWorkspacePermission).toHaveBeenCalledWith({ + workflowId: 'wf-1', + userId: 'user-1', + action: 'read', + }) + }) + + it('allows a personal API key regardless of workspaceId on the auth result', async () => { + hybridAuthMockFns.mockCheckHybridAuth.mockResolvedValueOnce({ + success: true, + userId: 'user-1', + authType: 'api_key', + apiKeyType: 'personal', + workspaceId: 'ws-B', + }) + + const result = await validateWorkflowAccess(makeRequest(), 'wf-1', false) + + expect(result.error).toBeUndefined() + expect(result.workflow).toBeDefined() + }) + + it('allows session auth (no apiKeyType) when workspace permission grants access', async () => { + hybridAuthMockFns.mockCheckHybridAuth.mockResolvedValueOnce({ + success: true, + userId: 'user-1', + authType: 'session', + }) + + const result = await validateWorkflowAccess(makeRequest(), 'wf-1', false) + + expect(result.error).toBeUndefined() + expect(result.workflow).toBeDefined() + }) + + it('still enforces workspace-permission rejection for personal keys', async () => { + hybridAuthMockFns.mockCheckHybridAuth.mockResolvedValueOnce({ + success: true, + userId: 'user-1', + authType: 'api_key', + apiKeyType: 'personal', + }) + workflowAuthzMockFns.mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValueOnce({ + allowed: false, + status: 403, + message: 'Access denied', + }) + + const result = await validateWorkflowAccess(makeRequest(), 'wf-1', false) + + expect(result.error).toEqual({ message: 'Access denied', status: 403 }) + }) +}) diff --git a/apps/sim/app/api/workflows/middleware.ts b/apps/sim/app/api/workflows/middleware.ts index 2a66a616c77..10fa3017727 100644 --- a/apps/sim/app/api/workflows/middleware.ts +++ 
b/apps/sim/app/api/workflows/middleware.ts @@ -54,6 +54,15 @@ export async function validateWorkflowAccess( } } + if (auth.apiKeyType === 'workspace' && auth.workspaceId !== workflow.workspaceId) { + return { + error: { + message: 'API key is not authorized for this workspace', + status: 403, + }, + } + } + const authorization = await authorizeWorkflowByWorkspacePermission({ workflowId, userId: auth.userId, diff --git a/apps/sim/lib/api/contracts/oauth-connections.ts b/apps/sim/lib/api/contracts/oauth-connections.ts index 03ec64d0a66..4915f2fd712 100644 --- a/apps/sim/lib/api/contracts/oauth-connections.ts +++ b/apps/sim/lib/api/contracts/oauth-connections.ts @@ -143,7 +143,7 @@ export const oauthAuthorizeParamsResponseSchema = z.object({ response_type: z.literal('code'), }) -const SHOPIFY_SHOP_DOMAIN_REGEX = /^[a-zA-Z0-9][a-zA-Z0-9-]*\.myshopify\.com$/ +const SHOPIFY_SHOP_DOMAIN_REGEX = /^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]\.myshopify\.com$/ export const shopifyShopDomainSchema = z.string().regex(SHOPIFY_SHOP_DOMAIN_REGEX) export const listOAuthConnectionsContract = defineRouteContract({ From c7130c6bf91a366fd886594477f66950815fad69 Mon Sep 17 00:00:00 2001 From: Waleed Date: Sat, 9 May 2026 13:21:42 -0700 Subject: [PATCH 30/33] fix(tables): fix bulk ops truncation for tables larger than one page (#4532) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(tables): fix bulk ops truncation for tables larger than one page Bulk operations (column-header delete, select-all copy/cut/delete/run) were silently truncated to the first 1000 rows because handlers only iterated the loaded pages from useInfiniteQuery. 
Fix: - Extract tableRowsInfiniteOptions factory (infiniteQueryOptions) so the hook and imperative drain share the same typed cache key - Add background drain via useEffect watching hasNextPage/isFetchingNextPage — chains fetchNextPage until getNextPageParam returns undefined - Add ensureAllRowsLoaded to use-table: reads cache via getQueryData + calls fetchNextPage in a while loop until the last page is partial - Await ensureAllRowsLoaded at every kind:'all' bulk-op entry point in table-grid (column delete, copy, cut, action-bar delete/run) - Add chunkBatchUpdates to send updates in MAX_BULK_OPERATION_SIZE=1000 chunks so server validation never rejects oversized batches - Fix undo-redo: make executeAction async and chunk clear-cells, update-cells, and delete-column cell-restore with mutateAsync loops Tests: 41 passing across use-table, tables queries, and use-table-undo * chore(tables): remove extraneous comments * fix(tables): add missing useEffect import; chunk range-selection delete and cut * fix(tables): add hasRunningGroupExecution and import it * fix(tables): define mergePagePreservingIdentity helper used in polling cache merge * fix(tables): define ROWS_POLL_INTERVAL_WHILE_RUNNING_MS constant used in polling loop * fix(tables): capture rowSel before await in delete handler; handle clipboard NotAllowedError * fix(tables): abort cut on clipboard NotAllowedError to prevent silent cell deletion * fix(tables): push undo before chunkBatchUpdates to survive partial chunk failures * fix(tables): use text input for number cells; idle poll backoff; csv error toast; column-cut drain * fix(tables): audit fixes — column-copy drain, polling scope, merge identity - Fix polling tick: move Promise.all inside else-branch so dirty[] stays in scope; keep hasDirty=true during active mutations so the short interval fires while chunked batch-updates are in flight - Add isColumnSelectionRef branch to handleCopy (mirrors handleCut fix): column-header Cmd+C now drains all pages before 
building clipboard content - Replace String(updatedAt) comparison in mergePagePreservingIdentity with Date.getTime() equality — handles ISO vs +00:00 timezone variants - Remove redundant batchUpdates.length > 0 guards at chunkBatchUpdates callsites (empty-array case is handled inside the function) - Export _mergePagePreservingIdentity for unit testing - Add 6 unit tests covering mergePagePreservingIdentity edge cases * improvement(tables): cleanup — extract components, stabilize callbacks, fix ref sync * improvement(tables): remove polling, eager drain, and parallelize batch updates - Drop the per-page polling loop — SSE stream already patches execution cell state in real time and invalidates on buffer prune; polling was redundant and burned CPU/network on every open table - Remove eager mount drain (fetchNextPage loop in use-table.ts); scroll handler and ensureAllRowsLoaded handle progressive/on-demand loading - Parallelize chunkBatchUpdates with a 3-worker pool instead of serial chunks, reducing bulk-op round-trips by ~3x - Delete mergePagePreservingIdentity and its tests (no longer called) * chore(tables): remove stale comments and dead defensive code - Fix chunkBatchUpdates JSDoc to reflect parallel dispatch (was "sequentially") - Inline CHUNK_CONCURRENCY=3, single-use constant needs no abstraction - Drop stale "Polls while any cell is in flight" from useTableRows JSDoc - Remove two generic "Validation errors surfaced by caller" comments - Remove ASCII separator line from workflow group mutations section - Remove dead `if (!variables) return` guard in useImportCsvIntoTable onSettled (TanStack v5 always provides variables to onSettled) * fix(tables): run-all selection sends all rows to server, not just loaded pages When rowSelection.kind === 'all', selectedRunScope now flags allRows: true. The action-bar run handlers pass rowIds: undefined to the server when allRows is set, matching the server contract (missing rowIds = run all eligible rows). 
Stop likewise routes through scope: 'all' instead of per-row cancels. Previously, selecting all rows and clicking Run would silently only run the rows loaded in the current infinite-query cache (potentially one page of 1000 on a 5000-row table). * test(tables): remove background-drain describe block (behavior intentionally removed) * fix(tables): surface CSV import error toast; remove dead hasRunningGroupExecution * fix(tables): make runWithoutRecording async-aware so undoRedoInProgress covers full undo execution * feat(tables): render URL cells with favicon and clickable link * feat(tables): clickable URL cells with favicons using tldts --- apps/sim/app/api/table/import-csv/route.ts | 6 +- .../table-grid/cells/cell-render.tsx | 118 +- .../cells/expanded-cell-popover.tsx | 10 +- .../table-grid/cells/inline-editors.tsx | 11 +- .../components/table-grid/constants.ts | 18 + .../components/table-grid/data-row.tsx | 322 +++++ .../components/table-grid/table-grid.tsx | 1163 ++++++----------- .../table-grid/table-primitives.tsx | 114 ++ .../[tableId]/components/table-grid/utils.ts | 43 + .../tables/[tableId]/hooks/use-table.test.ts | 238 ++++ .../tables/[tableId]/hooks/use-table.ts | 69 +- .../[workspaceId]/tables/[tableId]/table.tsx | 34 +- .../[workspaceId]/tables/[tableId]/utils.ts | 2 +- .../workspace/[workspaceId]/tables/tables.tsx | 11 - apps/sim/hooks/queries/tables.test.ts | 123 +- apps/sim/hooks/queries/tables.ts | 66 +- apps/sim/hooks/use-table-undo.test.ts | 230 ++++ apps/sim/hooks/use-table-undo.ts | 57 +- apps/sim/lib/table/constants.ts | 2 +- apps/sim/package.json | 1 + apps/sim/stores/table/store.ts | 9 +- bun.lock | 377 ++---- 22 files changed, 1884 insertions(+), 1140 deletions(-) create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/data-row.tsx create mode 100644 apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/table-primitives.tsx create mode 100644 
apps/sim/app/workspace/[workspaceId]/tables/[tableId]/hooks/use-table.test.ts create mode 100644 apps/sim/hooks/use-table-undo.test.ts diff --git a/apps/sim/app/api/table/import-csv/route.ts b/apps/sim/app/api/table/import-csv/route.ts index 66b0e1f3c0d..11951d0cb20 100644 --- a/apps/sim/app/api/table/import-csv/route.ts +++ b/apps/sim/app/api/table/import-csv/route.ts @@ -17,6 +17,7 @@ import { inferSchemaFromCsv, parseCsvBuffer, sanitizeName, + TABLE_LIMITS, type TableSchema, } from '@/lib/table' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' @@ -67,7 +68,10 @@ export const POST = withRouteHandler(async (request: NextRequest) => { const { headers, rows } = await parseCsvBuffer(buffer, delimiter) const { columns, headerToColumn } = inferSchemaFromCsv(headers, rows) - const tableName = sanitizeName(file.name.replace(/\.[^.]+$/, ''), 'imported_table') + const tableName = sanitizeName(file.name.replace(/\.[^.]+$/, ''), 'imported_table').slice( + 0, + TABLE_LIMITS.MAX_TABLE_NAME_LENGTH + ) const planLimits = await getWorkspaceTableLimits(workspaceId) const normalizedSchema: TableSchema = { diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/cell-render.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/cell-render.tsx index 2de71e2a9e2..c2c78971d90 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/cell-render.tsx +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/cell-render.tsx @@ -1,6 +1,7 @@ 'use client' import type React from 'react' +import { parse } from 'tldts' import { Badge, Checkbox, Tooltip } from '@/components/emcn' import { cn } from '@/lib/core/utils/cn' import type { RowExecutionMetadata } from '@/lib/table' @@ -8,22 +9,6 @@ import { StatusBadge } from '@/app/workspace/[workspaceId]/logs/utils' import { storageToDisplay } from '../../../utils' import type { 
DisplayColumn } from '../types' -/** - * Discriminated union describing every shape a table cell can take. - * - * Workflow-output cells follow a status state machine: they always render - * *something* (a value, a status pill, or a dash), driven by the combination - * of `executions[groupId]` state and dep satisfaction. Plain (non-workflow) - * cells just render the typed value or empty. - * - * `'empty'` is the universal fallback used by both workflow cells (no exec, - * no value, no waiting) and plain cells (null/undefined value). - * - * Adding a new cell appearance is a three-step mechanical change: add a - * variant here, pick it in `resolveCellRender`, render it in `CellRender`. - * TypeScript's exhaustiveness check on the renderer's `switch` (the - * unreachable default) flags any branch you forgot. - */ export type CellRenderKind = // Workflow-output cells | { kind: 'value'; text: string } @@ -38,6 +23,7 @@ export type CellRenderKind = | { kind: 'boolean'; checked: boolean } | { kind: 'json'; text: string } | { kind: 'date'; text: string } + | { kind: 'url'; text: string; href: string; domain: string } | { kind: 'text'; text: string } // Universal fallback | { kind: 'empty' } @@ -46,20 +32,9 @@ interface ResolveCellRenderInput { value: unknown exec: RowExecutionMetadata | undefined column: DisplayColumn - /** Empty / undefined → not waiting; non-empty → render the Waiting pill. */ waitingOnLabels: string[] | undefined } -/** - * Decide which `CellRenderKind` to render for a cell. Pure — easily - * unit-testable in isolation, no JSX involved. - * - * Order matters for workflow cells: block-error wins over a value (the user - * cares about the failure), value wins over running/queued (we have data - * already), and the running/queued branch deliberately collapses pre-enqueue - * `pending` and post-enqueue `queued` into one `Queued` pill so the cell - * doesn't flicker as the row transitions from one to the other. 
- */ export function resolveCellRender({ value, exec, @@ -76,31 +51,20 @@ export function resolveCellRender({ if (blockError) return { kind: 'block-error' } - // Active re-run of THIS column wins over its prior value — the value is - // about to be overwritten and the user should see the cell is changing. const inFlight = exec?.status === 'running' || exec?.status === 'queued' || exec?.status === 'pending' if (inFlight && blockRunning) return { kind: 'running' } - // Value wins over `pending-upstream`: once this column's output has - // landed, the cell is done from the user's perspective — even if the - // group is still running other blocks downstream. Without this, mid-run - // partial-write events (`status: 'running'` carrying outputs but tagging - // a different block as running) would flip a finished column back to the - // amber Pending pill until the terminal `completed` event arrives. + // Value wins over pending-upstream: a finished column stays finished even + // while other blocks in the group are still running. if (!isNull) return { kind: 'value', text: stringifyValue(value) } if (inFlight && !(groupHasBlockErrors && !blockRunning)) { if (exec?.status === 'queued' || exec?.status === 'pending') return { kind: 'queued' } - // `running` with this block not in `runningBlockIds` and no value yet = - // upstream block still going; surface as the amber Pending pill. return { kind: 'pending-upstream' } } - // Waiting wins over a stale terminal state: if deps are unmet right now, - // the prior `cancelled` / `error` is informational at best — the cell - // can't actually run until the user fills the missing input. Surface the - // actionable state instead of the stale one. + // Waiting wins over a stale terminal status — show the actionable state. 
if (waitingOnLabels && waitingOnLabels.length > 0) { return { kind: 'waiting', labels: waitingOnLabels } } @@ -113,6 +77,12 @@ export function resolveCellRender({ if (isNull) return { kind: 'empty' } if (column.type === 'json') return { kind: 'json', text: JSON.stringify(value) } if (column.type === 'date') return { kind: 'date', text: String(value) } + if (column.type === 'string') { + const text = stringifyValue(value) + const urlInfo = extractUrlInfo(text) + if (urlInfo) return { kind: 'url', text, href: urlInfo.href, domain: urlInfo.domain } + return { kind: 'text', text } + } return { kind: 'text', text: stringifyValue(value) } } @@ -122,19 +92,32 @@ function stringifyValue(value: unknown): string { return JSON.stringify(value) } +const BARE_DOMAIN_RE = /^([a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,}$/ + +function extractUrlInfo(text: string): { href: string; domain: string } | null { + const trimmed = text.trim() + if (!trimmed) return null + if (/^https?:\/\//i.test(trimmed)) { + try { + const url = new URL(trimmed) + return { href: trimmed, domain: url.hostname } + } catch { + return null + } + } + if (BARE_DOMAIN_RE.test(trimmed)) { + const parsed = parse(trimmed) + if (!parsed.isIcann) return null + return { href: `https://${trimmed}`, domain: trimmed } + } + return null +} + interface CellRenderProps { kind: CellRenderKind - /** When true the static content sits underneath the InlineEditor overlay - * and should be visually hidden (but kept in flow to preserve cell size). */ isEditing: boolean } -/** - * Pure renderer: takes a `CellRenderKind` and returns the JSX. No business - * logic — adding a new cell appearance means adding a new `case` here. The - * exhaustiveness check on the `switch` (the unreachable default) flags any - * variant you forgot to handle. 
- */ export function CellRender({ kind, isEditing }: CellRenderProps): React.ReactElement | null { switch (kind.kind) { case 'value': @@ -237,6 +220,35 @@ export function CellRender({ kind, isEditing }: CellRenderProps): React.ReactEle ) + case 'url': + return ( + + { + e.currentTarget.style.display = 'none' + }} + /> + e.stopPropagation()} + onDoubleClick={(e) => e.stopPropagation()} + > + {kind.text} + + + ) + case 'text': return (
    +
    +
    { + if (e.button !== 0) return + onRowToggle(rowIndex, e.shiftKey) + }} + > + + {rowIndex + 1} + +
    + +
    +
    + {hasWorkflowColumns && ( + + )} +
    +
    { + if (e.button !== 0 || isEditing) return + onCellMouseDown(rowIndex, colIndex, e.shiftKey) + }} + onMouseEnter={() => onCellMouseEnter(rowIndex, colIndex)} + onClick={(e) => + onClick(row.id, column.name, { + toggleBoolean: + !e.shiftKey && + Boolean((e.target as HTMLElement).closest('[data-boolean-cell-toggle]')), + }) + } + onDoubleClick={() => onDoubleClick(row.id, column.name, column.key)} + > + {isHighlighted && (isMultiCell || isRowChecked) && ( +
    + )} + {isAnchor && ( +
    + )} +
    + onSave(row.id, column.name, value, reason)} + onCancel={onCancel} + waitingOnLabels={ + column.workflowGroupId + ? (waitingByGroupId?.get(column.workflowGroupId) ?? undefined) + : undefined + } + /> +
    +
    -
    -
    { - if (e.button !== 0) return - onRowToggle(rowIndex, e.shiftKey) - }} - > - - {rowIndex + 1} - -
    - -
    -
    - {hasWorkflowColumns && ( - - )} -
    -
    { - if (e.button !== 0 || isEditing) return - onCellMouseDown(rowIndex, colIndex, e.shiftKey) - }} - onMouseEnter={() => onCellMouseEnter(rowIndex, colIndex)} - onClick={(e) => - onClick(row.id, column.name, { - toggleBoolean: Boolean( - (e.target as HTMLElement).closest('[data-boolean-cell-toggle]') - ), - }) - } - onDoubleClick={() => onDoubleClick(row.id, column.name, column.key)} - > - {isHighlighted && (isMultiCell || isRowChecked) && ( -
    - )} - {isAnchor && ( -
    - )} -
    - onSave(row.id, column.name, value, reason)} - onCancel={onCancel} - waitingOnLabels={ - column.workflowGroupId - ? (waitingByGroupId?.get(column.workflowGroupId) ?? undefined) - : undefined - } - /> -
    -
    -
    - - {rowIndex + 1} - -
    -
    -
    - -
    -
    { - if (e.button !== 0) return - onCheckedChange() - }} - onKeyDown={(e) => { - if (e.key !== ' ' && e.key !== 'Enter') return - e.preventDefault() - onCheckedChange() - }} - > -
    - -
    -
    +
    + + {rowIndex + 1} + +
    +
    +
    + +
    +
    { + if (e.button !== 0) return + onCheckedChange() + }} + onKeyDown={(e) => { + if (e.key !== ' ' && e.key !== 'Enter') return + e.preventDefault() + onCheckedChange() + }} + > +
    + +
    +
    element. + */ +function applyCellProperties(td: HTMLElement, cell: TableCell, ctx: RenderContext): void { + const tcPr = cell.properties + if (!tcPr) return + + // Fill (overrides table style fill) + const solidFill = tcPr.child('solidFill') + if (solidFill.exists()) { + const { color, alpha } = resolveColor(solidFill, ctx) + const hex = color.startsWith('#') ? color : `#${color}` + if (alpha < 1) { + const { r, g, b } = hexToRgb(hex) + td.style.backgroundColor = `rgba(${r},${g},${b},${alpha.toFixed(3)})` + } else { + td.style.backgroundColor = hex + } + } + + // Borders (override table style borders) + applyBorder(td, tcPr, 'lnT', 'borderTop', ctx) + applyBorder(td, tcPr, 'lnB', 'borderBottom', ctx) + applyBorder(td, tcPr, 'lnL', 'borderLeft', ctx) + applyBorder(td, tcPr, 'lnR', 'borderRight', ctx) + + // Margins / Padding + const marL = tcPr.numAttr('marL') + const marR = tcPr.numAttr('marR') + const marT = tcPr.numAttr('marT') + const marB = tcPr.numAttr('marB') + + // Default margin is 91440 EMU (0.1 inch) = ~9.6px + const defaultMargin = 91440 + td.style.paddingLeft = `${emuToPx(marL ?? defaultMargin)}px` + td.style.paddingRight = `${emuToPx(marR ?? defaultMargin)}px` + td.style.paddingTop = `${emuToPx(marT ?? 45720)}px` + td.style.paddingBottom = `${emuToPx(marB ?? 45720)}px` + + // Vertical alignment + const anchor = tcPr.attr('anchor') + const alignMap: Record = { + t: 'top', + ctr: 'middle', + b: 'bottom', + } + td.style.verticalAlign = alignMap[anchor || 't'] || 'top' +} + +/** + * Apply a single border to a element from a line node. 
+ */ +function applyBorder( + td: HTMLElement, + tcPr: SafeXmlNode, + lineName: string, + cssProp: 'borderTop' | 'borderBottom' | 'borderLeft' | 'borderRight', + ctx: RenderContext +): void { + const ln = tcPr.child(lineName) + if (!ln.exists()) return + + // Check for noFill — explicitly clear any border set by table style + const noFill = ln.child('noFill') + if (noFill.exists()) { + td.style[cssProp] = 'none' + return + } + + const style = resolveLineStyle(ln, ctx) + if (style.width > 0 && style.color !== 'transparent') { + td.style[cssProp] = `${Math.max(style.width, 0.5)}px ${style.dash} ${style.color}` + } +} diff --git a/apps/sim/lib/pptx-renderer/renderer/text-renderer.ts b/apps/sim/lib/pptx-renderer/renderer/text-renderer.ts new file mode 100644 index 00000000000..e0f775fb83d --- /dev/null +++ b/apps/sim/lib/pptx-renderer/renderer/text-renderer.ts @@ -0,0 +1,988 @@ +/** + * Text renderer — converts OOXML text body into HTML DOM elements + * with full 7-level style inheritance. + */ + +import type { PlaceholderInfo } from '../model/nodes/base-node' +import type { TextBody } from '../model/nodes/shape-node' +import { angleToDeg, emuToPx, pctToDecimal } from '../parser/units' +import { SafeXmlNode } from '../parser/xml-parser' +import { isAllowedExternalUrl } from '../utils/url-safety' +import type { RenderContext } from './render-context' +import { resolveColor, resolveColorToCss } from './style-resolver' + +// --------------------------------------------------------------------------- +// Style Inheritance Helpers +// --------------------------------------------------------------------------- + +/** + * Find paragraph properties at a specific indent level from a list style node. + * Tries lvl{n}pPr (where n = level + 1), then falls back to defPPr. 
+ */ +function findStyleAtLevel(styleNode: SafeXmlNode | undefined, level: number): SafeXmlNode { + if (!styleNode || !styleNode.exists()) { + return new SafeXmlNode(null) + } + // Try level-specific style (lvl1pPr, lvl2pPr, etc.) + const lvlNode = styleNode.child(`lvl${level + 1}pPr`) + if (lvlNode.exists()) return lvlNode + // Fall back to default + return styleNode.child('defPPr') +} + +/** + * Determine the placeholder category for style inheritance. + * Returns 'title', 'body', or 'other'. + */ +function getPlaceholderCategory( + placeholder: PlaceholderInfo | undefined +): 'title' | 'body' | 'other' { + if (!placeholder || !placeholder.type) return 'other' + const t = placeholder.type + if (t === 'title' || t === 'ctrTitle') return 'title' + if ( + t === 'body' || + t === 'subTitle' || + t === 'obj' || + t === 'dt' || + t === 'ftr' || + t === 'sldNum' + ) { + return 'body' + } + return 'other' +} + +/** + * Find a placeholder node in a list by matching type and/or idx. + */ +function findPlaceholderNode( + placeholders: SafeXmlNode[], + info: PlaceholderInfo +): SafeXmlNode | undefined { + for (const ph of placeholders) { + // Navigate to the ph element to read its attributes + let phEl: SafeXmlNode | undefined + const nvSpPr = ph.child('nvSpPr') + if (nvSpPr.exists()) { + phEl = nvSpPr.child('nvPr').child('ph') + } + if (!phEl || !phEl.exists()) { + const nvPicPr = ph.child('nvPicPr') + if (nvPicPr.exists()) { + phEl = nvPicPr.child('nvPr').child('ph') + } + } + if (!phEl || !phEl.exists()) continue + + const phType = phEl.attr('type') + const phIdx = phEl.numAttr('idx') + + // Match by idx first (most specific), then by type + if (info.idx !== undefined && phIdx === info.idx) return ph + if (info.type && phType === info.type) return ph + } + return undefined +} + +/** + * Extract lstStyle from a placeholder shape node. 
+ */ +function getPlaceholderLstStyle(phNode: SafeXmlNode): SafeXmlNode | undefined { + const txBody = phNode.child('txBody') + if (!txBody.exists()) return undefined + const lstStyle = txBody.child('lstStyle') + return lstStyle.exists() ? lstStyle : undefined +} + +/** + * Merge a source paragraph property node onto a target style object. + * Later calls override earlier values (higher priority wins). + */ +interface MergedParagraphStyle { + align?: string + marginLeft?: number + textIndent?: number + lineHeight?: string + /** True when lineHeight comes from spcPts (absolute pt value). For CJK fonts, CSS line-height + * with absolute values may not produce exact spacing because the font's content area can exceed + * the line-height. When true, we use block-level line wrappers instead of
    for line breaks. */ + lineHeightAbsolute?: boolean + spaceBefore?: number + spaceBeforePct?: number // percentage of font size (0-1 range) + spaceAfter?: number + spaceAfterPct?: number // percentage of font size (0-1 range) + bulletChar?: string + bulletFont?: string + bulletAutoNum?: string + bulletNone?: boolean + /** When set, bullet color is taken from this OOXML buClr node (a:buClr with srgbClr/schemeClr child). */ + bulletColorNode?: SafeXmlNode + defRPr?: SafeXmlNode +} + +function mergeParagraphProps(target: MergedParagraphStyle, pPr: SafeXmlNode): void { + if (!pPr.exists()) return + + const algn = pPr.attr('algn') + if (algn) target.align = algn + + const marL = pPr.numAttr('marL') + if (marL !== undefined) target.marginLeft = emuToPx(marL) + + const indent = pPr.numAttr('indent') + if (indent !== undefined) target.textIndent = emuToPx(indent) + + // Line spacing + // OOXML spcPct: 100000 = "single spacing" = 1.0× the font's line height. + // IMPORTANT: We must use UNITLESS CSS line-height values (e.g., 1.0, 1.2) + // instead of percentages (e.g., 100%, 120%). CSS percentage line-height is + // computed once against the element's own font-size and inherited as a FIXED + // pixel value — so a parent div with line-height:120% and font-size:16px + // inherits 19.2px to ALL children, even those with font-size:80pt. + // Unitless values are inherited as-is and each child recomputes against its + // own font-size. 
+ const lnSpc = pPr.child('lnSpc') + if (lnSpc.exists()) { + const spcPct = lnSpc.child('spcPct') + if (spcPct.exists()) { + const val = spcPct.numAttr('val') + if (val !== undefined) { + // OOXML 100000 → CSS unitless 1.0; OOXML 120000 → CSS 1.2 + target.lineHeight = `${(val / 100000).toFixed(3)}` + } + } + const spcPts = lnSpc.child('spcPts') + if (spcPts.exists()) { + const val = spcPts.numAttr('val') + if (val !== undefined) { + target.lineHeight = `${val / 100}pt` + target.lineHeightAbsolute = true + } + } + } + + // Space before + const spcBef = pPr.child('spcBef') + if (spcBef.exists()) { + const spcPts = spcBef.child('spcPts') + if (spcPts.exists()) { + const val = spcPts.numAttr('val') + if (val !== undefined) target.spaceBefore = val / 100 + } + const spcPct = spcBef.child('spcPct') + if (spcPct.exists()) { + const val = spcPct.numAttr('val') + if (val !== undefined) target.spaceBeforePct = val / 100000 // store as ratio + } + } + + // Space after + const spcAft = pPr.child('spcAft') + if (spcAft.exists()) { + const spcPts = spcAft.child('spcPts') + if (spcPts.exists()) { + const val = spcPts.numAttr('val') + if (val !== undefined) target.spaceAfter = val / 100 + } + const spcPct = spcAft.child('spcPct') + if (spcPct.exists()) { + const val = spcPct.numAttr('val') + if (val !== undefined) target.spaceAfterPct = val / 100000 // store as ratio + } + } + + // Bullets + const buChar = pPr.child('buChar') + if (buChar.exists()) { + target.bulletChar = buChar.attr('char') || '' + target.bulletNone = false + } + const buAutoNum = pPr.child('buAutoNum') + if (buAutoNum.exists()) { + target.bulletAutoNum = buAutoNum.attr('type') || 'arabicPeriod' + target.bulletNone = false + } + const buNone = pPr.child('buNone') + if (buNone.exists()) { + target.bulletNone = true + target.bulletChar = undefined + target.bulletAutoNum = undefined + } + const buFont = pPr.child('buFont') + if (buFont.exists()) { + target.bulletFont = buFont.attr('typeface') + } + // Explicit 
bullet color (a:buClr); when present overrides defRPr for bullet color + const buClr = pPr.child('buClr') + if (buClr.exists()) { + target.bulletColorNode = buClr + } + + // Default run properties (used as fallback for runs without rPr) + const defRPr = pPr.child('defRPr') + if (defRPr.exists()) { + target.defRPr = defRPr + } +} + +// --------------------------------------------------------------------------- +// Run Style Resolution +// --------------------------------------------------------------------------- + +interface MergedRunStyle { + fontSize?: number + bold?: boolean + italic?: boolean + underline?: boolean + strikethrough?: boolean + color?: string + fontFamily?: string + hlinkClick?: string + /** Character spacing (tracking) in points — from a:spc @val (hundredths of pt). */ + letterSpacingPt?: number + /** Kerning: minimum font size (pt) for kerning; 0 = always kern. */ + kern?: number + /** Text capitalization: "all" = ALL CAPS, "small" = SMALL CAPS, "none" = normal. */ + cap?: string + /** Baseline shift in percentage (positive = superscript, negative = subscript). */ + baseline?: number + /** CSS gradient string for text fill (from rPr > gradFill). */ + textGradientCss?: string + /** When true, text fill is transparent (a:noFill on rPr). */ + textNoFill?: boolean + /** Text outline width in px (from a:ln on rPr). */ + textOutlineWidth?: number + /** Text outline CSS color (solid fill on ln). */ + textOutlineColor?: string + /** Text outline CSS gradient (gradient fill on ln) — used as mask-image for fade effect. 
*/ + textOutlineGradientCss?: string +} + +function mergeRunProps(target: MergedRunStyle, rPr: SafeXmlNode, ctx: RenderContext): void { + if (!rPr.exists()) return + + const sz = rPr.numAttr('sz') + if (sz !== undefined) target.fontSize = sz / 100 // hundredths of point -> pt + + const b = rPr.attr('b') + if (b !== undefined) target.bold = b === '1' || b === 'true' + + const i = rPr.attr('i') + if (i !== undefined) target.italic = i === '1' || i === 'true' + + const u = rPr.attr('u') + if (u !== undefined && u !== 'none') target.underline = true + if (u === 'none') target.underline = false + + const strike = rPr.attr('strike') + if (strike !== undefined && strike !== 'noStrike') target.strikethrough = true + if (strike === 'noStrike') target.strikethrough = false + + // Color from solidFill or gradFill child + const solidFill = rPr.child('solidFill') + if (solidFill.exists()) { + const { color, alpha } = resolveColor(solidFill, ctx) + const hex = color.startsWith('#') ? color : `#${color}` + if (alpha < 1) { + const { r, g, b: bl } = hexToRgbInternal(hex) + target.color = `rgba(${r},${g},${bl},${alpha.toFixed(3)})` + } else { + target.color = hex + } + } + const gradFill = rPr.child('gradFill') + if (gradFill.exists()) { + const css = resolveGradientForText(gradFill, ctx) + if (css) target.textGradientCss = css + } + + // Font family + const latin = rPr.child('latin') + if (latin.exists()) { + const typeface = latin.attr('typeface') + if (typeface) { + target.fontFamily = resolveThemeFont(typeface, ctx) + } + } + if (!target.fontFamily) { + const ea = rPr.child('ea') + if (ea.exists()) { + const typeface = ea.attr('typeface') + if (typeface) { + target.fontFamily = resolveThemeFont(typeface, ctx) + } + } + } + if (!target.fontFamily) { + const cs = rPr.child('cs') + if (cs.exists()) { + const typeface = cs.attr('typeface') + if (typeface) { + target.fontFamily = resolveThemeFont(typeface, ctx) + } + } + } + + // Hyperlink + const hlinkClick = 
rPr.child('hlinkClick') + if (hlinkClick.exists()) { + // The actual URL is in the slide rels, referenced by r:id + const rId = hlinkClick.attr('id') ?? hlinkClick.attr('r:id') + if (rId) { + const rel = ctx.slide.rels.get(rId) + if (rel && rel.targetMode === 'External' && isAllowedExternalUrl(rel.target)) { + target.hlinkClick = rel.target + } + } + } + + // Character spacing (compact/tracking): rPr@spc in hundredths of a point + const spc = rPr.numAttr('spc') + if (spc !== undefined) target.letterSpacingPt = spc / 100 + + // Kerning: rPr@kern = minimum font size (hundredths of pt) to apply kerning; 0 = always + const kern = rPr.numAttr('kern') + if (kern !== undefined) target.kern = kern / 100 + + // Text capitalization: cap="all" (ALL CAPS) or cap="small" (SMALL CAPS) + const cap = rPr.attr('cap') + if (cap !== undefined) target.cap = cap + + // Baseline shift: positive = superscript, negative = subscript (in 1000ths of percent) + const baseline = rPr.numAttr('baseline') + if (baseline !== undefined) target.baseline = baseline + + // Text noFill: a:noFill on rPr makes text interior transparent + if (rPr.child('noFill').exists()) { + target.textNoFill = true + } + + // Text outline: a:ln on rPr defines text stroke/outline + const ln = rPr.child('ln') + if (ln.exists() && !ln.child('noFill').exists()) { + const lnW = ln.numAttr('w') + target.textOutlineWidth = lnW ? emuToPx(lnW) : 0.75 // default ~0.75px + // Solid fill on outline + const lnSolid = ln.child('solidFill') + if (lnSolid.exists()) { + const { color: c, alpha: a } = resolveColor(lnSolid, ctx) + target.textOutlineColor = colorToCssLocal(c, a) + } + // Gradient fill on outline — build CSS gradient for mask effect + const lnGrad = ln.child('gradFill') + if (lnGrad.exists()) { + target.textOutlineGradientCss = resolveGradientForText(lnGrad, ctx) + } + } +} + +/** + * Resolve theme font placeholder references like "+mj-lt" or "+mn-lt". 
+ */ +function resolveThemeFont(typeface: string, ctx: RenderContext): string { + if (typeface === '+mj-lt' || typeface === '+mj-ea' || typeface === '+mj-cs') { + const key = typeface.slice(3) as 'lt' | 'ea' | 'cs' + const mapping: Record = { lt: 'latin', ea: 'ea', cs: 'cs' } + return ctx.theme.majorFont[mapping[key] || 'latin'] || typeface + } + if (typeface === '+mn-lt' || typeface === '+mn-ea' || typeface === '+mn-cs') { + const key = typeface.slice(3) as 'lt' | 'ea' | 'cs' + const mapping: Record = { lt: 'latin', ea: 'ea', cs: 'cs' } + return ctx.theme.minorFont[mapping[key] || 'latin'] || typeface + } + return typeface +} + +/** + * Minimal hex-to-rgb parser for inline use. + */ +function hexToRgbInternal(hex: string): { r: number; g: number; b: number } { + const cleaned = hex.replace(/^#/, '') + const num = Number.parseInt( + cleaned.length === 3 + ? cleaned[0] + cleaned[0] + cleaned[1] + cleaned[1] + cleaned[2] + cleaned[2] + : cleaned, + 16 + ) + return { r: (num >> 16) & 0xff, g: (num >> 8) & 0xff, b: num & 0xff } +} + +/** + * Convert resolved color + alpha to CSS color string. + */ +function colorToCssLocal(color: string, alpha: number): string { + const hex = color.startsWith('#') ? color : `#${color}` + if (alpha >= 1) return hex + const { r, g, b } = hexToRgbInternal(hex) + return `rgba(${r},${g},${b},${alpha.toFixed(3)})` +} + +/** + * Resolve a gradient fill node into a CSS linear-gradient string. + * Used for text outline gradient effects. + */ +function resolveGradientForText(gradFill: SafeXmlNode, ctx: RenderContext): string { + const gsLst = gradFill.child('gsLst') + const stops: { position: number; color: string }[] = [] + for (const gs of gsLst.children('gs')) { + const pos = gs.numAttr('pos') ?? 
0 + const posPercent = pctToDecimal(pos) * 100 + const { color, alpha } = resolveColor(gs, ctx) + stops.push({ position: posPercent, color: colorToCssLocal(color, alpha) }) + } + if (stops.length === 0) return '' + stops.sort((a, b) => a.position - b.position) + const stopsStr = stops.map((s) => `${s.color} ${s.position.toFixed(1)}%`).join(', ') + const lin = gradFill.child('lin') + if (lin.exists()) { + const angle = angleToDeg(lin.numAttr('ang') ?? 0) + const cssAngle = (angle + 90) % 360 + return `linear-gradient(${cssAngle.toFixed(1)}deg, ${stopsStr})` + } + return `linear-gradient(180deg, ${stopsStr})` +} + +// --------------------------------------------------------------------------- +// Bullet Generation +// --------------------------------------------------------------------------- + +function generateAutoNumber(type: string, index: number): string { + const num = index + 1 + switch (type) { + case 'arabicPeriod': + return `${num}.` + case 'arabicParenR': + return `${num})` + case 'arabicParenBoth': + return `(${num})` + case 'arabicPlain': + return `${num}` + case 'romanUcPeriod': + return `${toRoman(num)}.` + case 'romanLcPeriod': + return `${toRoman(num).toLowerCase()}.` + case 'alphaUcPeriod': + return `${String.fromCharCode(64 + (((num - 1) % 26) + 1))}.` + case 'alphaLcPeriod': + return `${String.fromCharCode(96 + (((num - 1) % 26) + 1))}.` + case 'alphaUcParenR': + return `${String.fromCharCode(64 + (((num - 1) % 26) + 1))})` + case 'alphaLcParenR': + return `${String.fromCharCode(96 + (((num - 1) % 26) + 1))})` + default: + return `${num}.` + } +} + +function toRoman(num: number): string { + const vals = [1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1] + const syms = ['M', 'CM', 'D', 'CD', 'C', 'XC', 'L', 'XL', 'X', 'IX', 'V', 'IV', 'I'] + let result = '' + let remaining = num + for (let i = 0; i < vals.length; i++) { + while (remaining >= vals[i]) { + result += syms[i] + remaining -= vals[i] + } + } + return result +} + +// 
--------------------------------------------------------------------------- +// Main Render Function +// --------------------------------------------------------------------------- + +/** + * Render a text body into the provided container element. + * + * Implements 7-level style inheritance: + * 1. master.defaultTextStyle + * 2. master.textStyles[category] (titleStyle / bodyStyle / otherStyle) + * 3. master placeholder lstStyle + * 4. layout placeholder lstStyle + * 5. shape lstStyle + * 6. paragraph pPr + * 7. run rPr + */ +/** Optional overrides when rendering text (e.g. table cell style text properties from tcTxStyle). */ +interface RenderTextBodyOptions { + /** When set, used as text color when the run has no explicit color (e.g. table style tcTxStyle). */ + cellTextColor?: string + /** When set, applies bold from table style tcTxStyle (overrides inherited, yields to explicit run rPr). */ + cellTextBold?: boolean + /** When set, applies italic from table style tcTxStyle (overrides inherited, yields to explicit run rPr). */ + cellTextItalic?: boolean + /** When set, applies font family from table style tcTxStyle (overrides inherited, yields to explicit run rPr). */ + cellTextFontFamily?: string + /** fontRef color from shape style (e.g. SmartArt). Overrides inherited styles but yields to explicit run rPr color. 
*/ + fontRefColor?: string +} + +export function renderTextBody( + textBody: TextBody, + placeholder: PlaceholderInfo | undefined, + ctx: RenderContext, + container: HTMLElement, + options?: RenderTextBodyOptions +): void { + const category = getPlaceholderCategory(placeholder) + let bulletCounter = 0 + + // Parse normAutofit from bodyPr (font scaling + line spacing reduction) + let fontScale = 1 + let lnSpcReduction = 0 + if (textBody.bodyProperties) { + const normAutofit = textBody.bodyProperties.child('normAutofit') + if (normAutofit.exists()) { + const fs = normAutofit.numAttr('fontScale') + if (fs !== undefined) fontScale = fs / 100000 // 100000 = 100% + const lsr = normAutofit.numAttr('lnSpcReduction') + if (lsr !== undefined) lnSpcReduction = lsr / 100000 // e.g., 20000 = 20% + } + } + + for (const paragraph of textBody.paragraphs) { + const paraDiv = document.createElement('div') + const level = paragraph.level + + // ---- Build merged paragraph style (7-level inheritance) ---- + const merged: MergedParagraphStyle = {} + + // Level 1: master defaultTextStyle + mergeParagraphProps(merged, findStyleAtLevel(ctx.master.defaultTextStyle, level)) + + // Level 2: master text styles by category + const masterTextStyle = + category === 'title' + ? ctx.master.textStyles.titleStyle + : category === 'body' + ? 
ctx.master.textStyles.bodyStyle + : ctx.master.textStyles.otherStyle + mergeParagraphProps(merged, findStyleAtLevel(masterTextStyle, level)) + + // Level 3: master placeholder lstStyle + if (placeholder) { + const masterPh = findPlaceholderNode(ctx.master.placeholders, placeholder) + if (masterPh) { + const lstStyle = getPlaceholderLstStyle(masterPh) + mergeParagraphProps(merged, findStyleAtLevel(lstStyle, level)) + } + } + + // Level 4: layout placeholder lstStyle + if (placeholder) { + const layoutPh = findPlaceholderNode( + ctx.layout.placeholders.map((e) => e.node), + placeholder + ) + if (layoutPh) { + const lstStyle = getPlaceholderLstStyle(layoutPh) + mergeParagraphProps(merged, findStyleAtLevel(lstStyle, level)) + } + } + + // Level 5: shape lstStyle + mergeParagraphProps(merged, findStyleAtLevel(textBody.listStyle, level)) + + // Level 6: paragraph pPr + if (paragraph.properties) { + mergeParagraphProps(merged, paragraph.properties) + } + + // ---- Apply paragraph styles ---- + if (merged.align) { + const alignMap: Record = { + l: 'left', + ctr: 'center', + r: 'right', + just: 'justify', + dist: 'justify', + } + paraDiv.style.textAlign = alignMap[merged.align] || 'left' + } + if (merged.marginLeft !== undefined) { + paraDiv.style.marginLeft = `${merged.marginLeft}px` + } + if (merged.textIndent !== undefined) { + paraDiv.style.textIndent = `${merged.textIndent}px` + } + // Compute effective line-height (with optional lnSpcReduction from normAutofit) + let effectiveLineHeight = merged.lineHeight + if (merged.lineHeight) { + if (lnSpcReduction > 0) { + const parsed = Number.parseFloat(merged.lineHeight) + if (!Number.isNaN(parsed)) { + if (merged.lineHeight.includes('pt')) { + effectiveLineHeight = `${(parsed * (1 - lnSpcReduction)).toFixed(2)}pt` + } else { + effectiveLineHeight = `${(parsed * (1 - lnSpcReduction)).toFixed(3)}` + } + } + } + paraDiv.style.lineHeight = effectiveLineHeight! 
+ } + // Determine effective font size for percentage-based spacing + // Use defRPr or first run's font size, fallback to 12pt + let effectiveFontSize = 12 // default 12pt + if (merged.defRPr) { + const sz = merged.defRPr.numAttr('sz') + if (sz !== undefined) effectiveFontSize = sz / 100 + } + if (paragraph.runs.length > 0 && paragraph.runs[0].properties) { + const sz = paragraph.runs[0].properties.numAttr('sz') + if (sz !== undefined) effectiveFontSize = sz / 100 + } + + if (merged.spaceBefore !== undefined) { + paraDiv.style.marginTop = `${merged.spaceBefore}pt` + } else if (merged.spaceBeforePct !== undefined) { + paraDiv.style.marginTop = `${merged.spaceBeforePct * effectiveFontSize}pt` + } + if (merged.spaceAfter !== undefined) { + paraDiv.style.marginBottom = `${merged.spaceAfter}pt` + } else if (merged.spaceAfterPct !== undefined) { + paraDiv.style.marginBottom = `${merged.spaceAfterPct * effectiveFontSize}pt` + } + + // ---- Bullets ---- + // Suppress bullets for metadata placeholders (slide number, date, footer) + // Also suppress for empty paragraphs (no visible runs) — PowerPoint never shows bullets for them + const hasVisibleRuns = paragraph.runs.some((r) => r.text != null && r.text.length > 0) + const suppressBullet = + !hasVisibleRuns || + placeholder?.type === 'sldNum' || + placeholder?.type === 'dt' || + placeholder?.type === 'ftr' || + placeholder?.type === 'title' || + placeholder?.type === 'ctrTitle' || + placeholder?.type === 'subTitle' + let bulletPrefix = '' + if (!suppressBullet && merged.bulletNone !== true) { + if (merged.bulletChar) { + bulletPrefix = merged.bulletChar + } else if (merged.bulletAutoNum) { + bulletPrefix = generateAutoNumber(merged.bulletAutoNum, bulletCounter) + bulletCounter++ + } + } + + if (bulletPrefix) { + const bulletSpan = document.createElement('span') + bulletSpan.textContent = `${bulletPrefix} ` + if (merged.bulletFont) { + bulletSpan.style.fontFamily = merged.bulletFont + } + // Bullet color: 1) explicit buClr 
from list style, 2) paragraph defRPr, 3) first run's color (so bullet matches text), 4) cell/fallback + let bulletColor: string | undefined + if (merged.bulletColorNode?.exists()) { + bulletColor = resolveColorToCss(merged.bulletColorNode, ctx) + } + if (bulletColor === undefined && merged.defRPr && merged.defRPr.exists()) { + const bulletRunStyle: MergedRunStyle = {} + mergeRunProps(bulletRunStyle, merged.defRPr, ctx) + bulletColor = bulletRunStyle.color + } + if (bulletColor === undefined && paragraph.runs.length > 0) { + const runStyle: MergedRunStyle = {} + if (merged.defRPr) mergeRunProps(runStyle, merged.defRPr, ctx) + if (paragraph.runs[0].properties) mergeRunProps(runStyle, paragraph.runs[0].properties, ctx) + bulletColor = runStyle.color + } + // Fallback: check shape's lstStyle defRPr for color (same as run fallback) + if (bulletColor === undefined && textBody.listStyle) { + const lstStyleLevel = findStyleAtLevel(textBody.listStyle, level) + if (lstStyleLevel.exists()) { + const lstDefRPr = lstStyleLevel.child('defRPr') + if (lstDefRPr.exists()) { + const fallbackStyle: MergedRunStyle = {} + mergeRunProps(fallbackStyle, lstDefRPr, ctx) + if (fallbackStyle.color !== undefined) { + bulletColor = fallbackStyle.color + } + } + } + } + bulletSpan.style.color = + bulletColor ?? options?.fontRefColor ?? options?.cellTextColor ?? '#000000' + paraDiv.appendChild(bulletSpan) + } + + // ---- Render runs ---- + if (paragraph.runs.length === 0) { + // Empty paragraph — still need to maintain spacing + paraDiv.appendChild(document.createElement('br')) + } + + // When line spacing is absolute (spcPts) and paragraph has line breaks, + // wrap each line in a block-level div with explicit height. This ensures + // exact spacing regardless of font metrics (CJK fonts e.g. Microsoft YaHei have + // content areas taller than font-size, causing CSS line-height to be + // overridden by the font's natural spacing). 
+ const hasLineBreaks = paragraph.runs.some((r) => r.text === '\n') + // Set tab-size when paragraph contains tab characters (default OOXML tab spacing = 914400 EMU = 96px) + if (paragraph.runs.some((r) => r.text?.includes('\t'))) { + const defaultTabPx = 96 // 914400 EMU at 96 dpi + paraDiv.style.tabSize = `${defaultTabPx}px` + } + const useLineWrappers = merged.lineHeightAbsolute && hasLineBreaks && effectiveLineHeight + let currentLineDiv: HTMLElement | null = null + if (useLineWrappers) { + currentLineDiv = document.createElement('div') + currentLineDiv.style.height = effectiveLineHeight! + currentLineDiv.style.overflow = 'visible' + paraDiv.appendChild(currentLineDiv) + } + + for (const run of paragraph.runs) { + if (run.text === '\n') { + if (useLineWrappers) { + // Close current line div and start a new one + currentLineDiv = document.createElement('div') + currentLineDiv.style.height = effectiveLineHeight! + currentLineDiv.style.overflow = 'visible' + paraDiv.appendChild(currentLineDiv) + } else { + paraDiv.appendChild(document.createElement('br')) + } + continue + } + + // Build merged run style + const runStyle: MergedRunStyle = {} + + // Apply default run properties from merged paragraph defRPr + if (merged.defRPr) { + mergeRunProps(runStyle, merged.defRPr, ctx) + } + + // Level 7: run rPr + if (run.properties) { + mergeRunProps(runStyle, run.properties, ctx) + } + + // Fallback: if no color resolved yet, check the shape's lstStyle defRPr. + // This handles the case where paragraph pPr has an empty that + // overwrites the lstStyle's defRPr (which may carry solidFill color). 
+ if (runStyle.color === undefined && textBody.listStyle) { + const lstStyleLevel = findStyleAtLevel(textBody.listStyle, level) + if (lstStyleLevel.exists()) { + const lstDefRPr = lstStyleLevel.child('defRPr') + if (lstDefRPr.exists()) { + const fallbackStyle: MergedRunStyle = {} + mergeRunProps(fallbackStyle, lstDefRPr, ctx) + if (fallbackStyle.color !== undefined) { + runStyle.color = fallbackStyle.color + } + } + } + } + + // Determine if this should be a link + let element: HTMLElement + if (runStyle.hlinkClick) { + const a = document.createElement('a') + a.href = runStyle.hlinkClick + a.target = '_blank' + a.rel = 'noopener noreferrer' + element = a + } else { + element = document.createElement('span') + } + + // Preserve consecutive spaces by alternating with   so they survive + // HTML whitespace collapse without being stretched by text-align:justify. + // Tabs still need white-space:pre for tab-stop rendering. + if (run.text?.includes('\t')) { + element.textContent = run.text + element.style.whiteSpace = 'pre' + } else if (run.text && / {2}/.test(run.text)) { + // Replace pairs of spaces with "  " so browsers cannot collapse them, + // while normal spaces between words remain stretchable for justify. + const escaped = run.text + .replace(/&/g, '&') + .replace(//g, '>') + .replace(/ {2}/g, ' \u00a0') + element.innerHTML = escaped + } else { + element.textContent = run.text + } + + // Apply run styles (with normAutofit fontScale) + // Default to 12pt if no font size specified at any inheritance level + const fontSize = runStyle.fontSize || 12 + element.style.fontSize = `${fontSize * fontScale}pt` + // Bold: explicit run rPr > cellTextBold (table style tcTxStyle) > inherited styles + const hasExplicitRunBold = run.properties?.attr('b') !== undefined + if (hasExplicitRunBold ? runStyle.bold : (options?.cellTextBold ?? 
runStyle.bold)) { + element.style.fontWeight = 'bold' + } + // Italic: explicit run rPr > cellTextItalic (table style tcTxStyle) > inherited styles + const hasExplicitRunItalic = run.properties?.attr('i') !== undefined + if (hasExplicitRunItalic ? runStyle.italic : (options?.cellTextItalic ?? runStyle.italic)) { + element.style.fontStyle = 'italic' + } + + const decorations: string[] = [] + if (runStyle.underline) decorations.push('underline') + if (runStyle.strikethrough) decorations.push('line-through') + if (decorations.length > 0) { + element.style.textDecoration = decorations.join(' ') + } + + // Color priority: explicit run rPr > hlink theme color > cellTextColor (table style tcTxStyle) > fontRef (shape style) > inherited styles > black default + // cellTextColor from table style overrides inherited cascade colors but yields to explicit run/paragraph solidFill/gradFill. + // fontRefColor overrides inherited styles but yields to explicit run solidFill/gradFill. + const hasExplicitRunColor = + run.properties?.child('solidFill').exists() || run.properties?.child('gradFill').exists() + let effectiveColor: string | undefined + if (options?.fontRefColor) { + effectiveColor = hasExplicitRunColor ? runStyle.color : options.fontRefColor + } else if (options?.cellTextColor && !hasExplicitRunColor) { + effectiveColor = options.cellTextColor + } else { + effectiveColor = runStyle.color + } + + // Hyperlink default color: when the run is a hyperlink and has no explicit + // solidFill on its own rPr, use the theme's hlink color. This matches + // PowerPoint behaviour where hyperlink text defaults to the hlink scheme color. + if (runStyle.hlinkClick && !hasExplicitRunColor) { + const hlinkHex = ctx.theme.colorScheme.get('hlink') + if (hlinkHex) { + effectiveColor = hlinkHex.startsWith('#') ? 
hlinkHex : `#${hlinkHex}` + } + } + + if (effectiveColor) { + element.style.color = effectiveColor + } else { + // No explicit color from run/paragraph/style: use black so text does not inherit page CSS (e.g. body { color: #e0e0e0 }) + element.style.color = '#000000' + } + + // Gradient text fill: use background-clip to paint text with gradient + if (runStyle.textGradientCss) { + element.style.background = runStyle.textGradientCss + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ;(element.style as any).webkitBackgroundClip = 'text' + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ;(element.style as any).backgroundClip = 'text' + element.style.color = 'transparent' + } + + // Text outline (a:ln on rPr) and noFill handling + if (runStyle.textNoFill || runStyle.textOutlineWidth) { + const strokeW = runStyle.textOutlineWidth ?? 0.75 + if (runStyle.textNoFill && runStyle.textOutlineGradientCss) { + // Ghost text: no fill + gradient outline → show outline fading via mask + const outlineColor = '#ffffff' // base stroke color (gradient applied via mask) + element.style.color = 'transparent' + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ;(element.style as any).webkitTextStrokeWidth = `${strokeW}px` + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ;(element.style as any).webkitTextStrokeColor = outlineColor + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ;(element.style as any).paintOrder = 'stroke fill' + // Use mask-image to apply the gradient fade to the entire text element + const maskGrad = runStyle.textOutlineGradientCss + element.style.maskImage = maskGrad + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ;(element.style as any).webkitMaskImage = maskGrad + } else if (runStyle.textNoFill && runStyle.textOutlineColor) { + // Ghost text with solid outline + element.style.color = 'transparent' + // eslint-disable-next-line @typescript-eslint/no-explicit-any + 
;(element.style as any).webkitTextStrokeWidth = `${strokeW}px` + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ;(element.style as any).webkitTextStrokeColor = runStyle.textOutlineColor + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ;(element.style as any).paintOrder = 'stroke fill' + } else if (runStyle.textNoFill) { + // noFill with no outline — invisible text (but keep space) + element.style.color = 'transparent' + } else if (runStyle.textOutlineColor) { + // Outline with normal fill + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ;(element.style as any).webkitTextStrokeWidth = `${strokeW}px` + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ;(element.style as any).webkitTextStrokeColor = runStyle.textOutlineColor + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ;(element.style as any).paintOrder = 'stroke fill' + } + } + + // Font family: explicit run rPr > cellTextFontFamily (table style) > inherited > theme fallback + const hasExplicitRunFont = + run.properties?.child('latin').exists() || + run.properties?.child('ea').exists() || + run.properties?.child('cs').exists() + const effectiveFont = hasExplicitRunFont + ? runStyle.fontFamily + : (options?.cellTextFontFamily ?? runStyle.fontFamily) + if (effectiveFont) { + element.style.fontFamily = `"${effectiveFont}"` + } else { + // Fallback to theme minor font + const fallback = ctx.theme.minorFont.latin || ctx.theme.minorFont.ea + if (fallback) { + element.style.fontFamily = `"${fallback}"` + } + } + + // Character spacing (a:spc) — compact/tracking in points + if (runStyle.letterSpacingPt !== undefined) { + element.style.letterSpacing = `${runStyle.letterSpacingPt}pt` + } + // Kerning (a:kern): val = min font size (pt) to kern; 0 = always kern + if (runStyle.kern !== undefined) { + const effectivePt = (runStyle.fontSize || 12) * fontScale + element.style.fontKerning = effectivePt >= runStyle.kern ? 
'normal' : 'none' + } + + // Text capitalization (a:rPr@cap) + if (runStyle.cap === 'all') { + element.style.textTransform = 'uppercase' + } else if (runStyle.cap === 'small') { + element.style.fontVariant = 'small-caps' + } + + // Baseline shift (superscript/subscript) + if (runStyle.baseline !== undefined && runStyle.baseline !== 0) { + // OOXML baseline is in 1000ths of percent; positive = superscript, negative = subscript + const shiftPct = runStyle.baseline / 1000 + element.style.verticalAlign = `${shiftPct}%` + // Reduce font size for super/subscript + if (Math.abs(shiftPct) >= 20) { + element.style.fontSize = `${fontSize * fontScale * 0.65}pt` + } + } + + // Append to the current line wrapper (when using absolute line spacing) + // or directly to the paragraph div + const appendTarget = currentLineDiv ?? paraDiv + appendTarget.appendChild(element) + } + + // endParaRPr: when the paragraph ends with a line break (trailing \n), + // the end-of-paragraph mark (endParaRPr) defines the font size for the + // trailing blank line. Without this, bottom-anchored text boxes render + // content too low because the trailing space is too small. + if (paragraph.endParaRPr) { + const lastRun = paragraph.runs[paragraph.runs.length - 1] + if (lastRun?.text === '\n') { + const epSz = paragraph.endParaRPr.numAttr('sz') + if (epSz !== undefined) { + const spacer = document.createElement('span') + spacer.textContent = '\u200B' // zero-width space to maintain line height + spacer.style.fontSize = `${(epSz / 100) * fontScale}pt` + const target = currentLineDiv ?? 
paraDiv + target.appendChild(spacer) + } + } + } + + container.appendChild(paraDiv) + } +} diff --git a/apps/sim/lib/pptx-renderer/shapes/custom-geometry.ts b/apps/sim/lib/pptx-renderer/shapes/custom-geometry.ts new file mode 100644 index 00000000000..c353ec2d2d5 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/shapes/custom-geometry.ts @@ -0,0 +1,178 @@ +/** + * Parse OOXML custom geometry (a:custGeom) into SVG path strings. + */ + +import type { SafeXmlNode } from '../parser/xml-parser' + +function inferPathExtent(pathNode: SafeXmlNode): { w: number; h: number } { + let maxX = 0 + let maxY = 0 + + for (const cmd of pathNode.allChildren()) { + if (cmd.localName === 'moveTo' || cmd.localName === 'lnTo') { + const pt = cmd.child('pt') + maxX = Math.max(maxX, pt.numAttr('x') ?? 0) + maxY = Math.max(maxY, pt.numAttr('y') ?? 0) + continue + } + if (cmd.localName === 'cubicBezTo' || cmd.localName === 'quadBezTo') { + for (const pt of cmd.children('pt')) { + maxX = Math.max(maxX, pt.numAttr('x') ?? 0) + maxY = Math.max(maxY, pt.numAttr('y') ?? 0) + } + continue + } + if (cmd.localName === 'arcTo') { + maxX = Math.max(maxX, cmd.numAttr('wR') ?? 0) + maxY = Math.max(maxY, cmd.numAttr('hR') ?? 0) + } + } + + return { + w: Math.max(1, maxX), + h: Math.max(1, maxY), + } +} + +/** + * Render a custom geometry element to an SVG path d-attribute string. + * + * @param custGeom - SafeXmlNode wrapping the `a:custGeom` element + * @param width - Target width in pixels + * @param height - Target height in pixels + * @returns SVG path d-attribute string + */ +export function renderCustomGeometry( + custGeom: SafeXmlNode, + width: number, + height: number, + sourceExtent?: { w: number; h: number } +): string { + const pathLst = custGeom.child('pathLst') + if (!pathLst.exists()) return '' + + const paths = pathLst.children('path') + const segments: string[] = [] + + for (const pathNode of paths) { + const fallbackExtent = inferPathExtent(pathNode) + const pathW = pathNode.numAttr('w') ?? 
sourceExtent?.w ?? fallbackExtent.w + const pathH = pathNode.numAttr('h') ?? sourceExtent?.h ?? fallbackExtent.h + + const scaleX = pathW > 0 ? width / pathW : 1 + const scaleY = pathH > 0 ? height / pathH : 1 + + // Track current position for arcTo calculations + let curX = 0 + let curY = 0 + + const commands = pathNode.allChildren() + for (const cmd of commands) { + switch (cmd.localName) { + case 'moveTo': { + const pt = cmd.child('pt') + const x = (pt.numAttr('x') ?? 0) * scaleX + const y = (pt.numAttr('y') ?? 0) * scaleY + segments.push(`M${x},${y}`) + curX = x + curY = y + break + } + + case 'lnTo': { + const pt = cmd.child('pt') + const x = (pt.numAttr('x') ?? 0) * scaleX + const y = (pt.numAttr('y') ?? 0) * scaleY + segments.push(`L${x},${y}`) + curX = x + curY = y + break + } + + case 'cubicBezTo': { + const pts = cmd.children('pt') + if (pts.length >= 3) { + const x1 = (pts[0].numAttr('x') ?? 0) * scaleX + const y1 = (pts[0].numAttr('y') ?? 0) * scaleY + const x2 = (pts[1].numAttr('x') ?? 0) * scaleX + const y2 = (pts[1].numAttr('y') ?? 0) * scaleY + const x3 = (pts[2].numAttr('x') ?? 0) * scaleX + const y3 = (pts[2].numAttr('y') ?? 0) * scaleY + segments.push(`C${x1},${y1} ${x2},${y2} ${x3},${y3}`) + curX = x3 + curY = y3 + } + break + } + + case 'quadBezTo': { + const pts = cmd.children('pt') + if (pts.length >= 2) { + const x1 = (pts[0].numAttr('x') ?? 0) * scaleX + const y1 = (pts[0].numAttr('y') ?? 0) * scaleY + const x2 = (pts[1].numAttr('x') ?? 0) * scaleX + const y2 = (pts[1].numAttr('y') ?? 0) * scaleY + segments.push(`Q${x1},${y1} ${x2},${y2}`) + curX = x2 + curY = y2 + } + break + } + + case 'arcTo': { + const wRRaw = cmd.numAttr('wR') ?? 0 + const hRRaw = cmd.numAttr('hR') ?? 0 + const wR = wRRaw * scaleX + const hR = hRRaw * scaleY + const stAngRaw = cmd.numAttr('stAng') ?? 0 + const swAngRaw = cmd.numAttr('swAng') ?? 
0 + + // OOXML angles are in 60000ths of a degree + const stAng = stAngRaw / 60000 + const swAng = swAngRaw / 60000 + + if (wR === 0 || hR === 0 || swAng === 0) { + // Degenerate arc, skip + break + } + + // OOXML arcTo angles are visual (geometric ray) angles in path coordinate space. + // Convert to parametric using UNSCALED radii before computing positions. + const stVisRad = (stAng * Math.PI) / 180 + const stAngRad = Math.atan2(wRRaw * Math.sin(stVisRad), hRRaw * Math.cos(stVisRad)) + + const endVisRad = ((stAng + swAng) * Math.PI) / 180 + const endAngRad = Math.atan2(wRRaw * Math.sin(endVisRad), hRRaw * Math.cos(endVisRad)) + + // Compute center and endpoint in unscaled path space, then scale + const curXU = curX / scaleX + const curYU = curY / scaleY + const cx = curXU - wRRaw * Math.cos(stAngRad) + const cy = curYU - hRRaw * Math.sin(stAngRad) + const endX = (cx + wRRaw * Math.cos(endAngRad)) * scaleX + const endY = (cy + hRRaw * Math.sin(endAngRad)) * scaleY + + // SVG arc flags + const largeArc = Math.abs(swAng) > 180 ? 1 : 0 + const sweep = swAng > 0 ? 1 : 0 + + segments.push(`A${wR},${hR} 0 ${largeArc},${sweep} ${endX},${endY}`) + curX = endX + curY = endY + break + } + + case 'close': { + segments.push('Z') + break + } + + default: + // Unknown command, skip + break + } + } + } + + return segments.join(' ') +} diff --git a/apps/sim/lib/pptx-renderer/shapes/presets.ts b/apps/sim/lib/pptx-renderer/shapes/presets.ts new file mode 100644 index 00000000000..0be28172864 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/shapes/presets.ts @@ -0,0 +1,6591 @@ +import { createLogger } from '@sim/logger' + +const logger = createLogger('PptxShapePresets') + +/** + * Preset shape SVG path generators for OOXML preset geometry types. + * + * Each generator takes width, height, and optional adjustment values, + * returning an SVG path d-attribute string. + * + * Adjustment values follow OOXML convention: values are in 100000ths + * (so 50000 = 50%). 
+ */ + +import { shapeArc } from './shape-arc' + +type PresetShapeGenerator = (w: number, h: number, adjustments?: Map) => string + +/** Helper: get adjustment value or default, converting from 100000ths to fraction. */ +function adj( + adjustments: Map | undefined, + name: string, + defaultVal: number +): number { + const raw = adjustments?.get(name) ?? defaultVal + return raw / 100000 +} + +/** Helper: generate a regular polygon path (inscribed in bounding box). */ +function _regularPolygon(w: number, h: number, sides: number): string { + const cx = w / 2 + const cy = h / 2 + const rx = w / 2 + const ry = h / 2 + const parts: string[] = [] + for (let i = 0; i < sides; i++) { + // Start from top center (-90 degrees) + const angle = (2 * Math.PI * i) / sides - Math.PI / 2 + const x = cx + rx * Math.cos(angle) + const y = cy + ry * Math.sin(angle) + parts.push(i === 0 ? `M${x},${y}` : `L${x},${y}`) + } + parts.push('Z') + return parts.join(' ') +} + +/** Raw adj helper: get adjustment value without dividing by 100000. */ +function adjRaw( + adjustments: Map | undefined, + name: string, + defaultVal: number +): number { + return adjustments?.get(name) ?? defaultVal +} + +/** Helper: generate a star polygon. */ +function starShape(w: number, h: number, points: number, innerRatio = 0.4): string { + const cx = w / 2 + const cy = h / 2 + const outerRx = w / 2 + const outerRy = h / 2 + const innerRx = outerRx * innerRatio + const innerRy = outerRy * innerRatio + const totalPoints = points * 2 + const parts: string[] = [] + + for (let i = 0; i < totalPoints; i++) { + const angle = (2 * Math.PI * i) / totalPoints - Math.PI / 2 + const isOuter = i % 2 === 0 + const rx = isOuter ? outerRx : innerRx + const ry = isOuter ? outerRy : innerRy + const x = cx + rx * Math.cos(angle) + const y = cy + ry * Math.sin(angle) + parts.push(i === 0 ? 
`M${x},${y}` : `L${x},${y}`) + } + parts.push('Z') + return parts.join(' ') +} + +/** + * Mirror an absolute SVG path horizontally across the given width. + * Supports the command subset used by preset arrow shapes: M, L, A, Z. + */ +function mirrorAbsolutePathHorizontally(path: string, width: number): string { + const tokens = path.match(/[MLAZ]|-?\d*\.?\d+(?:e[-+]?\d+)?/gi) + if (!tokens) return path + + const out: string[] = [] + let i = 0 + while (i < tokens.length) { + const cmd = tokens[i++] + if (!cmd) break + out.push(cmd) + if (cmd === 'Z') continue + if (cmd === 'M' || cmd === 'L') { + const x = Number(tokens[i++]) + const y = Number(tokens[i++]) + out.push(String(width - x), String(y)) + continue + } + if (cmd === 'A') { + const rx = tokens[i++] + const ry = tokens[i++] + const rot = tokens[i++] + const largeArc = tokens[i++] + const sweep = Number(tokens[i++]) + const x = Number(tokens[i++]) + const y = Number(tokens[i++]) + out.push(rx, ry, rot, largeArc, String(sweep ? 0 : 1), String(width - x), String(y)) + continue + } + return path + } + + return out.join(' ') +} + +function mirrorAbsolutePathVertically(path: string, height: number): string { + const tokens = path.match(/[MLAZ]|-?\d*\.?\d+(?:e[-+]?\d+)?/gi) + if (!tokens) return path + + const out: string[] = [] + let i = 0 + while (i < tokens.length) { + const cmd = tokens[i++] + if (!cmd) break + out.push(cmd) + if (cmd === 'Z') continue + if (cmd === 'M' || cmd === 'L') { + const x = Number(tokens[i++]) + const y = Number(tokens[i++]) + out.push(String(x), String(height - y)) + continue + } + if (cmd === 'A') { + const rx = tokens[i++] + const ry = tokens[i++] + const rot = tokens[i++] + const largeArc = tokens[i++] + const sweep = Number(tokens[i++]) + const x = Number(tokens[i++]) + const y = Number(tokens[i++]) + out.push(rx, ry, rot, largeArc, String(sweep ? 
0 : 1), String(x), String(height - y)) + } + } + return out.join(' ') +} + +// --------------------------------------------------------------------------- +// Preset shape registry +// --------------------------------------------------------------------------- + +export const presetShapes: Map = new Map() + +// ===== Basic Shapes ===== + +presetShapes.set('rect', (w, h) => `M0,0 L${w},0 L${w},${h} L0,${h} Z`) + +presetShapes.set('roundRect', (w, h, adjustments) => { + const a = adj(adjustments, 'adj', 16667) + const r = Math.min(w, h) * a + return [ + `M${r},0`, + `L${w - r},0`, + `A${r},${r} 0 0,1 ${w},${r}`, + `L${w},${h - r}`, + `A${r},${r} 0 0,1 ${w - r},${h}`, + `L${r},${h}`, + `A${r},${r} 0 0,1 0,${h - r}`, + `L0,${r}`, + `A${r},${r} 0 0,1 ${r},0`, + 'Z', + ].join(' ') +}) + +presetShapes.set('plaque', (w, h, adjustments) => { + // OOXML: adj default 16667, concave (inward) arc corners via negative sweep arcTo + const a = Math.min(Math.max(adjRaw(adjustments, 'adj', 16667), 0), 50000) + const x1 = (Math.min(w, h) * a) / 100000 + const x2 = w - x1 + const y2 = h - x1 + // Start at (0, x1), arcTo with negative sweep creates concave corner + const a1 = ooArcTo(0, x1, x1, x1, 90, -90) // top-left: ends at (x1, 0) + const a2 = ooArcTo(x2, 0, x1, x1, 180, -90) // top-right: ends at (w, x1) + const a3 = ooArcTo(w, y2, x1, x1, 270, -90) // bottom-right: ends at (x2, h) + const a4 = ooArcTo(x1, h, x1, x1, 0, -90) // bottom-left: ends at (0, y2) -> close to (0, x1) + return [ + `M0,${x1}`, + a1.svg, + `L${x2},0`, + a2.svg, + `L${w},${y2}`, + a3.svg, + `L${x1},${h}`, + a4.svg, + 'Z', + ].join(' ') +}) + +// Tab family: OOXML uses dx = sqrt(w²+h²)/20 (diagonal/20) +presetShapes.set('cornerTabs', (w, h) => { + const dx = Math.sqrt(w * w + h * h) / 20 + return [ + `M0,0 L${dx},0 L0,${dx} Z`, + `M${w},0 L${w - dx},0 L${w},${dx} Z`, + `M${w},${h} L${w - dx},${h} L${w},${h - dx} Z`, + `M0,${h} L${dx},${h} L0,${h - dx} Z`, + ].join(' ') +}) + +presetShapes.set('squareTabs', 
(w, h) => { + const dx = Math.sqrt(w * w + h * h) / 20 + return [ + `M0,0 L${dx},0 L${dx},${dx} L0,${dx} Z`, + `M${w - dx},0 L${w},0 L${w},${dx} L${w - dx},${dx} Z`, + `M0,${h - dx} L${dx},${h - dx} L${dx},${h} L0,${h} Z`, + `M${w - dx},${h - dx} L${w},${h - dx} L${w},${h} L${w - dx},${h} Z`, + ].join(' ') +}) + +presetShapes.set('plaqueTabs', (w, h) => { + const dx = Math.sqrt(w * w + h * h) / 20 + return [ + `M0,0 L${dx},0 A${dx},${dx} 0 0,1 0,${dx} Z`, + `M${w},0 L${w - dx},0 A${dx},${dx} 0 0,0 ${w},${dx} Z`, + `M0,${h} L0,${h - dx} A${dx},${dx} 0 0,1 ${dx},${h} Z`, + `M${w},${h} L${w - dx},${h} A${dx},${dx} 0 0,1 ${w},${h - dx} Z`, + ].join(' ') +}) + +presetShapes.set('ellipse', (w, h) => { + const rx = w / 2 + const ry = h / 2 + return [`M${w},${ry}`, `A${rx},${ry} 0 1,1 0,${ry}`, `A${rx},${ry} 0 1,1 ${w},${ry}`, 'Z'].join( + ' ' + ) +}) + +presetShapes.set('triangle', (w, h, adjustments) => { + const a = adj(adjustments, 'adj', 50000) + const topX = w * a + return `M${topX},0 L${w},${h} L0,${h} Z` +}) + +presetShapes.set('isosTriangle', (w, h, adjustments) => { + const a = adj(adjustments, 'adj', 50000) + const topX = w * a + return `M${topX},0 L${w},${h} L0,${h} Z` +}) + +presetShapes.set('rtTriangle', (w, h) => `M0,0 L${w},${h} L0,${h} Z`) + +presetShapes.set('diamond', (w, h) => { + const cx = w / 2 + const cy = h / 2 + return `M${cx},0 L${w},${cy} L${cx},${h} L0,${cy} Z` +}) + +presetShapes.set('pentagon', (w, h) => { + // OOXML pentagon: hf=105146, vf=110557 with center shifted to svc so top vertex = y=0. 
+ const hc = w / 2 + const swd2 = (hc * 105146) / 100000 + const shd2 = ((h / 2) * 110557) / 100000 + const svc = shd2 // svc = vc * vf/100000 = shd2, so top vertex at svc - shd2 = 0 + const dx1 = swd2 * Math.cos((18 * Math.PI) / 180) // cos 1080000 + const dx2 = swd2 * Math.cos((54 * Math.PI) / 180) // cos 18360000 + const dy1 = shd2 * Math.sin((18 * Math.PI) / 180) // sin 1080000 + const dy2 = shd2 * Math.sin((54 * Math.PI) / 180) // |sin 18360000| + return [ + `M${hc - dx1},${svc - dy1}`, // x1, y1 (upper-left) + `L${hc},0`, // hc, t (top) + `L${hc + dx1},${svc - dy1}`, // x4, y1 (upper-right) + `L${hc + dx2},${svc + dy2}`, // x3, y2 (lower-right) + `L${hc - dx2},${svc + dy2}`, // x2, y2 (lower-left) + 'Z', + ].join(' ') +}) + +presetShapes.set('hexagon', (w, h, adjustments) => { + // OOXML hexagon: adj=25000, vf=115470 (2/√3 scale factor for regular hex). + const ss = Math.min(w, h) + const a = Math.min( + Math.max(adjRaw(adjustments, 'adj', 25000), 0), + ss > 0 ? (50000 * w) / ss : 50000 + ) + const vf = 115470 + const shd2 = ((h / 2) * vf) / 100000 + const x1 = (ss * a) / 100000 + const x2 = w - x1 + const _hc = w / 2 + const vc = h / 2 + // dy1 = sin(shd2, 60°) = shd2 * sin(60°) + const dy1 = shd2 * Math.sin((60 * Math.PI) / 180) + const y1 = vc - dy1 + const y2 = vc + dy1 + return [ + `M0,${vc}`, + `L${x1},${y1}`, + `L${x2},${y1}`, + `L${w},${vc}`, + `L${x2},${y2}`, + `L${x1},${y2}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('octagon', (w, h, adjustments) => { + // OOXML octagon: adj=29289 (≈1-1/√2). Uses ss-based cuts for both x and y. 
+ const ss = Math.min(w, h) + const a = Math.min(Math.max(adjRaw(adjustments, 'adj', 29289), 0), 50000) + const x1 = (ss * a) / 100000 + const x2 = w - x1 + const y2 = h - x1 + return [ + `M0,${x1}`, + `L${x1},0`, + `L${x2},0`, + `L${w},${x1}`, + `L${w},${y2}`, + `L${x2},${h}`, + `L${x1},${h}`, + `L0,${y2}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('heptagon', (w, h) => { + // OOXML heptagon: hf=102572, vf=105210 with shifted center. + const hc = w / 2 + const swd2 = (hc * 102572) / 100000 + const shd2 = ((h / 2) * 105210) / 100000 + const svc = ((h / 2) * 105210) / 100000 + // Pre-computed trig ratios from OOXML spec (scaled by 100000) + const dx1 = (swd2 * 97493) / 100000 // cos(12.857°) ≈ sin(77.14°) + const dx2 = (swd2 * 78183) / 100000 // cos(38.57°) + const dx3 = (swd2 * 43388) / 100000 // cos(64.29°) + const dy1 = (shd2 * 62349) / 100000 // sin(38.57°) + const dy2 = (shd2 * 22252) / 100000 // sin(12.857°) + const dy3 = (shd2 * 90097) / 100000 // sin(64.29°) + return [ + `M${hc - dx1},${svc + dy2}`, // x1, y2 (left) + `L${hc - dx2},${svc - dy1}`, // x2, y1 (upper-left) + `L${hc},0`, // hc, t (top: svc - shd2 = 0) + `L${hc + dx2},${svc - dy1}`, // x5, y1 (upper-right) + `L${hc + dx1},${svc + dy2}`, // x6, y2 (right) + `L${hc + dx3},${svc + dy3}`, // x4, y3 (lower-right) + `L${hc - dx3},${svc + dy3}`, // x3, y3 (lower-left) + 'Z', + ].join(' ') +}) +presetShapes.set('decagon', (w, h) => { + // OOXML decagon: vf=105146 (no hf, uses wd2 for x). 10 vertices starting from left. 
+ const hc = w / 2 + const vc = h / 2 + const shd2 = (vc * 105146) / 100000 + // OOXML angles: 2160000=36°, 4320000=72° + const dx1 = hc * Math.cos((36 * Math.PI) / 180) // cos(wd2, 2160000) + const dx2 = hc * Math.cos((72 * Math.PI) / 180) // cos(wd2, 4320000) + const dy1 = shd2 * Math.sin((72 * Math.PI) / 180) // sin(shd2, 4320000) + const dy2 = shd2 * Math.sin((36 * Math.PI) / 180) // sin(shd2, 2160000) + return [ + `M0,${vc}`, // l, vc + `L${hc - dx1},${vc - dy2}`, // x1, y2 + `L${hc - dx2},${vc - dy1}`, // x2, y1 + `L${hc + dx2},${vc - dy1}`, // x3, y1 + `L${hc + dx1},${vc - dy2}`, // x4, y2 + `L${w},${vc}`, // r, vc + `L${hc + dx1},${vc + dy2}`, // x4, y3 + `L${hc + dx2},${vc + dy1}`, // x3, y4 + `L${hc - dx2},${vc + dy1}`, // x2, y4 + `L${hc - dx1},${vc + dy2}`, // x1, y3 + 'Z', + ].join(' ') +}) +presetShapes.set('dodecagon', (w, h) => { + // OOXML dodecagon: 21600-unit coordinate space, simple ratios. + const x1 = (w * 2894) / 21600 + const x2 = (w * 7906) / 21600 + const x3 = (w * 13694) / 21600 + const x4 = (w * 18706) / 21600 + const y1 = (h * 2894) / 21600 + const y2 = (h * 7906) / 21600 + const y3 = (h * 13694) / 21600 + const y4 = (h * 18706) / 21600 + return [ + `M0,${y2}`, + `L${x1},${y1}`, + `L${x2},0`, + `L${x3},0`, + `L${x4},${y1}`, + `L${w},${y2}`, + `L${w},${y3}`, + `L${x4},${y4}`, + `L${x3},${h}`, + `L${x2},${h}`, + `L${x1},${y4}`, + `L0,${y3}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('parallelogram', (w, h, adjustments) => { + // OOXML: adj=25000, x2 = ss * a / 100000, path: M(l,b)→L(x2,t)→L(r,t)→L(r-x2,b)→Z + const ss = Math.min(w, h) + const maxAdj = ss > 0 ? (100000 * w) / ss : 100000 + const a = Math.min(Math.max(adjRaw(adjustments, 'adj', 25000), 0), maxAdj) + const x2 = (ss * a) / 100000 + const x5 = w - x2 + return `M0,${h} L${x2},0 L${w},0 L${x5},${h} Z` +}) + +presetShapes.set('trapezoid', (w, h, adjustments) => { + // OOXML: adj=25000, x2 = ss * a / 100000, x3 = r - x2 + const ss = Math.min(w, h) + const maxAdj = ss > 0 ? 
(50000 * w) / ss : 50000 + const a = Math.min(Math.max(adjRaw(adjustments, 'adj', 25000), 0), maxAdj) + const x2 = (ss * a) / 100000 + const x3 = w - x2 + return `M0,${h} L${x2},0 L${x3},0 L${w},${h} Z` +}) + +presetShapes.set('nonIsoscelesTrapezoid', (w, h, adjustments) => { + // OOXML: Two independent top insets. adj1=25000, adj2=25000 + const ss = Math.min(w, h) + const maxAdj = ss > 0 ? (50000 * w) / ss : 50000 + const a1 = Math.min(Math.max(adjRaw(adjustments, 'adj1', 25000), 0), maxAdj) + const a2 = Math.min(Math.max(adjRaw(adjustments, 'adj2', 25000), 0), maxAdj) + const x2 = (ss * a1) / 100000 + const dx3 = (ss * a2) / 100000 + const x3 = w - dx3 + return `M0,${h} L${x2},0 L${x3},0 L${w},${h} Z` +}) + +presetShapes.set('corner', (w, h, adjustments) => { + // OOXML corner: two adjustments control horizontal and vertical arm thickness. + // adj1 (default 50000) → vertical arm height from bottom: dy1 = ss * a1, y1 = h - dy1 + // adj2 (default 50000) → horizontal arm width from left: x1 = ss * a2 + const ss = Math.min(w, h) + const a1 = Math.min(Math.max(adj(adjustments, 'adj1', 50000), 0), 1) + const a2 = Math.min(Math.max(adj(adjustments, 'adj2', 50000), 0), 1) + const x1 = ss * a2 + const dy1 = ss * a1 + const y1 = h - dy1 + return [`M0,0`, `L${x1},0`, `L${x1},${y1}`, `L${w},${y1}`, `L${w},${h}`, `L0,${h}`, 'Z'].join(' ') +}) + +presetShapes.set('diagStripe', (w, h, adjustments) => { + const a = Math.min(Math.max(adj(adjustments, 'adj', 50000), 0), 1) + const x2 = w * a + const y2 = h * a + return [`M0,${y2}`, `L${x2},0`, `L${w},0`, `L0,${h}`, 'Z'].join(' ') +}) + +// ===== Star Shapes ===== + +presetShapes.set('star4', (w, h, adjustments) => { + // OOXML default adj=12500 → innerRatio = 12500/50000 = 0.25 + const a = adj(adjustments, 'adj', 12500) * 2 + return starShape(w, h, 4, Math.min(Math.max(a, 0), 1)) +}) +presetShapes.set('star5', (w, h, adjustments) => { + // OOXML: adj=19098, hf=105146, vf=110557 — scaling factors for non-square bounding box + 
const aRaw = adjustments?.get('adj') ?? 19098 + const a = Math.min(Math.max(aRaw, 0), 50000) + const hf = 105146 + const vf = 110557 + const swd2 = ((w / 2) * hf) / 100000 + const shd2 = ((h / 2) * vf) / 100000 + const svc = ((h / 2) * vf) / 100000 + const iwd2 = (swd2 * a) / 50000 + const ihd2 = (shd2 * a) / 50000 + const cx = w / 2 + const step = (2 * Math.PI) / 5 + const halfStep = step / 2 + const startAngle = -Math.PI / 2 + const parts: string[] = [] + for (let i = 0; i < 5; i++) { + const outerAngle = startAngle + step * i + const innerAngle = outerAngle + halfStep + const ox = cx + swd2 * Math.cos(outerAngle) + const oy = svc + shd2 * Math.sin(outerAngle) + const ix = cx + iwd2 * Math.cos(innerAngle) + const iy = svc + ihd2 * Math.sin(innerAngle) + parts.push(i === 0 ? `M${ox},${oy}` : `L${ox},${oy}`) + parts.push(`L${ix},${iy}`) + } + parts.push('Z') + return parts.join(' ') +}) +presetShapes.set('star6', (w, h, adjustments) => { + // OOXML: adj=28868, hf=115470 — horizontal scaling factor + const aRaw = adjustments?.get('adj') ?? 28868 + const a = Math.min(Math.max(aRaw, 0), 50000) + const hf = 115470 + const swd2 = ((w / 2) * hf) / 100000 + const shd2 = h / 2 // no vf for star6 + const iwd2 = (swd2 * a) / 50000 + const ihd2 = (shd2 * a) / 50000 + const cx = w / 2 + const cy = h / 2 + const step = (2 * Math.PI) / 6 + const halfStep = step / 2 + const startAngle = -Math.PI / 2 + const parts: string[] = [] + for (let i = 0; i < 6; i++) { + const outerAngle = startAngle + step * i + const innerAngle = outerAngle + halfStep + const ox = cx + swd2 * Math.cos(outerAngle) + const oy = cy + shd2 * Math.sin(outerAngle) + const ix = cx + iwd2 * Math.cos(innerAngle) + const iy = cy + ihd2 * Math.sin(innerAngle) + parts.push(i === 0 ? 
`M${ox},${oy}` : `L${ox},${oy}`) + parts.push(`L${ix},${iy}`) + } + parts.push('Z') + return parts.join(' ') +}) +presetShapes.set('star7', (w, h, adjustments) => { + // OOXML star7: adj=34601, hf=102572, vf=105210 — center shifted to svc + const aRaw = adjustments?.get('adj') ?? 34601 + const a = Math.min(Math.max(aRaw, 0), 50000) + const swd2 = ((w / 2) * 102572) / 100000 + const shd2 = ((h / 2) * 105210) / 100000 + const svc = shd2 // = vc * vf/100000 so top vertex at svc - shd2 = 0 + const iwd2 = (swd2 * a) / 50000 + const ihd2 = (shd2 * a) / 50000 + const cx = w / 2 + const step = (2 * Math.PI) / 7 + const halfStep = step / 2 + const startAngle = -Math.PI / 2 + const parts: string[] = [] + for (let i = 0; i < 7; i++) { + const outerAngle = startAngle + step * i + const innerAngle = outerAngle + halfStep + const ox = cx + swd2 * Math.cos(outerAngle) + const oy = svc + shd2 * Math.sin(outerAngle) + const ix = cx + iwd2 * Math.cos(innerAngle) + const iy = svc + ihd2 * Math.sin(innerAngle) + parts.push(i === 0 ? `M${ox},${oy}` : `L${ox},${oy}`) + parts.push(`L${ix},${iy}`) + } + parts.push('Z') + return parts.join(' ') +}) +presetShapes.set('star8', (w, h, adjustments) => { + // OOXML: iwd2 = wd2 * adj / 50000. adj default=37500 → innerRatio = 37500/50000 = 0.75 + // adj() divides by 100000, so we multiply by 2 to get adj/50000. + const a = adj(adjustments, 'adj', 37500) * 2 + return starShape(w, h, 8, Math.min(Math.max(a, 0), 1)) +}) +presetShapes.set('star10', (w, h, adjustments) => { + // OOXML: adj=42533, hf=105146 — horizontal scaling factor + const aRaw = adjustments?.get('adj') ?? 
42533 + const a = Math.min(Math.max(aRaw, 0), 50000) + const hf = 105146 + const swd2 = ((w / 2) * hf) / 100000 + const shd2 = h / 2 // no vf for star10 + const iwd2 = (swd2 * a) / 50000 + const ihd2 = (shd2 * a) / 50000 + const cx = w / 2 + const cy = h / 2 + const step = (2 * Math.PI) / 10 + const halfStep = step / 2 + const startAngle = -Math.PI / 2 + const parts: string[] = [] + for (let i = 0; i < 10; i++) { + const outerAngle = startAngle + step * i + const innerAngle = outerAngle + halfStep + const ox = cx + swd2 * Math.cos(outerAngle) + const oy = cy + shd2 * Math.sin(outerAngle) + const ix = cx + iwd2 * Math.cos(innerAngle) + const iy = cy + ihd2 * Math.sin(innerAngle) + parts.push(i === 0 ? `M${ox},${oy}` : `L${ox},${oy}`) + parts.push(`L${ix},${iy}`) + } + parts.push('Z') + return parts.join(' ') +}) +presetShapes.set('star12', (w, h, adjustments) => { + // OOXML default adj=37500 → innerRatio = 0.75 + const a = adj(adjustments, 'adj', 37500) * 2 + return starShape(w, h, 12, Math.min(Math.max(a, 0), 1)) +}) +presetShapes.set('star16', (w, h, adjustments) => { + // OOXML default adj=37500 → innerRatio = 0.75 + const a = adj(adjustments, 'adj', 37500) * 2 + return starShape(w, h, 16, Math.min(Math.max(a, 0), 1)) +}) +presetShapes.set('star24', (w, h, adjustments) => { + // OOXML default adj=37500 → innerRatio = 0.75 + const a = adj(adjustments, 'adj', 37500) * 2 + return starShape(w, h, 24, Math.min(Math.max(a, 0), 1)) +}) +presetShapes.set('star32', (w, h, adjustments) => { + // OOXML default adj=37500 → innerRatio = 0.75 + const a = adj(adjustments, 'adj', 37500) * 2 + return starShape(w, h, 32, Math.min(Math.max(a, 0), 1)) +}) + +// ===== Lines & Connectors ===== + +// OOXML line: diagonal (0,0→w,h) when both extents are non-zero. +// Keep explicit horizontal/vertical handling for zero-extent cases so 1px SVGs remain visible. 
+presetShapes.set('line', (w, h) => { + const safeH = h || 1 + const safeW = w || 1 + if (w === 0) return `M0.5,0 L0.5,${safeH}` + if (h === 0) return `M0,0.5 L${safeW},0.5` + return `M0,0 L${w},${h}` +}) + +// Inverse diagonal line (top-right to bottom-left). +presetShapes.set('lineInv', (w, h) => { + const safeH = h || 1 + const safeW = w || 1 + if (w === 0) return `M0.5,0 L0.5,${safeH}` + if (h === 0) return `M0,0.5 L${safeW},0.5` + return `M${w},0 L0,${h}` +}) + +// When one dimension is 0, draw horizontal or vertical line (same as 'line') so gradient and stroke are correct +presetShapes.set('straightConnector1', (w, h) => { + const safeH = h || 1 + const safeW = w || 1 + if (w === 0) return `M0.5,0 L0.5,${safeH}` + if (h === 0) return `M0,0.5 L${safeW},0.5` + return `M0,0 L${w},${h}` +}) + +presetShapes.set('bentConnector2', (w, h) => `M0,0 L${w},0 L${w},${h}`) + +presetShapes.set('bentConnector3', (w, h, adjustments) => { + const a = adj(adjustments, 'adj1', 50000) + const midX = w * a + return `M0,0 L${midX},0 L${midX},${h} L${w},${h}` +}) + +presetShapes.set('bentConnector4', (w, h, adjustments) => { + const a1 = adj(adjustments, 'adj1', 50000) + const a2 = adj(adjustments, 'adj2', 50000) + const midX = w * a1 + const midY = h * a2 + return `M0,0 L${midX},0 L${midX},${midY} L${w},${midY} L${w},${h}` +}) + +presetShapes.set('curvedConnector2', (w, h) => { + return `M0,0 C${w},0 0,${h} ${w},${h}` +}) + +presetShapes.set('curvedConnector3', (w, h, adjustments) => { + // OOXML: two cubic Bezier segments joined at midpoint (x2, vc) + const x2 = w * adj(adjustments, 'adj1', 50000) + const x1 = x2 / 2 // +/ l x2 2 + const x3 = (w + x2) / 2 // +/ r x2 2 + const vc = h / 2 + const hd4 = h / 4 + const y3 = (h * 3) / 4 + return `M0,0 C${x1},0 ${x2},${hd4} ${x2},${vc} C${x2},${y3} ${x3},${h} ${w},${h}` +}) + +presetShapes.set('curvedConnector4', (w, h, adjustments) => { + // OOXML: three cubic Bezier segments + const x2 = w * adj(adjustments, 'adj1', 50000) + const y4 
= h * adj(adjustments, 'adj2', 50000) + const x1 = x2 / 2 // +/ l x2 2 + const x3 = (w + x2) / 2 // +/ r x2 2 + const x4 = (x2 + x3) / 2 // +/ x2 x3 2 + const x5 = (x3 + w) / 2 // +/ x3 r 2 + const y1 = y4 / 2 // +/ t y4 2 + const y2 = y1 / 2 // +/ t y1 2 + const y3 = (y1 + y4) / 2 // +/ y1 y4 2 + const y5 = (h + y4) / 2 // +/ b y4 2 + return [ + `M0,0`, + `C${x1},0 ${x2},${y2} ${x2},${y1}`, + `C${x2},${y3} ${x4},${y4} ${x3},${y4}`, + `C${x5},${y4} ${w},${y5} ${w},${h}`, + ].join(' ') +}) + +presetShapes.set('curvedConnector5', (w, h, adjustments) => { + // OOXML: four cubic Bezier segments + const x3 = w * adj(adjustments, 'adj1', 50000) + const y4 = h * adj(adjustments, 'adj2', 50000) + const x6 = w * adj(adjustments, 'adj3', 50000) + const x1 = (x3 + x6) / 2 // +/ x3 x6 2 + const x2 = x3 / 2 // +/ l x3 2 + const x4 = (x3 + x1) / 2 // +/ x3 x1 2 + const x5 = (x6 + x1) / 2 // +/ x6 x1 2 + const x7 = (x6 + w) / 2 // +/ x6 r 2 + const y1 = y4 / 2 // +/ t y4 2 + const y2 = y1 / 2 // +/ t y1 2 + const y3 = (y1 + y4) / 2 // +/ y1 y4 2 + const y5 = (h + y4) / 2 // +/ b y4 2 + const y6 = (y5 + y4) / 2 // +/ y5 y4 2 + const y7 = (y5 + h) / 2 // +/ y5 b 2 + return [ + `M0,0`, + `C${x2},0 ${x3},${y2} ${x3},${y1}`, + `C${x3},${y3} ${x4},${y4} ${x1},${y4}`, + `C${x5},${y4} ${x6},${y6} ${x6},${y5}`, + `C${x6},${y7} ${x7},${h} ${w},${h}`, + ].join(' ') +}) + +presetShapes.set('bentConnector5', (w, h, adjustments) => { + const a1 = adj(adjustments, 'adj1', 50000) + const a2 = adj(adjustments, 'adj2', 50000) + const a3 = adj(adjustments, 'adj3', 50000) + const x1 = w * a1 + const y1 = h * a2 + const x2 = w * a3 + return `M0,0 L${x1},0 L${x1},${y1} L${x2},${y1} L${x2},${h} L${w},${h}` +}) + +// ===== Arrow Shapes ===== + +presetShapes.set('rightArrow', (w, h, adjustments) => { + const a1 = adj(adjustments, 'adj1', 50000) // shaft width ratio + const a2 = adj(adjustments, 'adj2', 50000) // head length ratio + const ss = Math.min(w, h) // OOXML uses short side for head length + 
const shaftHalfH = (h * a1) / 2 + const headLen = ss * a2 + const cy = h / 2 + const shaftEnd = w - headLen + return [ + `M0,${cy - shaftHalfH}`, + `L${shaftEnd},${cy - shaftHalfH}`, + `L${shaftEnd},0`, + `L${w},${cy}`, + `L${shaftEnd},${h}`, + `L${shaftEnd},${cy + shaftHalfH}`, + `L0,${cy + shaftHalfH}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('leftArrow', (w, h, adjustments) => { + const a1 = adj(adjustments, 'adj1', 50000) + const a2 = adj(adjustments, 'adj2', 50000) + const ss = Math.min(w, h) + const shaftHalfH = (h * a1) / 2 + const headLen = ss * a2 + const cy = h / 2 + return [ + `M${w},${cy - shaftHalfH}`, + `L${headLen},${cy - shaftHalfH}`, + `L${headLen},0`, + `L0,${cy}`, + `L${headLen},${h}`, + `L${headLen},${cy + shaftHalfH}`, + `L${w},${cy + shaftHalfH}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('upArrow', (w, h, adjustments) => { + const a1 = adj(adjustments, 'adj1', 50000) + const a2 = adj(adjustments, 'adj2', 50000) + const shaftHalfW = (w * a1) / 2 + const headLen = h * a2 + const cx = w / 2 + return [ + `M${cx - shaftHalfW},${h}`, + `L${cx - shaftHalfW},${headLen}`, + `L0,${headLen}`, + `L${cx},0`, + `L${w},${headLen}`, + `L${cx + shaftHalfW},${headLen}`, + `L${cx + shaftHalfW},${h}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('downArrow', (w, h, adjustments) => { + const a1 = adj(adjustments, 'adj1', 50000) + const a2 = adj(adjustments, 'adj2', 50000) + const shaftHalfW = (w * a1) / 2 + const headLen = h * a2 + const cx = w / 2 + const shaftEnd = h - headLen + return [ + `M${cx - shaftHalfW},0`, + `L${cx + shaftHalfW},0`, + `L${cx + shaftHalfW},${shaftEnd}`, + `L${w},${shaftEnd}`, + `L${cx},${h}`, + `L0,${shaftEnd}`, + `L${cx - shaftHalfW},${shaftEnd}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('downArrowCallout', (w, h, adjustments) => { + // ECMA-like callout geometry (4 adjustments). + const adj1 = adjustments?.get('adj1') ?? 25000 + const adj2 = adjustments?.get('adj2') ?? 25000 + const adj3 = adjustments?.get('adj3') ?? 
25000 + const adj4 = adjustments?.get('adj4') ?? 64977 + const ss = Math.min(w, h) + const a2 = Math.max(0, Math.min(adj2, (50000 * w) / Math.max(ss, 1))) + const a1 = Math.max(0, Math.min(adj1, a2 * 2)) + const a3 = Math.max(0, Math.min(adj3, (100000 * h) / Math.max(ss, 1))) + const q2 = (a3 * ss) / Math.max(h, 1) + const a4 = Math.max(0, Math.min(adj4, 100000 - q2)) + const hc = w / 2 + const dx1 = (ss * a2) / 100000 + const dx2 = (ss * a1) / 200000 + const x1 = hc - dx1 + const x2 = hc - dx2 + const x3 = hc + dx2 + const x4 = hc + dx1 + const y3 = h - (ss * a3) / 100000 + const y2 = (h * a4) / 100000 + return [ + `M0,0`, + `L${w},0`, + `L${w},${y2}`, + `L${x3},${y2}`, + `L${x3},${y3}`, + `L${x4},${y3}`, + `L${hc},${h}`, + `L${x1},${y3}`, + `L${x2},${y3}`, + `L${x2},${y2}`, + `L0,${y2}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('rightArrowCallout', (w, h, adjustments) => { + // OOXML: Rectangle body + right-pointing arrowhead (11-point polygon, 4 adj) + const ss = Math.min(w, h) + const maxAdj2 = (50000 * h) / Math.max(ss, 1) + const a2 = Math.max(0, Math.min(adjustments?.get('adj2') ?? 25000, maxAdj2)) + const a1 = Math.max(0, Math.min(adjustments?.get('adj1') ?? 25000, a2 * 2)) + const maxAdj3 = (100000 * w) / Math.max(ss, 1) + const a3 = Math.max(0, Math.min(adjustments?.get('adj3') ?? 25000, maxAdj3)) + const q2 = (a3 * ss) / Math.max(w, 1) + const a4 = Math.max(0, Math.min(adjustments?.get('adj4') ?? 
64977, 100000 - q2)) + const vc = h / 2 + const dy1 = (ss * a2) / 100000 + const dy2 = (ss * a1) / 200000 + const y1 = vc - dy1 + const y2 = vc - dy2 + const y3 = vc + dy2 + const y4 = vc + dy1 + const dx3 = (ss * a3) / 100000 + const x3 = w - dx3 + const x2 = (w * a4) / 100000 + return [ + `M0,0`, + `L${x2},0`, + `L${x2},${y2}`, + `L${x3},${y2}`, + `L${x3},${y1}`, + `L${w},${vc}`, + `L${x3},${y4}`, + `L${x3},${y3}`, + `L${x2},${y3}`, + `L${x2},${h}`, + `L0,${h}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('leftArrowCallout', (w, h, adjustments) => { + // OOXML: Mirror of rightArrowCallout — arrowhead points left + const ss = Math.min(w, h) + const maxAdj2 = (50000 * h) / Math.max(ss, 1) + const a2 = Math.max(0, Math.min(adjustments?.get('adj2') ?? 25000, maxAdj2)) + const a1 = Math.max(0, Math.min(adjustments?.get('adj1') ?? 25000, a2 * 2)) + const maxAdj3 = (100000 * w) / Math.max(ss, 1) + const a3 = Math.max(0, Math.min(adjustments?.get('adj3') ?? 25000, maxAdj3)) + const q2 = (a3 * ss) / Math.max(w, 1) + const a4 = Math.max(0, Math.min(adjustments?.get('adj4') ?? 64977, 100000 - q2)) + const vc = h / 2 + const dy1 = (ss * a2) / 100000 + const dy2 = (ss * a1) / 200000 + const y1 = vc - dy1 + const y2 = vc - dy2 + const y3 = vc + dy2 + const y4 = vc + dy1 + const x1 = (ss * a3) / 100000 + const dx2 = (w * a4) / 100000 + const x2 = w - dx2 + return [ + `M0,${vc}`, + `L${x1},${y1}`, + `L${x1},${y2}`, + `L${x2},${y2}`, + `L${x2},0`, + `L${w},0`, + `L${w},${h}`, + `L${x2},${h}`, + `L${x2},${y3}`, + `L${x1},${y3}`, + `L${x1},${y4}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('upArrowCallout', (w, h, adjustments) => { + // OOXML: Vertical variant — arrowhead points up + const ss = Math.min(w, h) + const maxAdj2 = (50000 * w) / Math.max(ss, 1) + const a2 = Math.max(0, Math.min(adjustments?.get('adj2') ?? 25000, maxAdj2)) + const a1 = Math.max(0, Math.min(adjustments?.get('adj1') ?? 
25000, a2 * 2)) + const maxAdj3 = (100000 * h) / Math.max(ss, 1) + const a3 = Math.max(0, Math.min(adjustments?.get('adj3') ?? 25000, maxAdj3)) + const q2 = (a3 * ss) / Math.max(h, 1) + const a4 = Math.max(0, Math.min(adjustments?.get('adj4') ?? 64977, 100000 - q2)) + const hc = w / 2 + const dx1 = (ss * a2) / 100000 + const dx2 = (ss * a1) / 200000 + const x1 = hc - dx1 + const x2 = hc - dx2 + const x3 = hc + dx2 + const x4 = hc + dx1 + const y1 = (ss * a3) / 100000 + const dy2 = (h * a4) / 100000 + const y2 = h - dy2 + return [ + `M0,${y2}`, + `L${x2},${y2}`, + `L${x2},${y1}`, + `L${x1},${y1}`, + `L${hc},0`, + `L${x4},${y1}`, + `L${x3},${y1}`, + `L${x3},${y2}`, + `L${w},${y2}`, + `L${w},${h}`, + `L0,${h}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('upDownArrowCallout', (w, h, adjustments) => { + // OOXML spec: 4 adjustments + const adj1Raw = adjustments?.get('adj1') ?? 25000 + const adj2Raw = adjustments?.get('adj2') ?? 25000 + const adj3Raw = adjustments?.get('adj3') ?? 25000 + const adj4Raw = adjustments?.get('adj4') ?? 
48123 + const ss = Math.min(w, h) + const a2 = Math.max(0, Math.min(adj2Raw, (50000 * w) / Math.max(ss, 1))) + const a1 = Math.max(0, Math.min(adj1Raw, a2 * 2)) + const a3 = Math.max(0, Math.min(adj3Raw, (50000 * h) / Math.max(ss, 1))) + const q2 = (a3 * ss) / Math.max(h, 1) + const a4 = Math.max(0, Math.min(adj4Raw, 100000 - q2 - q2)) + const dx1 = (ss * a2) / 100000 + const dx2 = (ss * a1) / 200000 + const hc = w / 2 + const x1 = hc - dx1 + const x2 = hc - dx2 + const x3 = hc + dx2 + const x4 = hc + dx1 + const y1 = (ss * a3) / 100000 + const dy2 = (h * a4) / 200000 + const y2 = h / 2 - dy2 + const y3 = h / 2 + dy2 + const y4 = h - y1 + return [ + `M${hc},0`, + `L${x4},${y1}`, + `L${x3},${y1}`, + `L${x3},${y2}`, + `L${w},${y2}`, + `L${w},${y3}`, + `L${x3},${y3}`, + `L${x3},${y4}`, + `L${x4},${y4}`, + `L${hc},${h}`, + `L${x1},${y4}`, + `L${x2},${y4}`, + `L${x2},${y3}`, + `L0,${y3}`, + `L0,${y2}`, + `L${x2},${y2}`, + `L${x2},${y1}`, + `L${x1},${y1}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('leftRightArrowCallout', (w, h, adjustments) => { + // OOXML spec: 4 adjustments + const adj1Raw = adjustments?.get('adj1') ?? 25000 + const adj2Raw = adjustments?.get('adj2') ?? 25000 + const adj3Raw = adjustments?.get('adj3') ?? 25000 + const adj4Raw = adjustments?.get('adj4') ?? 
48123 + const ss = Math.min(w, h) + const a2 = Math.max(0, Math.min(adj2Raw, (50000 * h) / Math.max(ss, 1))) + const a1 = Math.max(0, Math.min(adj1Raw, a2 * 2)) + const a3 = Math.max(0, Math.min(adj3Raw, (50000 * w) / Math.max(ss, 1))) + const q2 = (a3 * ss) / Math.max(w, 1) + const a4 = Math.max(0, Math.min(adj4Raw, 100000 - q2 - q2)) + const dy1 = (ss * a2) / 100000 + const dy2 = (ss * a1) / 200000 + const vc = h / 2 + const y1 = vc - dy1 + const y2 = vc - dy2 + const y3 = vc + dy2 + const y4 = vc + dy1 + const x1 = (ss * a3) / 100000 + const dx2 = (w * a4) / 200000 + const x2 = w / 2 - dx2 + const x3 = w / 2 + dx2 + const x4 = w - x1 + return [ + `M0,${vc}`, + `L${x1},${y1}`, + `L${x1},${y2}`, + `L${x2},${y2}`, + `L${x2},0`, + `L${x3},0`, + `L${x3},${y2}`, + `L${x4},${y2}`, + `L${x4},${y1}`, + `L${w},${vc}`, + `L${x4},${y4}`, + `L${x4},${y3}`, + `L${x3},${y3}`, + `L${x3},${h}`, + `L${x2},${h}`, + `L${x2},${y3}`, + `L${x1},${y3}`, + `L${x1},${y4}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('uturnArrow', (w, h, adjustments) => { + // ECMA-like U-turn arrow geometry (5 adjustments). + const adj1 = adjustments?.get('adj1') ?? 25000 + const adj2 = adjustments?.get('adj2') ?? 25000 + const adj3 = adjustments?.get('adj3') ?? 25000 + const adj4 = adjustments?.get('adj4') ?? 43750 + const adj5 = adjustments?.get('adj5') ?? 
75000 + const ss = Math.min(w, h) + const a2 = Math.max(0, Math.min(adj2, 25000)) + const a1 = Math.max(0, Math.min(adj1, a2 * 2)) + const q2 = (a1 * ss) / Math.max(h, 1) + const q3 = 100000 - q2 + const a3 = Math.max(0, Math.min(adj3, (q3 * h) / Math.max(ss, 1))) + const minAdj5 = ((a3 + a1) * ss) / Math.max(h, 1) + const a5 = Math.max(minAdj5, Math.min(adj5, 100000)) + + const th = (ss * a1) / 100000 + const aw2 = (ss * a2) / 100000 + const th2 = th / 2 + const dh2 = aw2 - th2 + const y5 = (h * a5) / 100000 + const ah = (ss * a3) / 100000 + const y4 = y5 - ah + const x9 = w - dh2 + const bs = Math.min(x9 / 2, y4) + const a4 = Math.max(0, Math.min(adj4, (100000 * bs) / Math.max(ss, 1))) + const bd = (ss * a4) / 100000 + const bd2 = Math.max(bd - th, 0) + const x3 = th + bd2 + const x8 = w - aw2 + const x6 = x8 - aw2 + const x7 = x6 + dh2 + const x4 = x9 - bd + const x5 = x7 - bd2 + + return [ + `M0,${h}`, + `L0,${bd}`, + bd > 0.1 ? `A${bd},${bd} 0 0,1 ${bd},0` : `L0,0`, + `L${x4},0`, + bd > 0.1 ? `A${bd},${bd} 0 0,1 ${x9},${bd}` : `L${x9},0`, + `L${x9},${y4}`, + `L${w},${y4}`, + `L${x8},${y5}`, + `L${x6},${y4}`, + `L${x7},${y4}`, + `L${x7},${x3}`, + bd2 > 0.1 ? `A${bd2},${bd2} 0 0,0 ${x5},${th}` : `L${x5},${th}`, + `L${x3},${th}`, + bd2 > 0.1 ? `A${bd2},${bd2} 0 0,0 ${th},${x3}` : `L${th},${x3}`, + `L${th},${h}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('leftRightArrow', (w, h, adjustments) => { + // OOXML: adj1=50000 (shaft width), adj2=50000 (head length based on ss) + const ss = Math.min(w, h) + const hd2 = h / 2 + const maxAdj2 = ss > 0 ? (50000 * w) / ss : 0 + const a1 = Math.min(Math.max(adjustments?.get('adj1') ?? 50000, 0), 100000) + const a2 = Math.min(Math.max(adjustments?.get('adj2') ?? 50000, 0), maxAdj2) + const x2 = (ss * a2) / 100000 + const x3 = w - x2 + const dy = (h * a1) / 200000 + const vc = hd2 + const y1 = vc - dy + const y2 = vc + dy + const dx1 = hd2 > 0 ? 
(y1 * x2) / hd2 : 0 + const _x1 = x2 - dx1 + const _x4 = x3 + dx1 + return [ + `M0,${vc}`, + `L${x2},0`, + `L${x2},${y1}`, + `L${x3},${y1}`, + `L${x3},0`, + `L${w},${vc}`, + `L${x3},${h}`, + `L${x3},${y2}`, + `L${x2},${y2}`, + `L${x2},${h}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('leftUpArrow', (w, h, adjustments) => { + // OOXML preset formula (presetShapeDefinitions.xml -> leftUpArrow) + const rawAdj2 = Math.max(0, Math.min(adjustments?.get('adj2') ?? 25000, 50000)) + const maxAdj1 = rawAdj2 * 2 + const rawAdj1 = Math.max(0, Math.min(adjustments?.get('adj1') ?? 25000, maxAdj1)) + const maxAdj3 = 100000 - maxAdj1 + const rawAdj3 = Math.max(0, Math.min(adjustments?.get('adj3') ?? 25000, maxAdj3)) + + const ss = Math.min(w, h) + const x1 = (ss * rawAdj3) / 100000 + const dx2 = (ss * rawAdj2) / 50000 + const x2 = w - dx2 + const y2 = h - dx2 + const dx4 = (ss * rawAdj2) / 100000 + const x4 = w - dx4 + const y4 = h - dx4 + const dx3 = (ss * rawAdj1) / 200000 + const x3 = x4 - dx3 + const x5 = x4 + dx3 + const y3 = y4 - dx3 + const y5 = y4 + dx3 + + return [ + `M0,${y4}`, + `L${x1},${y2}`, + `L${x1},${y3}`, + `L${x3},${y3}`, + `L${x3},${x1}`, + `L${x2},${x1}`, + `L${x4},0`, + `L${w},${x1}`, + `L${x5},${x1}`, + `L${x5},${y5}`, + `L${x1},${y5}`, + `L${x1},${h}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('upDownArrow', (w, h, adjustments) => { + // OOXML spec: adj1=50000 (shaft width), adj2=50000 (head length on ss) + const adj1Raw = adjustments?.get('adj1') ?? 50000 + const adj2Raw = adjustments?.get('adj2') ?? 
50000 + const ss = Math.min(w, h) + const maxAdj2 = (50000 * h) / Math.max(ss, 1) + const a2 = Math.max(0, Math.min(adj2Raw, maxAdj2)) + const a1 = Math.max(0, Math.min(adj1Raw, 100000)) + const dx1 = (ss * a1) / 200000 // shaft half-width + const dy = (ss * a2) / 100000 // arrowhead length + const hc = w / 2 + return [ + `M${hc},0`, + `L${w},${dy}`, + `L${hc + dx1},${dy}`, + `L${hc + dx1},${h - dy}`, + `L${w},${h - dy}`, + `L${hc},${h}`, + `L0,${h - dy}`, + `L${hc - dx1},${h - dy}`, + `L${hc - dx1},${dy}`, + `L0,${dy}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('notchedRightArrow', (w, h, adjustments) => { + const a1 = adj(adjustments, 'adj1', 50000) // shaft width ratio + const a2 = adj(adjustments, 'adj2', 50000) // head length ratio + const ss = Math.min(w, h) // OOXML uses short side for head length + const shaftHalfH = (h * a1) / 2 + const headLen = ss * a2 + const cy = h / 2 + const shaftEnd = w - headLen + // Notch depth: OOXML formula dxn = dy1 * dx2 / hd2 = shaftHalfH * headLen / (h/2) + const notchDepth = cy > 0 ? 
(shaftHalfH * headLen) / cy : 0 + return [ + `M0,${cy - shaftHalfH}`, + `L${shaftEnd},${cy - shaftHalfH}`, + `L${shaftEnd},0`, + `L${w},${cy}`, + `L${shaftEnd},${h}`, + `L${shaftEnd},${cy + shaftHalfH}`, + `L0,${cy + shaftHalfH}`, + `L${notchDepth},${cy}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('chevron', (w, h, adjustments) => { + const a = adj(adjustments, 'adj', 50000) + const ss = Math.min(w, h) + const offset = ss * a + return [ + `M0,0`, + `L${w - offset},0`, + `L${w},${h / 2}`, + `L${w - offset},${h}`, + `L0,${h}`, + `L${offset},${h / 2}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('homePlate', (w, h, adjustments) => { + const a = adj(adjustments, 'adj', 50000) + const ss = Math.min(w, h) + const offset = ss * a + const shoulderX = w - offset + return [`M0,0`, `L${shoulderX},0`, `L${w},${h / 2}`, `L${shoulderX},${h}`, `L0,${h}`, 'Z'].join( + ' ' + ) +}) + +presetShapes.set('stripedRightArrow', (w, h, adjustments) => { + // OOXML: adj1=50000, adj2=50000 (max 84375). Stripes at ssd32, ssd16-ssd8, x4=ss*5/32. + const ss = Math.min(w, h) + const maxAdj2 = ss > 0 ? 
(84375 * w) / ss : 84375 + const a1 = Math.min(Math.max(adjRaw(adjustments, 'adj1', 50000), 0), 100000) + const a2 = Math.min(Math.max(adjRaw(adjustments, 'adj2', 50000), 0), maxAdj2) + const dy1 = (h * a1) / 200000 + const dx5 = (ss * a2) / 100000 + const x5 = w - dx5 + const vc = h / 2 + const y1 = vc - dy1 + const y2 = vc + dy1 + const ssd32 = ss / 32 + const ssd16 = ss / 16 + const ssd8 = ss / 8 + const x4 = (ss * 5) / 32 + return [ + // Stripe 1: 0 to ssd32 + `M0,${y1} L${ssd32},${y1} L${ssd32},${y2} L0,${y2} Z`, + // Stripe 2: ssd16 to ssd8 + `M${ssd16},${y1} L${ssd8},${y1} L${ssd8},${y2} L${ssd16},${y2} Z`, + // Main body + arrowhead: x4 to r + `M${x4},${y1}`, + `L${x5},${y1}`, + `L${x5},0`, + `L${w},${vc}`, + `L${x5},${h}`, + `L${x5},${y2}`, + `L${x4},${y2}`, + 'Z', + ].join(' ') +}) + +// ===== Bent / Curved / Special Arrows ===== + +presetShapes.set('bentArrow', (w, h, adjustments) => { + // OOXML bentArrow: L-shaped arrow with rounded bend, arrowhead pointing right. + // Uses 4 adjustments per ECMA-376 spec. + const ss = Math.min(w, h) + + // Constrained adjustments (raw values, not fractions — we do our own math) + const adj2Raw = Math.max(0, Math.min(adjustments?.get('adj2') ?? 25000, 50000)) + const maxAdj1 = adj2Raw * 2 + const adj1Raw = Math.max(0, Math.min(adjustments?.get('adj1') ?? 25000, maxAdj1)) + const adj3Raw = Math.max(0, Math.min(adjustments?.get('adj3') ?? 25000, 50000)) + + const th = (ss * adj1Raw) / 100000 // shaft width + const aw2 = (ss * adj2Raw) / 100000 // arrowhead half-width + const th2 = th / 2 + const dh2 = aw2 - th2 // arrowhead extension beyond shaft + const ah = (ss * adj3Raw) / 100000 // arrowhead length + + const bw = w - ah + const bh = h - dh2 + const bs = Math.min(bw, bh) + const maxAdj4 = bs > 0 ? (100000 * bs) / ss : 0 + const adj4Raw = Math.max(0, Math.min(adjustments?.get('adj4') ?? 
43750, maxAdj4)) + const bd = (ss * adj4Raw) / 100000 // outer bend radius + + const bd2 = Math.max(bd - th, 0) // inner bend radius + const x3 = th + bd2 + const x4 = w - ah + + const y3 = dh2 + th + const y4 = y3 + dh2 + const y5 = dh2 + bd + + // OOXML arcTo: from current point, arc with radii (wR, hR), start angle stAng, sweep swAng. + // Arc 1: outer bend — from (0, y5), radii=bd, 180°→270° (sweep +90°) + // Center of arc is at (bd, y5) relative, endpoint at (bd, y5-bd) = (bd, dh2) + // SVG: A bd,bd 0 0,1 bd,dh2 + // Arc 2: inner bend — from (x3, y3), radii=bd2, 270°→180° (sweep -90°) + // Center at (x3, y3+bd2), endpoint at (x3-bd2, y3+bd2) = (th, y3+bd2) + // SVG: A bd2,bd2 0 0,0 th,y6 where y6 = y3+bd2 + + const y6 = y3 + bd2 + + const parts: string[] = [ + `M0,${h}`, // bottom-left + `L0,${y5}`, // up left edge to arc start + ] + + // Outer arc (rounded bend, going from left edge up to top edge) + if (bd > 0.1) { + parts.push(`A${bd},${bd} 0 0,1 ${bd},${dh2}`) + } else { + parts.push(`L0,${dh2}`) // degenerate: straight corner + } + + parts.push( + `L${x4},${dh2}`, // horizontal to arrowhead base (top) + `L${x4},0`, // up to arrowhead top-left wing + `L${w},${aw2}`, // arrowhead tip (pointing right) + `L${x4},${y4}`, // arrowhead bottom wing + `L${x4},${y3}`, // back to arrowhead base (bottom) + `L${x3},${y3}` // horizontal back toward bend + ) + + // Inner arc (rounded bend, going from top down to right side of shaft) + if (bd2 > 0.1) { + parts.push(`A${bd2},${bd2} 0 0,0 ${th},${y6}`) + } else { + parts.push(`L${th},${y3}`) // degenerate: straight corner + } + + parts.push( + `L${th},${h}`, // down right side of shaft to bottom + 'Z' + ) + + return parts.join(' ') +}) + +presetShapes.set('bentUpArrow', (w, h, adjustments) => { + // OOXML preset formula (presetShapeDefinitions.xml -> bentUpArrow): + // x/y variables are solved from adj1/2/3 in [0..50000], ss=min(w,h). + const raw1 = Math.max(0, Math.min(adjustments?.get('adj1') ?? 
25000, 50000)) + const raw2 = Math.max(0, Math.min(adjustments?.get('adj2') ?? 25000, 50000)) + const raw3 = Math.max(0, Math.min(adjustments?.get('adj3') ?? 25000, 50000)) + const ss = Math.min(w, h) + + const y1 = (ss * raw3) / 100000 + const dx1 = (ss * raw2) / 50000 + const x1 = w - dx1 + const dx3 = (ss * raw2) / 100000 + const x3 = w - dx3 + const dx2 = (ss * raw1) / 200000 + const x2 = x3 - dx2 + const x4 = x3 + dx2 + const dy2 = (ss * raw1) / 100000 + const y2 = h - dy2 + + return [ + `M0,${y2}`, + `L${x2},${y2}`, + `L${x2},${y1}`, + `L${x1},${y1}`, + `L${x3},0`, + `L${w},${y1}`, + `L${x4},${y1}`, + `L${x4},${h}`, + `L0,${h}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('curvedRightArrow', (w, h, adjustments) => { + // Keep geometry aligned with OOXML preset math. Use local arc helper here + // because preset formulas mix positive/negative sweeps that do not map 1:1 + // to the generic shapeArc() helper used in other shapes. + const adj1Raw = adjustments?.get('adj1') ?? 25000 + const adj2Raw = adjustments?.get('adj2') ?? 50000 + const adj3Raw = adjustments?.get('adj3') ?? 
// --- continuation of curvedRightArrow (definition opens above this chunk) ---
  25000

  // OOXML guide constants: 50% / 100% in 1/1000-percent units.
  const cnstVal1 = 50000
  const cnstVal2 = 100000

  const hd2 = h / 2
  const r = w
  const b = h
  const l = 0
  const c3d4 = 270
  const cd2 = 180
  const cd4 = 90
  // ss = shortest side, floored at 1 so the guide divisions below never hit 0.
  const ss = Math.max(Math.min(w, h), 1)

  // Clamp adjustments: a2 is bounded by the shape's aspect, a1 by a2.
  const maxAdj2 = (cnstVal1 * h) / ss
  const a2 = Math.max(0, Math.min(adj2Raw, maxAdj2))
  const a1 = Math.max(0, Math.min(adj1Raw, a2))
  const th = (ss * a1) / cnstVal2
  const aw = (ss * a2) / cnstVal2
  const q1 = (th + aw) / 4
  const hR = hd2 - q1
  const q7 = hR * 2
  const q8 = q7 * q7
  const q9 = th * th
  const q10 = Math.max(q8 - q9, 0)
  const q11 = Math.sqrt(q10)
  // 1e-6 guards degenerate (zero-height / zero-width) shapes.
  const iDx = (q11 * w) / Math.max(q7, 1e-6)
  const maxAdj3 = (cnstVal2 * iDx) / ss
  const a3 = Math.max(0, Math.min(adj3Raw, maxAdj3))
  const ah = (ss * a3) / cnstVal2
  const y3 = hR + th
  const q2 = w * w
  const q3 = ah * ah
  const q4 = Math.max(q2 - q3, 0)
  const q5 = Math.sqrt(q4)
  const dy = (q5 * hR) / Math.max(w, 1e-6)
  const y5 = hR + dy
  const y7 = y3 + dy
  const q6 = aw - th
  const dh = q6 / 2
  const y4 = y5 - dh
  const y8 = y7 + dh
  const aw2 = aw / 2
  const y6 = b - aw2
  const x1 = r - ah
  const swAng = Math.atan(dy / Math.max(ah, 1e-6))
  const stAng = Math.PI - swAng
  const mswAng = -swAng
  const q12 = th / 2
  const dang2 = Math.atan2(q12, Math.max(iDx, 1e-6))
  const swAng2 = dang2 - Math.PI / 2
  const stAngDg = (stAng * 180) / Math.PI
  const mswAngDg = (mswAng * 180) / Math.PI
  const swAngDg = (swAng * 180) / Math.PI
  const swAng2Dg = (swAng2 * 180) / Math.PI

  /** Ellipse arc between two parametric angles (degrees) as an SVG "M … A …" command. */
  const arc = (
    cx: number,
    cy: number,
    rx: number,
    ry: number,
    startDeg: number,
    endDeg: number
  ): string => {
    const s = (startDeg * Math.PI) / 180
    const e = (endDeg * Math.PI) / 180
    const xS = cx + rx * Math.cos(s)
    const yS = cy + ry * Math.sin(s)
    const xE = cx + rx * Math.cos(e)
    const yE = cy + ry * Math.sin(e)
    const delta = endDeg - startDeg
    const largeArc = Math.abs(delta) > 180 ? 1 : 0
    const sweep = delta >= 0 ? 1 : 0
    return `M${xS},${yS} A${rx},${ry} 0 ${largeArc},${sweep} ${xE},${yE}`
  }

  // Body contour, then the two small closing contours of the curved tail.
  return [
    `M${l},${hR}`,
    arc(w, hR, w, hR, cd2, cd2 + mswAngDg).replace('M', 'L'),
    `L${x1},${y5}`,
    `L${x1},${y4}`,
    `L${r},${y6}`,
    `L${x1},${y8}`,
    `L${x1},${y7}`,
    arc(w, y3, w, hR, stAngDg, stAngDg + swAngDg).replace('M', 'L'),
    'Z',
    arc(w, hR, w, hR, cd2, cd2 + cd4),
    `L${r},${th}`,
    arc(w, y3, w, hR, c3d4, c3d4 + swAng2Dg).replace('M', 'L'),
    'Z',
  ].join(' ')
})

// curvedLeftArrow is the horizontal mirror of curvedRightArrow.
presetShapes.set('curvedLeftArrow', (w, h, adjustments) =>
  mirrorAbsolutePathHorizontally(presetShapes.get('curvedRightArrow')!(w, h, adjustments), w)
)

/**
 * Split a path at its first 'Z': the first closed contour and everything after it.
 * Paths with no 'Z' come back whole, with an empty remainder.
 */
function splitFirstClosedContour(path: string): { outer: string; remainder: string } {
  const closeIdx = path.indexOf('Z')
  if (closeIdx === -1) {
    return { outer: path, remainder: '' }
  }
  return {
    outer: path.slice(0, closeIdx + 1).trim(),
    remainder: path.slice(closeIdx + 1).trim(),
  }
}

/**
 * Split a curved horizontal arrow into its sub-paths so each contour can be
 * filled/stroked independently. Contour order differs by direction so the
 * visually front contour paints last.
 */
function buildCurvedArrowMultiPath(
  shapeName: 'curvedRightArrow' | 'curvedLeftArrow',
  w: number,
  h: number,
  adjustments?: Map<string, number> // NOTE(review): generic args were stripped in transit; restored from `get('adjN')` usage
): PresetSubPath[] {
  const fullPath = presetShapes.get(shapeName)!(w, h, adjustments)
  const { outer, remainder } = splitFirstClosedContour(fullPath)
  if (!remainder) {
    return [{ d: fullPath, fill: 'norm', stroke: true }]
  }

  const outerSub: PresetSubPath = { d: outer, fill: 'norm', stroke: true }
  const remainderSub: PresetSubPath = { d: remainder, fill: 'norm', stroke: true }
  return shapeName === 'curvedRightArrow' ? [remainderSub, outerSub] : [outerSub, remainderSub]
}

/**
 * Same idea for the vertical pair: curvedUpArrow is derived by mirroring
 * curvedDownArrow's contours vertically and reversing their paint order.
 */
function buildCurvedVerticalArrowMultiPath(
  shapeName: 'curvedUpArrow' | 'curvedDownArrow',
  w: number,
  h: number,
  adjustments?: Map<string, number> // NOTE(review): generic restored, see buildCurvedArrowMultiPath
): PresetSubPath[] {
  const downFullPath = presetShapes.get('curvedDownArrow')!(w, h, adjustments)
  const { outer, remainder } = splitFirstClosedContour(downFullPath)
  const ordered: PresetSubPath[] = remainder
    ? [
        { d: remainder, fill: 'norm', stroke: true },
        { d: outer, fill: 'norm', stroke: true },
      ]
    : [{ d: downFullPath, fill: 'norm', stroke: true }]

  if (shapeName === 'curvedDownArrow') {
    return ordered
  }

  const mirrored: PresetSubPath[] = ordered.map((path) => ({
    ...path,
    d: mirrorAbsolutePathVertically(path.d, h),
  }))
  return mirrored.reverse()
}

/*
 * NOTE(review): a stray doc comment here described an "OOXML arcTo → SVG"
 * helper ("Returns { path, endX, endY }") that is not defined at this spot;
 * the entry that follows is curvedUpArrow. Kept as a note rather than deleted.
 */
presetShapes.set('curvedUpArrow', (w, h, adjustments) => {
  /** Ellipse arc between two parametric angles (degrees) as an SVG "M … A …" command. */
  const arc = (
    cx: number,
    cy: number,
    rx: number,
    ry: number,
    startDeg: number,
    endDeg: number
  ): string => {
    const s = (startDeg * Math.PI) / 180
    const e = (endDeg * Math.PI) / 180
    const xS = cx + rx * Math.cos(s)
    const yS = cy + ry * Math.sin(s)
    const xE = cx + rx * Math.cos(e)
    const yE = cy + ry * Math.sin(e)
    const delta = endDeg - startDeg
    const largeArc = Math.abs(delta) > 180 ? 1 : 0
    const sweep = delta >= 0 ? 1 : 0
    return `M${xS},${yS} A${rx},${ry} 0 ${largeArc},${sweep} ${xE},${yE}`
  }

  const ss = Math.min(w, h)
  const wd2 = w / 2
  const a1Raw = adjustments?.get('adj1') ?? 25000
  const a2Raw = adjustments?.get('adj2') ?? 50000
  const a3Raw = adjustments?.get('adj3') ?? 25000
  // Clamp the three adjustments per the OOXML guide formulas.
  const maxAdj2 = (50000 * w) / Math.max(ss, 1)
  const a2 = Math.max(0, Math.min(a2Raw, maxAdj2))
  const a1 = Math.max(0, Math.min(a1Raw, 100000))
  const th = (ss * a1) / 100000
  const aw = (ss * a2) / 100000
  const q1 = (th + aw) / 4
  const wR = wd2 - q1
  const q7 = wR * 2
  const idy = (Math.sqrt(Math.max(q7 * q7 - th * th, 0)) * h) / Math.max(q7, 1)
  const maxAdj3 = (100000 * idy) / Math.max(ss, 1)
  const a3 = Math.max(0, Math.min(a3Raw, maxAdj3))
  const ah = (ss * a3) / 100000
  const x3 = wR + th
  const dx = (Math.sqrt(Math.max(h * h - ah * ah, 0)) * wR) / Math.max(h, 1)
  const x5 = wR + dx
  const x7 = x3 + dx
  const dh = (aw - th) / 2
  const x4 = x5 - dh
  const x8 = x7 + dh
  const x6 = w - aw / 2
  const y1 = ah

  const swAng = Math.atan2(dx, ah)
  const dang2 = Math.atan2(th / 2, idy)
  const stAng2 = Math.PI / 2 - dang2
  const swAng2 = dang2 - swAng
  const stAng3 = Math.PI / 2 - swAng
  const stAng2Deg = (stAng2 * 180) / Math.PI
  const swAng2Deg = (swAng2 * 180) / Math.PI
  const stAng3Deg = (stAng3 * 180) / Math.PI
  const swAngDeg = (swAng * 180) / Math.PI

  return [
    arc(wR, 0, wR, h, stAng2Deg, stAng2Deg + swAng2Deg),
    `L${x5},${y1}`,
    `L${x4},${y1}`,
    `L${x6},0`,
    `L${x8},${y1}`,
    `L${x7},${y1}`,
    arc(x3, 0, wR, h, stAng3Deg, stAng3Deg + swAngDeg).replace('M', 'L'),
    `L${wR},${h}`,
    arc(wR, 0, wR, h, 90, 180).replace('M', 'L'),
    `L${th},0`,
    arc(x3, 0, wR, h, 180, 90).replace('M', 'L'),
    'Z',
  ].join(' ')
})

presetShapes.set('curvedDownArrow', (w, h, adjustments) => {
  /** Ellipse arc between two parametric angles (degrees) as an SVG "M … A …" command. */
  const arc = (
    cx: number,
    cy: number,
    rx: number,
    ry: number,
    startDeg: number,
    endDeg: number
  ): string => {
    const s = (startDeg * Math.PI) / 180
    const e = (endDeg * Math.PI) / 180
    const xS = cx + rx * Math.cos(s)
    const yS = cy + ry * Math.sin(s)
    const xE = cx + rx * Math.cos(e)
    const yE = cy + ry * Math.sin(e)
    const delta = endDeg - startDeg
    const largeArc = Math.abs(delta) > 180 ?
1 : 0
    const sweep = delta >= 0 ? 1 : 0
    return `M${xS},${yS} A${rx},${ry} 0 ${largeArc},${sweep} ${xE},${yE}`
  }

  const ss = Math.min(w, h)
  const wd2 = w / 2
  const a1Raw = adjustments?.get('adj1') ?? 25000
  const a2Raw = adjustments?.get('adj2') ?? 50000
  const a3Raw = adjustments?.get('adj3') ?? 25000
  // Clamp the three adjustments per the OOXML guide formulas.
  const maxAdj2 = (50000 * w) / Math.max(ss, 1)
  const a2 = Math.max(0, Math.min(a2Raw, maxAdj2))
  const a1 = Math.max(0, Math.min(a1Raw, 100000))
  const th = (ss * a1) / 100000
  const aw = (ss * a2) / 100000
  const q1 = (th + aw) / 4
  const wR = wd2 - q1
  const q7 = wR * 2
  const idy = (Math.sqrt(Math.max(q7 * q7 - th * th, 0)) * h) / Math.max(q7, 1)
  const maxAdj3 = (100000 * idy) / Math.max(ss, 1)
  const a3 = Math.max(0, Math.min(a3Raw, maxAdj3))
  const ah = (ss * a3) / 100000
  const x3 = wR + th
  const dx = (Math.sqrt(Math.max(h * h - ah * ah, 0)) * wR) / Math.max(h, 1)
  const x5 = wR + dx
  const x7 = x3 + dx
  const dh = (aw - th) / 2
  const x4 = x5 - dh
  const x8 = x7 + dh
  const x6 = w - aw / 2
  const y1 = h - ah

  const swAng = Math.atan2(dx, ah)
  const swAngDeg = (swAng * 180) / Math.PI
  const dang2 = Math.atan2(th / 2, idy)
  const dang2Deg = (dang2 * 180) / Math.PI
  const stAng = 270 + swAngDeg
  const stAng2 = 270 - dang2Deg
  const swAng2 = dang2Deg - 90
  const swAng3 = 90 + dang2Deg

  return [
    `M${x6},${h}`,
    `L${x4},${y1}`,
    `L${x5},${y1}`,
    arc(wR, h, wR, h, stAng, stAng - swAngDeg).replace('M', 'L'),
    `L${x3},0`,
    arc(x3, h, wR, h, 270, 270 + swAngDeg).replace('M', 'L'),
    `L${x5 + th},${y1}`,
    `L${x8},${y1}`,
    'Z',
    `M${x3},0`,
    arc(x3, h, wR, h, stAng2, stAng2 + swAng2).replace('M', 'L'),
    arc(wR, h, wR, h, 180, 180 + swAng3).replace('M', 'L'),
    'Z',
  ].join(' ')
})

/**
 * Shared path builder for circularArrow / leftCircularArrow.
 * Both variants use the same OOXML guide formulas; they differ only in default
 * adjustment values and in the winding of the final path.
 *
 * @param adjustments OOXML adjustment values (adj1..adj5); angles are in
 *   60000ths of a degree, sizes in 1/1000-percent of the shortest side.
 * @param _mirrorX retained for signature compatibility; unused.
 * @returns an SVG path string for the arrow.
 */
function buildCircularArrowPath(
  w: number,
  h: number,
  adjustments?: Map<string, number>, // NOTE(review): generic args stripped in transit; restored from usage
  _mirrorX = false,
  variant: 'circularArrow' | 'leftCircularArrow' = 'circularArrow'
): string {
  const hc = w / 2
  const vc = h / 2
  const wd2 = w / 2
  const hd2 = h / 2
  const ss = Math.min(w, h)
  const cd2 = 10800000 // 180° in 60000ths

  const toRad60k = (a: number) => ((a / 60000) * Math.PI) / 180

  // OOXML formula helpers
  const ooxSin = (val: number, ang: number) => val * Math.sin(toRad60k(ang))
  const ooxCos = (val: number, ang: number) => val * Math.cos(toRad60k(ang))
  const cat2 = (r: number, ht: number, wt: number) => r * Math.cos(Math.atan2(wt, ht))
  const sat2 = (r: number, ht: number, wt: number) => r * Math.sin(Math.atan2(wt, ht))
  // OOXML: at2(x, y) = atan2(y, x) — first arg is x, second is y
  const at2 = (x: number, y: number) => ((Math.atan2(y, x) * 180) / Math.PI) * 60000
  const modF = (x: number, y: number, z: number) => Math.sqrt(x * x + y * y + z * z)

  // Adjustments — leftCircularArrow has different OOXML defaults
  const isLeft = variant === 'leftCircularArrow'
  const adj1 = adjustments?.get('adj1') ?? 12500
  const adj2 = adjustments?.get('adj2') ?? (isLeft ? -1142319 : 1142319)
  const adj3 = adjustments?.get('adj3') ?? (isLeft ? 1142319 : 20457681)
  const adj4 = adjustments?.get('adj4') ?? 10800000
  const adj5v = adjustments?.get('adj5') ?? 12500

  const a5 = Math.max(0, Math.min(adj5v, 25000))
  const maxAdj1 = a5 * 2
  const a1 = Math.max(0, Math.min(adj1, maxAdj1))
  const enAng = Math.max(1, Math.min(adj3, 21599999))
  const stAng = Math.max(0, Math.min(adj4, 21599999))

  const th = (ss * a1) / 100000
  const thh = (ss * a5) / 100000
  const th2 = th / 2

  const rw1 = wd2 + th2 - thh
  const rh1 = hd2 + th2 - thh
  const rw2 = rw1 - th
  const rh2 = rh1 - th
  const rw3 = rw2 + th2
  const rh3 = rh2 + th2

  // Point H (mid-radius at end angle)
  const wtH = ooxSin(rw3, enAng)
  const htH = ooxCos(rh3, enAng)
  const dxH = cat2(rw3, htH, wtH)
  const dyH = sat2(rh3, htH, wtH)
  const xH = hc + dxH
  const yH = vc + dyH

  // Compute max arrowhead angle
  const rI = Math.min(rw2, rh2)
  const u1 = dxH * dxH
  const u2 = dyH * dyH
  const u3 = rI * rI
  const u4 = u1 - u3
  const u5 = u2 - u3
  // FIX: u6 divides by u1, so the zero-guard must test u1 (was `u2 !== 0`),
  // matching the guard-the-divisor pattern used throughout this function.
  const u6 = u1 !== 0 ? (u4 * u5) / u1 : 0
  const u7 = u2 !== 0 ? u6 / u2 : 0
  const u8 = 1 - u7
  const u9 = Math.sqrt(Math.max(0, u8))
  const u10 = dxH !== 0 ? u4 / dxH : 0
  const u11 = dyH !== 0 ? u10 / dyH : 0
  const u12 = u11 !== 0 ? (1 + u9) / u11 : 0
  const u13 = at2(1, u12)
  const u14 = u13 + 21600000
  const u15 = u13 >= 0 ? u13 : u14
  const u16 = u15 - enAng
  const u17 = u16 + 21600000
  const u18 = u16 >= 0 ? u16 : u17
  const u19 = u18 - cd2
  const u20 = u18 - 21600000
  const u21 = u19 >= 0 ? u20 : u18
  const maxAng = Math.abs(u21)
  let aAng: number
  if (isLeft) {
    // leftCircularArrow: minAng = -abs(u21), a2 = -abs(adj2), aAng = pin(minAng, a2, 0)
    const minAng = -maxAng
    const a2 = -Math.abs(adj2)
    aAng = Math.max(minAng, Math.min(a2, 0))
  } else {
    aAng = Math.max(0, Math.min(adj2, maxAng))
  }
  const ptAng = enAng + aAng

  // Point A (arrowhead tip)
  const wtA = ooxSin(rw3, ptAng)
  const htA = ooxCos(rh3, ptAng)
  const dxA = cat2(rw3, htA, wtA)
  const dyA = sat2(rh3, htA, wtA)
  const xA = hc + dxA
  const yA = vc + dyA

  // Point E (outer arc start)
  const wtE = ooxSin(rw1, stAng)
  const htE = ooxCos(rh1, stAng)
  const dxE = cat2(rw1, htE, wtE)
  const dyE = sat2(rh1, htE, wtE)
  const xE = hc + dxE
  const yE = vc + dyE

  // Points G and B (arrowhead base, offset from H by thh at angle ptAng)
  const dxG = ooxCos(thh, ptAng)
  const dyG = ooxSin(thh, ptAng)
  const xG = xH + dxG
  const yG = yH + dyG
  const xB = xH - dxG
  const yB = yH - dyG

  // Scale to normalized circle for line-circle intersection
  const sx1 = xB - hc
  const sy1 = yB - vc
  const sx2 = xG - hc
  const sy2 = yG - vc

  // Outer circle intersection
  const rO = Math.min(rw1, rh1)
  const x1O = rw1 !== 0 ? (sx1 * rO) / rw1 : 0
  const y1O = rh1 !== 0 ? (sy1 * rO) / rh1 : 0
  const x2O = rw1 !== 0 ? (sx2 * rO) / rw1 : 0
  const y2O = rh1 !== 0 ? (sy2 * rO) / rh1 : 0

  const dxO = x2O - x1O
  const dyO = y2O - y1O
  const dOval = modF(dxO, dyO, 0)

  const q1 = x1O * y2O
  const q2 = x2O * y1O
  const DO = q1 - q2

  const q3 = rO * rO
  const q4 = dOval * dOval
  const q5 = q3 * q4
  const q6 = DO * DO
  const q7 = q5 - q6
  const q8 = Math.max(q7, 0)
  const sdelO = Math.sqrt(q8)

  const ndyO = dyO * -1
  const sdyO = ndyO >= 0 ? -1 : 1
  const q9 = sdyO * dxO
  const q10 = q9 * sdelO
  const q11 = DO * dyO
  const dxF1 = q4 !== 0 ? (q11 + q10) / q4 : 0
  const q12 = q11 - q10
  const dxF2 = q4 !== 0 ? q12 / q4 : 0

  const adyO = Math.abs(dyO)
  const q13 = adyO * sdelO
  const q14 = DO * dxO * -1
  const dyF1 = q4 !== 0 ? (q14 + q13) / q4 : 0
  const q15 = q14 - q13
  const dyF2 = q4 !== 0 ? q15 / q4 : 0

  // Pick intersection closest to G side
  const q16 = x2O - dxF1
  const q17 = x2O - dxF2
  const q18 = y2O - dyF1
  const q19 = y2O - dyF2
  const q20 = modF(q16, q18, 0)
  const q21 = modF(q17, q19, 0)
  const q22 = q21 - q20
  const dxF = q22 >= 0 ? dxF1 : dxF2
  const dyF = q22 >= 0 ? dyF1 : dyF2

  const sdxF = rO !== 0 ? (dxF * rw1) / rO : 0
  const sdyF = rO !== 0 ? (dyF * rh1) / rO : 0
  const xF = hc + sdxF
  const yF = vc + sdyF

  // Inner circle intersection
  const x1I = rw2 !== 0 ? (sx1 * rI) / rw2 : 0
  const y1I = rh2 !== 0 ? (sy1 * rI) / rh2 : 0
  const x2I = rw2 !== 0 ? (sx2 * rI) / rw2 : 0
  const y2I = rh2 !== 0 ? (sy2 * rI) / rh2 : 0

  const dxI = x2I - x1I
  const dyI = y2I - y1I
  const dI = modF(dxI, dyI, 0)
  const v1 = x1I * y2I
  const v2 = x2I * y1I
  const DI = v1 - v2

  const v3 = rI * rI
  const v4 = dI * dI
  const v5 = v3 * v4
  const v6 = DI * DI
  const v7 = v5 - v6
  const v8 = Math.max(v7, 0)
  const sdelI = Math.sqrt(v8)
  const v9 = sdyO * dxI
  const v10 = v9 * sdelI
  const v11 = DI * dyI
  const dxC1 = v4 !== 0 ? (v11 + v10) / v4 : 0
  const v12 = v11 - v10
  const dxC2 = v4 !== 0 ? v12 / v4 : 0

  const adyI = Math.abs(dyI)
  const v13 = adyI * sdelI
  const v14 = DI * dxI * -1
  const dyC1 = v4 !== 0 ? (v14 + v13) / v4 : 0
  const v15 = v14 - v13
  const dyC2 = v4 !== 0 ? v15 / v4 : 0

  // Pick intersection closest to B side (x1I)
  const v16 = x1I - dxC1
  const v17 = x1I - dxC2
  const v18 = y1I - dyC1
  const v19 = y1I - dyC2
  const v20 = modF(v16, v18, 0)
  const v21 = modF(v17, v19, 0)
  const v22 = v21 - v20
  const dxC = v22 >= 0 ? dxC1 : dxC2
  const dyC = v22 >= 0 ? dyC1 : dyC2

  const sdxC = rI !== 0 ? (dxC * rw2) / rI : 0
  const sdyC = rI !== 0 ? (dyC * rh2) / rI : 0
  const xC = hc + sdxC
  const yC = vc + sdyC

  // Inner arc angles — leftCircularArrow uses intermediate istAng0/iswAng0
  const ist0 = at2(sdxC, sdyC)
  const ist1 = ist0 + 21600000
  const istAng0 = ist0 >= 0 ? ist0 : ist1
  const isw1 = stAng - istAng0

  let istAng: number
  let iswAng: number
  if (isLeft) {
    // leftCircularArrow: iswAng0 always ≥ 0, then istAng shifted, iswAng negated
    const iswAng0 = isw1 >= 0 ? isw1 : isw1 + 21600000
    istAng = istAng0 + iswAng0
    iswAng = -iswAng0
  } else {
    // circularArrow: iswAng always ≤ 0 (clockwise inner arc)
    istAng = istAng0
    iswAng = isw1 >= 0 ? isw1 - 21600000 : isw1
  }

  // Adjusted arrowhead points (clamp when too close)
  const p1 = xF - xC
  const p2 = yF - yC
  const p3 = modF(p1, p2, 0)
  const p4 = p3 / 2
  const p5 = p4 - thh
  const xGp = p5 >= 0 ? xF : xG
  const yGp = p5 >= 0 ? yF : yG
  const xBp = p5 >= 0 ? xC : xB
  const yBp = p5 >= 0 ? yC : yB

  // Outer arc sweep angle
  const en0 = at2(sdxF, sdyF)
  const en1 = en0 + 21600000
  const en2 = en0 >= 0 ? en0 : en1
  const sw0 = en2 - stAng

  let outerArcStAng: number
  let outerArcSwAng: number
  if (isLeft) {
    // leftCircularArrow: swAng ≤ 0, then stAng0 = stAng + swAng, swAng0 = -swAng
    const swAngRaw = sw0 >= 0 ? sw0 - 21600000 : sw0
    outerArcStAng = stAng + swAngRaw // stAng0
    outerArcSwAng = -swAngRaw // swAng0 (positive)
  } else {
    const swAng = sw0 >= 0 ? sw0 : sw0 + 21600000
    outerArcStAng = stAng
    outerArcSwAng = swAng
  }

  // Compute end points for SVG arcs using OOXML arcTo semantics
  // Outer arc: from outerArcStAng sweeping outerArcSwAng
  const outerEndAng = outerArcStAng + outerArcSwAng
  const wtOE = ooxSin(rw1, outerEndAng)
  const htOE = ooxCos(rh1, outerEndAng)
  const xOE = hc + cat2(rw1, htOE, wtOE)
  const yOE = vc + sat2(rh1, htOE, wtOE)

  // Inner arc: from istAng sweeping iswAng
  const innerEndAng = istAng + iswAng
  const wtIE = ooxSin(rw2, innerEndAng)
  const htIE = ooxCos(rh2, innerEndAng)
  const xIE = hc + cat2(rw2, htIE, wtIE)
  const yIE = vc + sat2(rh2, htIE, wtIE)

  // SVG arc flags
  const outerSweepDeg = Math.abs(outerArcSwAng / 60000)
  const outerLargeArc = outerSweepDeg > 180 ? 1 : 0
  const outerSweepFlag = outerArcSwAng > 0 ? 1 : 0

  const innerSweepDeg = Math.abs(iswAng / 60000)
  const innerLargeArc = innerSweepDeg > 180 ? 1 : 0
  const innerSweepFlag = iswAng > 0 ? 1 : 0

  if (isLeft) {
    // leftCircularArrow path: M(xE) → L(xD) → inner arc → arrowhead → L(xF) → outer arc → Z
    // Point D: inner arc start at stAng on rw2/rh2
    const wtD = ooxSin(rw2, stAng)
    const htD = ooxCos(rh2, stAng)
    const xD = hc + cat2(rw2, htD, wtD)
    const yD = vc + sat2(rh2, htD, wtD)
    return [
      `M${xE},${yE}`,
      `L${xD},${yD}`,
      `A${rw2},${rh2} 0 ${innerLargeArc},${innerSweepFlag} ${xIE},${yIE}`,
      `L${xBp},${yBp}`,
      `L${xA},${yA}`,
      `L${xGp},${yGp}`,
      `L${xF},${yF}`,
      `A${rw1},${rh1} 0 ${outerLargeArc},${outerSweepFlag} ${xOE},${yOE}`,
      'Z',
    ].join(' ')
  }

  return [
    `M${xE},${yE}`,
    `A${rw1},${rh1} 0 ${outerLargeArc},${outerSweepFlag} ${xOE},${yOE}`,
    `L${xGp},${yGp}`,
    `L${xA},${yA}`,
    `L${xBp},${yBp}`,
    `L${xC},${yC}`,
    `A${rw2},${rh2} 0 ${innerLargeArc},${innerSweepFlag} ${xIE},${yIE}`,
    'Z',
  ].join(' ')
}

presetShapes.set('circularArrow', (w, h, adjustments) => {
  return buildCircularArrowPath(w, h, adjustments, false, 'circularArrow')
})

//
// leftCircularArrow uses same OOXML guide formulas as circularArrow but different default adjustments.
presetShapes.set('leftCircularArrow', (w, h, adjustments) => {
  return buildCircularArrowPath(w, h, adjustments, false, 'leftCircularArrow')
})

presetShapes.set('leftRightCircularArrow', (w, h, _adjustments) => {
  // Built from the oracle PDF vector path (shape id 0177),
  // normalized to a 400x280 reference box; no adjustments supported.
  const sx = w / 400
  const sy = h / 280
  const p = (x: number, y: number) => ({ x: x * sx, y: y * sy })

  const p1 = p(35.0, 140.0)
  const p2 = p(19.9536, 89.9471)
  const p3 = p(33.4296, 89.9471)
  const c1 = p(74.6127, 28.1974)
  const c2 = p(182.5744, 0.5489)
  const p4 = p(274.5688, 28.1924)
  const c3 = p(315.4978, 40.4912)
  const c4 = p(348.2481, 62.4743)
  const p5 = p(366.5707, 89.9471)
  const p6 = p(380.0463, 89.9471)
  const p7 = p(365.0, 140.0)
  const p8 = p(310.0463, 89.9471)
  const p9 = p(320.9838, 89.9471)
  const c5 = p(274.3848, 50.3095)
  const c6 = p(182.4425, 40.5864)
  const p10 = p(115.6249, 68.2298)
  const c7 = p(101.3589, 74.1319)
  const c8 = p(88.9651, 81.4842)
  const p11 = p(79.0159, 89.947)
  const p12 = p(89.9536, 89.9471)

  return [
    `M${p1.x},${p1.y}`,
    `L${p2.x},${p2.y}`,
    `L${p3.x},${p3.y}`,
    `C${c1.x},${c1.y} ${c2.x},${c2.y} ${p4.x},${p4.y}`,
    `C${c3.x},${c3.y} ${c4.x},${c4.y} ${p5.x},${p5.y}`,
    `L${p6.x},${p6.y}`,
    `L${p7.x},${p7.y}`,
    `L${p8.x},${p8.y}`,
    `L${p9.x},${p9.y}`,
    `C${c5.x},${c5.y} ${c6.x},${c6.y} ${p10.x},${p10.y}`,
    `C${c7.x},${c7.y} ${c8.x},${c8.y} ${p11.x},${p11.y}`,
    `L${p12.x},${p12.y}`,
    'Z',
  ].join(' ')
})

presetShapes.set('quadArrow', (w, h, adjustments) => {
  // Four-headed arrow (OOXML quadArrow): 24-point polygon, 3 adjustments.
  const adj1Raw = adjustments?.get('adj1') ?? 22500
  const adj2Raw = adjustments?.get('adj2') ?? 22500
  const adj3Raw = adjustments?.get('adj3') ?? 22500
  const vc = h / 2
  const hc = w / 2
  const minWH = Math.min(w, h)
  // Clamp: a1 ≤ 2·a2, a3 ≤ (100000 − 2·a2)/2 per the OOXML guides.
  const a2 = Math.max(0, Math.min(adj2Raw, 50000))
  const a1 = Math.max(0, Math.min(adj1Raw, 2 * a2))
  const a3 = Math.max(0, Math.min(adj3Raw, (100000 - 2 * a2) / 2))
  const x1 = (minWH * a3) / 100000
  const dx2 = (minWH * a2) / 100000
  const x2 = hc - dx2
  const x5 = hc + dx2
  const dx3 = (minWH * a1) / 200000
  const x3 = hc - dx3
  const x4 = hc + dx3
  const x6 = w - x1
  const y2 = vc - dx2
  const y5 = vc + dx2
  const y3 = vc - dx3
  const y4 = vc + dx3
  const y6 = h - x1
  return [
    `M0,${vc}`,
    `L${x1},${y2}`,
    `L${x1},${y3}`,
    `L${x3},${y3}`,
    `L${x3},${x1}`,
    `L${x2},${x1}`,
    `L${hc},0`,
    `L${x5},${x1}`,
    `L${x4},${x1}`,
    `L${x4},${y3}`,
    `L${x6},${y3}`,
    `L${x6},${y2}`,
    `L${w},${vc}`,
    `L${x6},${y5}`,
    `L${x6},${y4}`,
    `L${x4},${y4}`,
    `L${x4},${y6}`,
    `L${x5},${y6}`,
    `L${hc},${h}`,
    `L${x2},${y6}`,
    `L${x3},${y6}`,
    `L${x3},${y4}`,
    `L${x1},${y4}`,
    `L${x1},${y5}`,
    'Z',
  ].join(' ')
})

presetShapes.set('quadArrowCallout', (w, h, adjustments) => {
  // OOXML: 28-point polygon with 4 arrowheads (4 adj)
  const ss = Math.min(w, h)
  const hc = w / 2
  const vc = h / 2
  const a2 = Math.max(0, Math.min(adjustments?.get('adj2') ?? 18515, 50000))
  const a1 = Math.max(0, Math.min(adjustments?.get('adj1') ?? 18515, a2 * 2))
  const maxAdj3 = 50000 - a2
  const a3 = Math.max(0, Math.min(adjustments?.get('adj3') ?? 18515, maxAdj3))
  const q2 = a3 * 2
  // a4 (callout box size) is pinned between a1 and 100000 − 2·a3.
  const a4 = Math.max(a1, Math.min(adjustments?.get('adj4') ?? 48123, 100000 - q2))
  const dx2 = (ss * a2) / 100000
  const dx3 = (ss * a1) / 200000
  const ah = (ss * a3) / 100000
  const dx1 = (w * a4) / 200000
  const dy1 = (h * a4) / 200000
  const x8 = w - ah
  const x2 = hc - dx1
  const x7 = hc + dx1
  const x3 = hc - dx2
  const x6 = hc + dx2
  const x4 = hc - dx3
  const x5 = hc + dx3
  const y8 = h - ah
  const y2 = vc - dy1
  const y7 = vc + dy1
  const y3 = vc - dx2
  const y6 = vc + dx2
  const y4 = vc - dx3
  const y5 = vc + dx3
  return [
    `M0,${vc}`,
    `L${ah},${y3}`,
    `L${ah},${y4}`,
    `L${x2},${y4}`,
    `L${x2},${y2}`,
    `L${x4},${y2}`,
    `L${x4},${ah}`,
    `L${x3},${ah}`,
    `L${hc},0`,
    `L${x6},${ah}`,
    `L${x5},${ah}`,
    `L${x5},${y2}`,
    `L${x7},${y2}`,
    `L${x7},${y4}`,
    `L${x8},${y4}`,
    `L${x8},${y3}`,
    `L${w},${vc}`,
    `L${x8},${y6}`,
    `L${x8},${y5}`,
    `L${x7},${y5}`,
    `L${x7},${y7}`,
    `L${x5},${y7}`,
    `L${x5},${y8}`,
    `L${x6},${y8}`,
    `L${hc},${h}`,
    `L${x3},${y8}`,
    `L${x4},${y8}`,
    `L${x4},${y7}`,
    `L${x2},${y7}`,
    `L${x2},${y5}`,
    `L${ah},${y5}`,
    `L${ah},${y6}`,
    'Z',
  ].join(' ')
})

presetShapes.set('leftRightUpArrow', (w, h, adjustments) => {
  // OOXML preset formula (presetShapeDefinitions.xml -> leftRightUpArrow)
  const rawAdj2 = Math.max(0, Math.min(adjustments?.get('adj2') ?? 25000, 50000))
  const maxAdj1 = rawAdj2 * 2
  const rawAdj1 = Math.max(0, Math.min(adjustments?.get('adj1') ?? 25000, maxAdj1))
  const q1 = 100000 - maxAdj1
  const maxAdj3 = q1 / 2
  const rawAdj3 = Math.max(0, Math.min(adjustments?.get('adj3') ??
// --- continuation of leftRightUpArrow (clamp opened on the previous line) ---
  25000, maxAdj3))

  const ss = Math.min(w, h)
  const hc = w / 2

  const x1 = (ss * rawAdj3) / 100000
  const dx2 = (ss * rawAdj2) / 100000
  const x2 = hc - dx2
  const x5 = hc + dx2
  const dx3 = (ss * rawAdj1) / 200000
  const x3 = hc - dx3
  const x4 = hc + dx3
  const x6 = w - x1

  const dy2 = (ss * rawAdj2) / 50000
  const y2 = h - dy2
  const y4 = h - dx2
  const y3 = y4 - dx3
  const y5 = y4 + dx3

  return [
    `M0,${y4}`,
    `L${x1},${y2}`,
    `L${x1},${y3}`,
    `L${x3},${y3}`,
    `L${x3},${x1}`,
    `L${x2},${x1}`,
    `L${hc},0`,
    `L${x5},${x1}`,
    `L${x4},${x1}`,
    `L${x4},${y3}`,
    `L${x6},${y3}`,
    `L${x6},${y2}`,
    `L${w},${y4}`,
    `L${x6},${h}`,
    `L${x6},${y5}`,
    `L${x1},${y5}`,
    `L${x1},${h}`,
    'Z',
  ].join(' ')
})

presetShapes.set('swooshArrow', (w, h, adjustments) => {
  // OOXML swooshArrow: curved swoosh with arrowhead on the right.
  const ss = Math.min(w, h)
  const raw1 = adjustments?.get('adj1') ?? 25000
  const raw2 = adjustments?.get('adj2') ?? 16667
  const a1 = Math.max(1, Math.min(raw1, 75000))
  const maxAdj2 = (70000 * w) / ss
  const a2 = Math.max(0, Math.min(raw2, maxAdj2))
  const ad1 = (h * a1) / 100000
  const ad2 = (ss * a2) / 100000
  const ssd8 = ss / 8
  const hd6 = h / 6
  const alfa = Math.PI / 2 / 14 // cd4/14 in radians
  const tanAlfa = Math.tan(alfa)
  const xB = w - ad2
  const yB = ssd8
  const dx0 = ssd8 * tanAlfa
  const xC = xB - dx0
  const dx1 = ad1 * tanAlfa
  const yF = yB + ad1
  const xF = xB + dx1
  const xE = xF + dx0
  const yE = yF + ssd8
  const dy2 = yE
  const dy22 = dy2 / 2
  const dy3 = h / 20
  const yD = dy22 + dy3
  const xP1 = w / 6
  const yP1 = hd6 + hd6 // h/3
  const dy5 = hd6 / 2
  const yP2 = yF + dy5
  const xP2 = w / 4
  return [
    `M0,${h}`,
    `Q${xP1},${yP1} ${xB},${yB}`,
    `L${xC},0`,
    `L${w},${yD}`,
    `L${xE},${yE}`,
    `L${xF},${yF}`,
    `Q${xP2},${yP2} 0,${h}`,
    'Z',
  ].join(' ')
})

// ===== Flowchart Shapes =====

presetShapes.set('flowChartProcess', (w, h) => `M0,0 L${w},0 L${w},${h} L0,${h} Z`)

presetShapes.set('flowChartDecision', (w, h) => {
  const cx = w / 2
  const cy = h / 2
  return `M${cx},0 L${w},${cy} L${cx},${h} L0,${cy} Z`
})

presetShapes.set('flowChartTerminator', (w, h) => {
  // OOXML: path w=21600 h=21600, wR=3475, hR=10800 (elliptical caps, not circular)
  const x1 = (w * 3475) / 21600
  const x2 = (w * 18125) / 21600
  const wR = x1 // w * 3475/21600
  const hR = h / 2 // h * 10800/21600
  return [
    `M${x1},0`,
    `L${x2},0`,
    `A${wR},${hR} 0 0,1 ${x2},${h}`,
    `L${x1},${h}`,
    `A${wR},${hR} 0 0,1 ${x1},0`,
    'Z',
  ].join(' ')
})

presetShapes.set('flowChartDocument', (w, h) => {
  // OOXML: path w=21600 h=21600, cubic (21600,17322)(10800,17322)(10800,23922)(0,20172)
  const y1 = (h * 17322) / 21600
  const cy1 = y1 // h * 17322/21600
  const cy2 = (h * 23922) / 21600 // extends below h (overshoot for curve)
  const y2 = (h * 20172) / 21600
  return [`M0,0`, `L${w},0`, `L${w},${y1}`, `C${w / 2},${cy1} ${w / 2},${cy2} 0,${y2}`, 'Z'].join(
    ' '
  )
})

presetShapes.set('flowChartInputOutput', (w, h) => {
  // OOXML: path w=5 h=5, points: (0,5)(1,0)(5,0)(4,5) — offset = w/5
  const offset = w / 5
  return `M${offset},0 L${w},0 L${w - offset},${h} L0,${h} Z`
})

presetShapes.set('flowChartPredefinedProcess', (w, h) => {
  const inset = w * 0.1
  return [
    // Outer rectangle
    `M0,0 L${w},0 L${w},${h} L0,${h} Z`,
    // Left inner line
    `M${inset},0 L${inset},${h}`,
    // Right inner line
    `M${w - inset},0 L${w - inset},${h}`,
  ].join(' ')
})

presetShapes.set('flowChartAlternateProcess', (w, h) => {
  // OOXML spec: corner radius = ssd6 = min(w,h)/6
  const r = Math.min(w, h) / 6
  return [
    `M${r},0`,
    `L${w - r},0`,
    `A${r},${r} 0 0,1 ${w},${r}`,
    `L${w},${h - r}`,
    `A${r},${r} 0 0,1 ${w - r},${h}`,
    `L${r},${h}`,
    `A${r},${r} 0 0,1 0,${h - r}`,
    `L0,${r}`,
    `A${r},${r} 0 0,1 ${r},0`,
    'Z',
  ].join(' ')
})

presetShapes.set('flowChartManualInput', (w, h) => {
  const topOffset = h * 0.2
  return `M0,${topOffset} L${w},0 L${w},${h} L0,${h} Z`
})

presetShapes.set('flowChartManualOperation', (w, h) => {
  // OOXML: path w=5 h=5: (0,0)→(5,0)→(4,5)→(1,5)→close → inset = w/5
  return `M0,0 L${w},0 L${(w * 4) / 5},${h} L${w / 5},${h} Z`
})

presetShapes.set('flowChartPreparation', (w, h) => {
  const inset = w * 0.2
  const cy = h / 2
  return `M${inset},0 L${w - inset},0 L${w},${cy} L${w - inset},${h} L${inset},${h} L0,${cy} Z`
})

presetShapes.set('flowChartData', (w, h) => {
  const offset = w * 0.15
  return `M${offset},0 L${w},0 L${w - offset},${h} L0,${h} Z`
})

presetShapes.set('flowChartInternalStorage', (w, h) => {
  const inset = Math.min(w, h) * 0.12
  return [
    `M0,0 L${w},0 L${w},${h} L0,${h} Z`,
    `M${inset},0 L${inset},${h}`,
    `M0,${inset} L${w},${inset}`,
  ].join(' ')
})

presetShapes.set('flowChartMagneticDisk', (w, h) => {
  // OOXML spec: path w=6 h=6, top at y=1, arc hR=1 → ry = h/6
  const ry = h / 6
  const bodyTop = ry
  const bodyBottom = h - ry
  return [
    // Top ellipse
    `M0,${bodyTop}`,
    `A${w / 2},${ry} 0 1,1 ${w},${bodyTop}`,
    // Right side down
    `L${w},${bodyBottom}`,
    // Bottom ellipse
    `A${w / 2},${ry} 0 1,1 0,${bodyBottom}`,
    // Left side up
    `L0,${bodyTop}`,
    'Z',
    // Top ellipse visible arc (back half)
    `M${w},${bodyTop}`,
    `A${w / 2},${ry} 0 1,1 0,${bodyTop}`,
  ].join(' ')
})

presetShapes.set('flowChartDelay', (w, h) => {
  // OOXML: M(0,0) L(hc,0) arcTo(wd2,hd2, 270°, 180°) L(0,h) Z
  // Arc from (hc,0) with wR=w/2 hR=h/2, stAng=270° swAng=180° → semicircle right side
  const hc = w / 2
  const a = ooArcTo(hc, 0, hc, h / 2, 270, 180)
  return [`M0,0`, `L${hc},0`, a.svg, `L0,${h}`, 'Z'].join(' ')
})

presetShapes.set('flowChartDisplay', (w, h) => {
  // OOXML: path w=6 h=6, points: (0,3)(1,0)(5,0) arcTo(1,3,270°,180°) (1,6) close
  // Scaled: left point at (0, h/2), top-left at (w/6, 0), arc center at (5w/6, h/2)
  const sx = w / 6
  const sy = h / 6
  const arcWR = sx // wR = 1 * (w/6)
  const arcHR = sy * 3 // hR = 3 * (h/6) = h/2
  const a = ooArcTo(5 * sx, 0, arcWR, arcHR, 270, 180)
  return [`M0,${3 * sy}`, `L${sx},0`, `L${5 * sx},0`, a.svg, `L${sx},${h}`, 'Z'].join(' ')
})

presetShapes.set('flowChartExtract', (w, h) => `M${w / 2},0 L${w},${h} L0,${h} Z`)

presetShapes.set('flowChartMerge', (w, h) => `M0,0 L${w},0 L${w / 2},${h} Z`)

presetShapes.set('flowChartOffpageConnector', (w, h) => {
  const arrowH = h * 0.2
  return [`M0,0`, `L${w},0`, `L${w},${h - arrowH}`, `L${w / 2},${h}`, `L0,${h - arrowH}`, 'Z'].join(
    ' '
  )
})

presetShapes.set('flowChartConnector', (w, h) => {
  const rx = w / 2
  const ry = h / 2
  return [`M${w},${ry}`, `A${rx},${ry} 0 1,1 0,${ry}`, `A${rx},${ry} 0 1,1 ${w},${ry}`, 'Z'].join(
    ' '
  )
})

presetShapes.set('flowChartSort', (w, h) => {
  const cx = w / 2
  const cy = h / 2
  return [`M${cx},0 L${w},${cy} L${cx},${h} L0,${cy} Z`, `M0,${cy} L${w},${cy}`].join(' ')
})

presetShapes.set('flowChartCollate', (w, h) => {
  const cx = w / 2
  const cy = h / 2
  return [
    // top inverted triangle
    `M0,0 L${w},0 L${cx},${cy} Z`,
    // bottom upright triangle
    `M0,${h} L${w},${h} L${cx},${cy} Z`,
  ].join(' ')
})

presetShapes.set('flowChartPunchedTape', (w, h) => {
  // OOXML: path w="20" h="20" with arcTo operations.
  // Start at (0, 2), four arcs for wavy top/bottom.
  const sx = w / 20
  const sy = h / 20
  const arcTo = (
    curX: number,
    curY: number,
    wR: number,
    hR: number,
    stAng60k: number,
    swAng60k: number
  ) => {
    const stDeg = stAng60k / 60000
    const swDeg = swAng60k / 60000
    const stRad = (stDeg * Math.PI) / 180
    const endRad = ((stDeg + swDeg) * Math.PI) / 180
    const cx = curX - wR * Math.cos(stRad)
    const cy = curY - hR * Math.sin(stRad)
    const endX = cx + wR * Math.cos(endRad)
    const endY = cy + hR * Math.sin(endRad)
    const largeArc = Math.abs(swDeg) > 180 ? 1 : 0
    const sweep = swDeg > 0 ?
// --- continuation of flowChartPunchedTape's arcTo helper (ternary opened above) ---
    1 : 0
    return { endX, endY, svg: `A${wR},${hR} 0 ${largeArc},${sweep} ${endX},${endY}` }
  }
  // cd2 = 10800000 (180°)
  const wR = 5 * sx
  const hR = 2 * sy
  let x = 0
  let y = 2 * sy
  const parts = [`M${x},${y}`]
  // Top-left: stAng=cd2(180°), swAng=-cd2(-180°) → dips down
  let a = arcTo(x, y, wR, hR, 10800000, -10800000)
  parts.push(a.svg)
  x = a.endX
  y = a.endY
  // Top-right: stAng=cd2(180°), swAng=+cd2(+180°) → bumps up
  a = arcTo(x, y, wR, hR, 10800000, 10800000)
  parts.push(a.svg)
  x = a.endX
  y = a.endY
  // Line to bottom-right
  const bx = 20 * sx
  const by = 18 * sy
  parts.push(`L${bx},${by}`)
  x = bx
  y = by
  // Bottom-right: stAng=0, swAng=-cd2(-180°) → bumps up
  a = arcTo(x, y, wR, hR, 0, -10800000)
  parts.push(a.svg)
  x = a.endX
  y = a.endY
  // Bottom-left: stAng=0, swAng=+cd2(+180°) → dips down
  a = arcTo(x, y, wR, hR, 0, 10800000)
  parts.push(a.svg)
  parts.push('Z')
  return parts.join(' ')
})

presetShapes.set('flowChartPunchedCard', (w, h) => {
  // OOXML spec: path w=5, h=5. Points: (0,1)(1,0)(5,0)(5,5)(0,5)
  const sx = w / 5
  const sy = h / 5
  return `M0,${sy} L${sx},0 L${w},0 L${w},${h} L0,${h} Z`
})

presetShapes.set('flowChartSummingJunction', (w, h) => {
  // OOXML: Circle with X cross. Returns single path with circle + X lines.
  const wd2 = w / 2
  const hd2 = h / 2
  const idx = wd2 * Math.cos(Math.PI / 4) // cos(45°)
  const idy = hd2 * Math.sin(Math.PI / 4)
  const il = wd2 - idx
  const ir = wd2 + idx
  const it = hd2 - idy
  const ib = hd2 + idy
  return [
    // Circle
    `M0,${hd2}`,
    `A${wd2},${hd2} 0 1,1 ${w},${hd2}`,
    `A${wd2},${hd2} 0 1,1 0,${hd2}`,
    'Z',
    // X cross
    `M${il},${it} L${ir},${ib}`,
    `M${ir},${it} L${il},${ib}`,
  ].join(' ')
})

presetShapes.set('flowChartOr', (w, h) => {
  // OOXML: Circle with + cross.
  const wd2 = w / 2
  const hd2 = h / 2
  return [
    // Circle
    `M0,${hd2}`,
    `A${wd2},${hd2} 0 1,1 ${w},${hd2}`,
    `A${wd2},${hd2} 0 1,1 0,${hd2}`,
    'Z',
    // + cross
    `M${wd2},0 L${wd2},${h}`,
    `M0,${hd2} L${w},${hd2}`,
  ].join(' ')
})

presetShapes.set('flowChartOnlineStorage', (w, h) => {
  // OOXML: Rounded left side rectangle with concave right cap.
  // Normalized: left arc (convex) at x=w/6, right arc (concave) at x=w
  const x1 = w / 6
  return [
    `M${x1},0`,
    `L${w},0`,
    `A${x1},${h / 2} 0 0,0 ${w},${h}`,
    `L${x1},${h}`,
    `A${x1},${h / 2} 0 0,1 ${x1},0`,
    'Z',
  ].join(' ')
})

presetShapes.set('flowChartMagneticDrum', (w, h) => {
  // OOXML: Horizontal cylinder (magnetic drum). Right ellipse cap visible.
  const x1 = w / 6
  const x2 = (w * 5) / 6
  const ry = h / 2
  return [
    // Body
    `M${x1},0`,
    `L${x2},0`,
    `A${x1},${ry} 0 0,1 ${x2},${h}`,
    `L${x1},${h}`,
    `A${x1},${ry} 0 0,1 ${x1},0`,
    'Z',
    // Right ellipse back-face (visible part)
    `M${x2},${h}`,
    `A${x1},${ry} 0 0,1 ${x2},0`,
  ].join(' ')
})

presetShapes.set('flowChartMagneticTape', (w, h) => {
  // OOXML: Nearly full ellipse (circle) with a tape tail to the bottom-right.
  // 3 quarter-arcs (270°) + partial arc of ang1 = at2(w,h) = atan2(h,w),
  // then line to (r, ib) → (r, b) → close.
  const wd2 = w / 2
  const hd2 = h / 2
  const hc = wd2
  const vc = hd2
  const ang1 = Math.atan2(h, w) // OOXML at2 w h = atan2(h, w)
  const ib = vc + hd2 * Math.sin(Math.PI / 4) // sin(45°) * hd2
  // arcTo helper: compute SVG arc from OOXML arcTo parameters
  const arcTo = (
    curX: number,
    curY: number,
    wR: number,
    hR: number,
    stDeg: number,
    swDeg: number
  ) => {
    const stRad = (stDeg * Math.PI) / 180
    const endRad = ((stDeg + swDeg) * Math.PI) / 180
    const cx = curX - wR * Math.cos(stRad)
    const cy = curY - hR * Math.sin(stRad)
    const endX = cx + wR * Math.cos(endRad)
    const endY = cy + hR * Math.sin(endRad)
    const largeArc = Math.abs(swDeg) > 180 ? 1 : 0
    const sweep = swDeg > 0 ? 1 : 0
    return { endX, endY, svg: `A${wR},${hR} 0 ${largeArc},${sweep} ${endX},${endY}` }
  }
  // Start at bottom center: M(hc, b)
  let curX = hc
  let curY = h
  const a1 = arcTo(curX, curY, wd2, hd2, 90, 90) // cd4, cd4 → 90° to 180°
  curX = a1.endX
  curY = a1.endY
  const a2 = arcTo(curX, curY, wd2, hd2, 180, 90) // cd2, cd4 → 180° to 270°
  curX = a2.endX
  curY = a2.endY
  const a3 = arcTo(curX, curY, wd2, hd2, 270, 90) // 3cd4, cd4 → 270° to 360°
  curX = a3.endX
  curY = a3.endY
  const ang1Deg = (ang1 * 180) / Math.PI
  const a4 = arcTo(curX, curY, wd2, hd2, 0, ang1Deg) // 0, ang1
  return [`M${hc},${h}`, a1.svg, a2.svg, a3.svg, a4.svg, `L${w},${ib}`, `L${w},${h}`, 'Z'].join(' ')
})

presetShapes.set('flowChartMultidocument', (w, h) => {
  // OOXML: 21600-unit coordinates. Three stacked documents with cubic bezier waves.
  const s = (x: number) => (w * x) / 21600
  const t = (y: number) => (h * y) / 21600
  return [
    // Front doc (bottom layer, with wave)
    `M0,${t(20782)}`,
    `C${s(9298)},${t(23542)} ${s(9298)},${t(18022)} ${s(18595)},${t(18022)}`,
    `L${s(18595)},${t(3675)} L0,${t(3675)} Z`,
    // Middle doc
    `M${s(1532)},${t(3675)} L${s(1532)},${t(1815)} L${s(20000)},${t(1815)}`,
    `L${s(20000)},${t(16252)}`,
    `C${s(19298)},${t(16252)} ${s(18595)},${t(16352)} ${s(18595)},${t(16352)}`,
    `L${s(18595)},${t(3675)} Z`,
    // Back doc (top layer)
    `M${s(2972)},${t(1815)} L${s(2972)},0 L${w},0`,
    `L${w},${t(14392)}`,
    `C${s(20800)},${t(14392)} ${s(20000)},${t(14467)} ${s(20000)},${t(14467)}`,
    `L${s(20000)},${t(1815)} Z`,
  ].join(' ')
})

// ===== Callout Shapes =====

presetShapes.set('wedgeRectCallout', (w, h, adjustments) => {
  // OOXML spec: adaptive callout pointer on the edge closest to the tip
  const hc = w / 2
  const vc = h / 2
  const dxPos = (w * (adjustments?.get('adj1') ?? -20833)) / 100000
  const dyPos = (h * (adjustments?.get('adj2') ?? 62500)) / 100000
  const xPos = hc + dxPos
  const yPos = vc + dyPos
  const dq = (dxPos * h) / w
  const ady = Math.abs(dyPos)
  const adq = Math.abs(dq)
  // dz > 0 ⇒ tip is closer to top/bottom edge; otherwise left/right edge.
  const dz = ady - adq
  // Notch bracket positions (7/12 or 2/12 depending on tip direction)
  const x1 = (w * (dxPos >= 0 ? 7 : 2)) / 12
  const x2 = (w * (dxPos >= 0 ? 10 : 5)) / 12
  const y1 = (h * (dyPos >= 0 ? 7 : 2)) / 12
  const y2 = (h * (dyPos >= 0 ? 10 : 5)) / 12
  // Conditional notch points per edge (collapse to edge if not the active edge)
  const xl = dz > 0 ? 0 : dxPos >= 0 ? 0 : xPos
  const xt = dz > 0 ? (dyPos >= 0 ? x1 : xPos) : x1
  const xr = dz > 0 ? w : dxPos >= 0 ? xPos : w
  const xb = dz > 0 ? (dyPos >= 0 ? xPos : x1) : x1
  const yl = dz > 0 ? y1 : dxPos >= 0 ? y1 : yPos
  const yt = dz > 0 ? (dyPos >= 0 ? 0 : yPos) : 0
  const yr = dz > 0 ? y1 : dxPos >= 0 ? yPos : y1
  const yb = dz > 0 ? (dyPos >= 0 ? yPos : h) : h
  return [
    `M0,0`,
    `L${x1},0`,
    `L${xt},${yt}`,
    `L${x2},0`,
    `L${w},0`,
    `L${w},${y1}`,
    `L${xr},${yr}`,
    `L${w},${y2}`,
    `L${w},${h}`,
    `L${x2},${h}`,
    `L${xb},${yb}`,
    `L${x1},${h}`,
    `L0,${h}`,
    `L0,${y2}`,
    `L${xl},${yl}`,
    `L0,${y1}`,
    'Z',
  ].join(' ')
})

presetShapes.set('wedgeRoundRectCallout', (w, h, adjustments) => {
  // OOXML spec: rounded rect with adaptive callout pointer
  const hc = w / 2
  const vc = h / 2
  const ss = Math.min(w, h)
  const dxPos = (w * (adjustments?.get('adj1') ?? -20833)) / 100000
  const dyPos = (h * (adjustments?.get('adj2') ?? 62500)) / 100000
  const u1 = (ss * (adjustments?.get('adj3') ?? 16667)) / 100000
  const xPos = hc + dxPos
  const yPos = vc + dyPos
  const dq = (dxPos * h) / w
  const ady = Math.abs(dyPos)
  const adq = Math.abs(dq)
  const dz = ady - adq
  const u2 = w - u1
  const v2 = h - u1
  const x1 = (w * (dxPos >= 0 ? 7 : 2)) / 12
  const x2 = (w * (dxPos >= 0 ? 10 : 5)) / 12
  const y1 = (h * (dyPos >= 0 ? 7 : 2)) / 12
  const y2 = (h * (dyPos >= 0 ? 10 : 5)) / 12
  const xl = dz > 0 ?
0 : dxPos >= 0 ? 0 : xPos + const xt = dz > 0 ? (dyPos >= 0 ? x1 : xPos) : x1 + const xr = dz > 0 ? w : dxPos >= 0 ? xPos : w + const xb = dz > 0 ? (dyPos >= 0 ? xPos : x1) : x1 + const yl = dz > 0 ? y1 : dxPos >= 0 ? y1 : yPos + const yt = dz > 0 ? (dyPos >= 0 ? 0 : yPos) : 0 + const yr = dz > 0 ? y1 : dxPos >= 0 ? yPos : y1 + const yb = dz > 0 ? (dyPos >= 0 ? yPos : h) : h + return [ + `M0,${u1}`, + `A${u1},${u1} 0 0,1 ${u1},0`, + `L${x1},0`, + `L${xt},${yt}`, + `L${x2},0`, + `L${u2},0`, + `A${u1},${u1} 0 0,1 ${w},${u1}`, + `L${w},${y1}`, + `L${xr},${yr}`, + `L${w},${y2}`, + `L${w},${v2}`, + `A${u1},${u1} 0 0,1 ${u2},${h}`, + `L${x2},${h}`, + `L${xb},${yb}`, + `L${x1},${h}`, + `L${u1},${h}`, + `A${u1},${u1} 0 0,1 0,${v2}`, + `L0,${y2}`, + `L${xl},${yl}`, + `L0,${y1}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('wedgeEllipseCallout', (w, h, adjustments) => { + const ax = adj(adjustments, 'adj1', -20833) + const ay = adj(adjustments, 'adj2', 62500) + const rx = w / 2 + const ry = h / 2 + const tipX = rx + w * ax + const tipY = ry + h * ay + // Approximate: ellipse with a pointer + const angle = Math.atan2(tipY - ry, tipX - rx) + const gapAngle = 0.15 + const _x1 = rx + rx * Math.cos(angle - gapAngle) + const _y1 = ry + ry * Math.sin(angle - gapAngle) + const _x2 = rx + rx * Math.cos(angle + gapAngle) + const _y2 = ry + ry * Math.sin(angle + gapAngle) + return [ + shapeArc( + rx, + ry, + rx, + ry, + ((angle + gapAngle) * 180) / Math.PI, + ((angle - gapAngle + 2 * Math.PI) * 180) / Math.PI, + false + ), + `L${tipX},${tipY}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('cloudCallout', (w, h, adjustments) => { + const ax = adj(adjustments, 'adj1', -20833) + const ay = adj(adjustments, 'adj2', 62500) + const tipX = w / 2 + w * ax + const tipY = h / 2 + h * ay + // Simplified cloud with callout circles + const cloud = presetShapes.get('cloud')!(w, h) + // Small circles leading to tip + const cx = w / 2 + const cy = h / 2 + const dx = tipX - cx + const dy = tipY - cy + 
const r1 = Math.min(w, h) * 0.04 + const r2 = Math.min(w, h) * 0.025 + const c1x = cx + dx * 0.5 + const c1y = cy + dy * 0.5 + const c2x = cx + dx * 0.75 + const c2y = cy + dy * 0.75 + return [ + cloud, + // Connector circles (approximated as small ellipses) + `M${c1x + r1},${c1y} A${r1},${r1} 0 1,1 ${c1x - r1},${c1y} A${r1},${r1} 0 1,1 ${c1x + r1},${c1y} Z`, + `M${c2x + r2},${c2y} A${r2},${r2} 0 1,1 ${c2x - r2},${c2y} A${r2},${r2} 0 1,1 ${c2x + r2},${c2y} Z`, + ].join(' ') +}) + +presetShapes.set('borderCallout1', (w, h, adjustments) => { + const y1 = (h * (adjustments?.get('adj1') ?? 18750)) / 100000 + const x1 = (w * (adjustments?.get('adj2') ?? -8333)) / 100000 + const y2 = (h * (adjustments?.get('adj3') ?? 112500)) / 100000 + const x2 = (w * (adjustments?.get('adj4') ?? -38333)) / 100000 + return `M0,0 L${w},0 L${w},${h} L0,${h} Z M${x1},${y1} L${x2},${y2}` +}) + +// ===== Block / 3D Shapes ===== + +presetShapes.set('cube', (w, h, adjustments) => { + const a = adj(adjustments, 'adj', 25000) + const depth = Math.min(w, h) * a + return [ + // Front face + `M0,${depth} L${w - depth},${depth} L${w - depth},${h} L0,${h} Z`, + // Top face + `M0,${depth} L${depth},0 L${w},0 L${w - depth},${depth} Z`, + // Right face + `M${w - depth},${depth} L${w},0 L${w},${h - depth} L${w - depth},${h} Z`, + ].join(' ') +}) + +// can is implemented as multiPathPreset (see multiPathPresets below) + +// ribbon2 is implemented as multiPathPreset (see multiPathPresets below) + +presetShapes.set('plus', (w, h, adjustments) => { + // OOXML: adj=25000 (max 50000), x1 = ss * a / 100000 (uses ss for both x and y) + const ss = Math.min(w, h) + const a = Math.min(Math.max(adjRaw(adjustments, 'adj', 25000), 0), 50000) + const x1 = (ss * a) / 100000 + const x2 = w - x1 + const y2 = h - x1 + return [ + `M0,${x1}`, + `L${x1},${x1}`, + `L${x1},0`, + `L${x2},0`, + `L${x2},${x1}`, + `L${w},${x1}`, + `L${w},${y2}`, + `L${x2},${y2}`, + `L${x2},${h}`, + `L${x1},${h}`, + `L${x1},${y2}`, + `L0,${y2}`, + 
'Z', + ].join(' ') +}) + +presetShapes.set('heart', (w, h) => { + // OOXML spec: two cubic beziers from (hc, hd4) through (hc, b) and back. + // dx1 = w*49/48 (slightly wider than w/2), dx2 = w*10/48 + // y1 = t - hd3 (above top edge) + const hc = w / 2 + const hd4 = h / 4 + const hd3 = h / 3 + const dx1 = (w * 49) / 48 + const dx2 = (w * 10) / 48 + const x1 = hc - dx1 // far left control + const x2 = hc - dx2 // inner left control + const x3 = hc + dx2 // inner right control + const x4 = hc + dx1 // far right control + const y1 = -hd3 // above top (negative y) + return [ + `M${hc},${hd4}`, + `C${x3},${y1} ${x4},${hd4} ${hc},${h}`, + `C${x1},${hd4} ${x2},${y1} ${hc},${hd4}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('cloud', (w, h) => { + // OOXML cloud: 11 arcTo operations in 43200×43200 coordinate space + const sx = w / 43200 + const sy = h / 43200 + // OOXML arcTo: wR/hR are radii, stAng/swAng in 60000ths of degree + const arcs: [number, number, number, number][] = [ + [6753, 9190, -11429249, 7426832], + [5333, 7267, -8646143, 5396714], + [4365, 5945, -8748475, 5983381], + [4857, 6595, -7859164, 7034504], + [5333, 7273, -4722533, 6541615], + [6775, 9220, -2776035, 7816140], + [5785, 7867, 37501, 6842000], + [6752, 9215, 1347096, 6910353], + [7720, 10543, 3974558, 4542661], + [4360, 5918, -16496525, 8804134], + [4345, 5945, -14809710, 9151131], + ] + let curX = 3900 * sx + let curY = 14370 * sy + const parts = [`M${curX},${curY}`] + // Track position in unscaled 43200×43200 space for accurate arcTo computation. + // OOXML arcTo angles are visual (geometric ray) angles in the path coordinate space. + // Convert to parametric before computing center/endpoint positions. 
+ let ux = 3900 + let uy = 14370 // unscaled current position + for (const [wR, hR, stAng60k, swAng60k] of arcs) { + const stDeg = stAng60k / 60000 + const swDeg = swAng60k / 60000 + // Visual→parametric using UNSCALED radii (path coordinate space) + const stVisRad = (stDeg * Math.PI) / 180 + const stRad = Math.atan2(wR * Math.sin(stVisRad), hR * Math.cos(stVisRad)) + const endVisRad = ((stDeg + swDeg) * Math.PI) / 180 + const endRad = Math.atan2(wR * Math.sin(endVisRad), hR * Math.cos(endVisRad)) + // Compute center and endpoint in unscaled space + const acx = ux - wR * Math.cos(stRad) + const acy = uy - hR * Math.sin(stRad) + const endUX = acx + wR * Math.cos(endRad) + const endUY = acy + hR * Math.sin(endRad) + // Scale to pixel space for SVG output + const endX = endUX * sx + const endY = endUY * sy + const rwS = wR * sx + const rhS = hR * sy + const largeArc = Math.abs(swDeg) > 180 ? 1 : 0 + const sweep = swDeg > 0 ? 1 : 0 + parts.push(`A${rwS},${rhS} 0 ${largeArc},${sweep} ${endX},${endY}`) + ux = endUX + uy = endUY + curX = endX + curY = endY + } + parts.push('Z') + return parts.join(' ') +}) + +// ===== Frame, Donut, Misc ===== + +presetShapes.set('frame', (w, h, adjustments) => { + const a = adj(adjustments, 'adj1', 12500) + const t = Math.min(w, h) * a + return [ + // Outer rectangle + `M0,0 L${w},0 L${w},${h} L0,${h} Z`, + // Inner rectangle (counter-clockwise for hole) + `M${t},${t} L${t},${h - t} L${w - t},${h - t} L${w - t},${t} Z`, + ].join(' ') +}) + +presetShapes.set('halfFrame', (w, h, adjustments) => { + // OOXML spec defaults: adj1=33333, adj2=33333 + const adj1Raw = adjustments?.get('adj1') ?? 33333 + const adj2Raw = adjustments?.get('adj2') ?? 
33333 + const minWH = Math.min(w, h) + const a2 = Math.max(0, Math.min(adj2Raw, (100000 * w) / Math.max(minWH, 1))) + const x1 = (minWH * a2) / 100000 + const g1 = (h * x1) / Math.max(w, 1) + const g2 = h - g1 + const a1 = Math.max(0, Math.min(adj1Raw, (100000 * g2) / Math.max(minWH, 1))) + const y1 = (minWH * a1) / 100000 + const x2 = w - (y1 * w) / Math.max(h, 1) + const y2 = h - (x1 * h) / Math.max(w, 1) + return [`M0,0`, `L${w},0`, `L${x2},${y1}`, `L${x1},${y1}`, `L${x1},${y2}`, `L0,${h}`, 'Z'].join( + ' ' + ) +}) + +presetShapes.set('donut', (w, h, adjustments) => { + // OOXML: adj=25000, dr = ss * a / 100000, inner radii = wd2-dr, hd2-dr + const ss = Math.min(w, h) + const a = Math.min(Math.max(adjRaw(adjustments, 'adj', 25000), 0), 50000) + const dr = (ss * a) / 100000 + const rx = w / 2 + const ry = h / 2 + const iwd2 = Math.max(0, rx - dr) + const ihd2 = Math.max(0, ry - dr) + return [ + // Outer circle (CW) + `M0,${ry}`, + `A${rx},${ry} 0 1,1 ${w},${ry}`, + `A${rx},${ry} 0 1,1 0,${ry}`, + 'Z', + // Inner circle (CCW for evenodd hole) + `M${dr},${ry}`, + `A${iwd2},${ihd2} 0 1,0 ${w - dr},${ry}`, + `A${iwd2},${ihd2} 0 1,0 ${dr},${ry}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('noSmoking', (w, h, adjustments) => { + // OOXML: adj=18750. Ring thickness = ss*a/100000. Diagonal band via inner ellipse arcs + evenodd. 
+ const ss = Math.min(w, h) + const a = Math.min(Math.max(adjRaw(adjustments, 'adj', 18750), 0), 50000) + const dr = (ss * a) / 100000 + const rx = w / 2 + const ry = h / 2 + const hc = w / 2 + const vc = h / 2 + const iwd2 = rx - dr + const ihd2 = ry - dr + // Compute diagonal angle and band intersection with inner ellipse + const ang = Math.atan2(h, w) // at2(w, h) in OOXML: at2 x y = atan2(y, x) + // Inner ellipse radius at diagonal angle + const ct = ihd2 * Math.cos(ang) + const st = iwd2 * Math.sin(ang) + const m = Math.sqrt(ct * ct + st * st) || 1 + const n = (iwd2 * ihd2) / m + const drd2 = dr / 2 + const dang = Math.atan2(drd2, n) + const dang2 = dang * 2 + // Sweep for inner arcs: -(180° - dang2) expressed as OOXML 60000ths then converted + const swAngRad = -(Math.PI - dang2) + const stAng1 = ang - dang + const stAng2 = stAng1 - Math.PI + // Compute points on inner ellipse for the two diagonal band arcs + const innerPt = (angle: number) => { + const ct2 = ihd2 * Math.cos(angle) + const st2 = iwd2 * Math.sin(angle) + const m2 = Math.sqrt(ct2 * ct2 + st2 * st2) || 1 + const n2 = (iwd2 * ihd2) / m2 + return { x: hc + n2 * Math.cos(angle), y: vc + n2 * Math.sin(angle) } + } + const p1 = innerPt(stAng1) + const p2 = innerPt(stAng2) + // End points of arcs + const endAng1 = stAng1 + swAngRad + const endAng2 = stAng2 + swAngRad + const e1 = innerPt(endAng1) + const e2 = innerPt(endAng2) + const largeArc = Math.abs(swAngRad) > Math.PI ? 1 : 0 + const sweep = swAngRad > 0 ? 
1 : 0 + return [ + // Outer circle (CW) + `M0,${vc}`, + `A${rx},${ry} 0 1,1 ${w},${vc}`, + `A${rx},${ry} 0 1,1 0,${vc}`, + 'Z', + // First diagonal band arc (inner ellipse) + `M${p1.x},${p1.y}`, + `A${iwd2},${ihd2} 0 ${largeArc},${sweep} ${e1.x},${e1.y}`, + 'Z', + // Second diagonal band arc (opposite quadrant) + `M${p2.x},${p2.y}`, + `A${iwd2},${ihd2} 0 ${largeArc},${sweep} ${e2.x},${e2.y}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('blockArc', (w, h, adjustments) => { + const adj1Raw = adjustments?.get('adj1') ?? 10800000 // start angle + const adj2Raw = adjustments?.get('adj2') ?? 0 // sweep/end angle + const adj3Raw = adjustments?.get('adj3') ?? 25000 // thickness ratio + const startDeg = Math.min(Math.max(adj1Raw / 60000, 0), 360) + const innerStartDeg = Math.min(Math.max(adj2Raw / 60000, 0), 360) + const sweepDeg = (innerStartDeg - startDeg + 360) % 360 || 360 + const endDeg = startDeg + sweepDeg + const innerEndDeg = innerStartDeg - sweepDeg + const wd2 = w / 2 + const hd2 = h / 2 + const dr = (Math.min(w, h) * Math.max(0, Math.min(adj3Raw, 50000))) / 100000 + const iwd2 = Math.max(1, wd2 - dr) + const ihd2 = Math.max(1, hd2 - dr) + const p = (cx: number, cy: number, rx: number, ry: number, deg: number) => { + const r = (deg * Math.PI) / 180 + return { x: cx + rx * Math.cos(r), y: cy + ry * Math.sin(r) } + } + const oStart = p(wd2, hd2, wd2, hd2, startDeg) + const oEnd = p(wd2, hd2, wd2, hd2, endDeg) + const iStart = p(wd2, hd2, iwd2, ihd2, innerStartDeg) + const iEnd = p(wd2, hd2, iwd2, ihd2, innerEndDeg) + const largeArc = sweepDeg > 180 ? 1 : 0 + + return [ + `M${oStart.x},${oStart.y}`, + `A${wd2},${hd2} 0 ${largeArc},1 ${oEnd.x},${oEnd.y}`, + `L${iStart.x},${iStart.y}`, + `A${iwd2},${ihd2} 0 ${largeArc},0 ${iEnd.x},${iEnd.y}`, + 'Z', + ].join(' ') +}) + +// ===== Gear Shapes ===== + +presetShapes.set('gear6', (w, h, adjustments) => { + const a1 = adjustments?.get('adj1') ?? 15000 + const a2 = adjustments?.get('adj2') ?? 
3526 + return gearShape(w, h, 6, a1, a2) +}) + +presetShapes.set('gear9', (w, h, adjustments) => { + const a1 = adjustments?.get('adj1') ?? 10000 + const a2 = adjustments?.get('adj2') ?? 1763 + return gearShape(w, h, 9, a1, a2) +}) + +function gearShape(w: number, h: number, teeth: number, adj1Raw: number, adj2Raw: number): string { + // Gear shape: teeth protrude from inner ellipse by th, narrowed by lFD at tips. + // Uses per-tooth edge-perpendicular computation for B/C tip direction. + const cx = w / 2 + const cy = h / 2 + const ss = Math.min(w, h) + const maxAdj2 = teeth === 6 ? 5358 : 2679 + const a1v = Math.min(Math.max(adj1Raw, 0), 20000) + const a2v = Math.min(Math.max(adj2Raw, 0), maxAdj2) + const th = (ss * a1v) / 100000 // tooth height + const lFD = (ss * a2v) / 100000 // tooth flat distance offset + + const rw = w / 2 - th // inner ellipse width radius + const rh = h / 2 - th // inner ellipse height radius + if (rw <= 0 || rh <= 0) return `M0,0 L${w},0 L${w},${h} L0,${h} Z` + + // OOXML: ha = at2(maxr, l3) where maxr=min(rw,rh), l3=th/2+lFD/2 + const l3 = th / 2 + lFD / 2 + const maxr = Math.min(rw, rh) + const ha = Math.atan2(l3, maxr) // half-angle of each tooth on the inner ellipse + + const centerDegs = + teeth === 6 ? 
[330, 30, 90, 150, 210, 270] : [310, 350, 30, 70, 110, 150, 190, 230, 270] + + const parts: string[] = [] + + for (let i = 0; i < centerDegs.length; i++) { + const baseAngle = (centerDegs[i] * Math.PI) / 180 + const aStart = baseAngle - ha // tooth base start angle (A point) + const aEnd = baseAngle + ha // tooth base end angle (D point) + + // A and D: inner ellipse points at tooth base edges + const ax = cx + rw * Math.cos(aStart) + const ay = cy + rh * Math.sin(aStart) + const dx = cx + rw * Math.cos(aEnd) + const dy = cy + rh * Math.sin(aEnd) + + // Per-tooth edge-perpendicular tip computation: + // Edge direction A→D + const edgeX = dx - ax + const edgeY = dy - ay + const edgeLen = Math.sqrt(edgeX * edgeX + edgeY * edgeY) + + // Unit normal perpendicular to edge, pointing outward + // For clockwise winding (our standard), outward normal is (-edgeY, edgeX) / len + // Verify with radial dot product and flip if needed + let nx = -edgeY / edgeLen + let ny = edgeX / edgeLen + const radX = Math.cos(baseAngle) + const radY = Math.sin(baseAngle) + if (nx * radX + ny * radY < 0) { + nx = -nx + ny = -ny + } + + // Narrowing: slide A and D inward along edge by lFD + const ex = edgeLen > 0 ? edgeX / edgeLen : 0 + const ey = edgeLen > 0 ? 
edgeY / edgeLen : 0 + const axN = ax + ex * lFD // A narrowed (moved toward D) + const ayN = ay + ey * lFD + const dxN = dx - ex * lFD // D narrowed (moved toward A) + const dyN = dy - ey * lFD + + // B and C: tip points = narrowed base + th * outward normal + const bx = axN + nx * th + const by = ayN + ny * th + const _cx = dxN + nx * th + const _cy = dyN + ny * th + + if (i === 0) { + // Start at the valley before first tooth + const prevEnd = (centerDegs[centerDegs.length - 1] * Math.PI) / 180 + ha + const prevIx = cx + rw * Math.cos(prevEnd) + const prevIy = cy + rh * Math.sin(prevEnd) + parts.push(`M${prevIx},${prevIy}`) + parts.push(`A${rw},${rh} 0 0,1 ${ax},${ay}`) + } + + // Tooth: A→B→C→D + parts.push(`L${bx},${by}`) + parts.push(`L${_cx},${_cy}`) + parts.push(`L${dx},${dy}`) + + // Arc along inner ring to next tooth + if (i < centerDegs.length - 1) { + const nextStart = (centerDegs[i + 1] * Math.PI) / 180 - ha + const nx2 = cx + rw * Math.cos(nextStart) + const ny2 = cy + rh * Math.sin(nextStart) + parts.push(`A${rw},${rh} 0 0,1 ${nx2},${ny2}`) + } + } + parts.push('Z') + return parts.join(' ') +} + +// ===== Misc Shapes ===== + +presetShapes.set('mathPlus', (w, h, adjustments) => { + // OOXML: adj1=23520 (max 73490). 
dx1 = w*73490/200000, dx2 = ss*a/200000 + const ss = Math.min(w, h) + const a1 = Math.min(Math.max(adjRaw(adjustments, 'adj', 23520), 0), 73490) + const dx1 = (w * 73490) / 200000 + const dy1 = (h * 73490) / 200000 + const dx2 = (ss * a1) / 200000 + const hc = w / 2 + const vc = h / 2 + const x1 = hc - dx1 + const x2 = hc - dx2 + const x3 = hc + dx2 + const x4 = hc + dx1 + const y1 = vc - dy1 + const y2 = vc - dx2 + const y3 = vc + dx2 + const y4 = vc + dy1 + return [ + `M${x1},${y2}`, + `L${x2},${y2}`, + `L${x2},${y1}`, + `L${x3},${y1}`, + `L${x3},${y2}`, + `L${x4},${y2}`, + `L${x4},${y3}`, + `L${x3},${y3}`, + `L${x3},${y4}`, + `L${x2},${y4}`, + `L${x2},${y3}`, + `L${x1},${y3}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('mathMinus', (w, h, adjustments) => { + // OOXML: adj1=23520 (max 100000). dy1 = h*a1/200000, dx1 = w*73490/200000 + const a1 = Math.min(Math.max(adjRaw(adjustments, 'adj1', 23520), 0), 100000) + const dy1 = (h * a1) / 200000 + const dx1 = (w * 73490) / 200000 + const hc = w / 2 + const vc = h / 2 + const x1 = hc - dx1 + const x2 = hc + dx1 + const y1 = vc - dy1 + const y2 = vc + dy1 + return `M${x1},${y1} L${x2},${y1} L${x2},${y2} L${x1},${y2} Z` +}) + +presetShapes.set('mathMultiply', (w, h, adjustments) => { + // OOXML: adj1=23520 (max 51965). X shape with diagonal arms. + // Key: a = at2 w h → atan2(w, h), coordinates are absolute from top-left. 
+ const ss = Math.min(w, h) + const hc = w / 2 + const vc = h / 2 + const a1 = Math.min(Math.max(adjRaw(adjustments, 'adj1', 23520), 0), 51965) + const th = (ss * a1) / 100000 + const a = Math.atan2(h, w) + const sa = Math.sin(a) + const ca = Math.cos(a) + const ta = sa / ca // tan(a) + const dl = Math.sqrt(w * w + h * h) + const rw = (dl * 51965) / 100000 + const lM = dl - rw + // xM, yM: half-distance along the diagonal from the outer tip to the outer tip + const xM = (ca * lM) / 2 + const yM = (sa * lM) / 2 + // Perpendicular offset for arm thickness + const dxAM = (sa * th) / 2 + const dyAM = (ca * th) / 2 + // xA, yA = upper-left outer tip (left side of arm), coordinates from (0,0) + const xA = xM - dxAM + const yA = yM + dyAM + const xB = xM + dxAM + const yB = yM - dyAM + // yC = center notch: where the inner edge of one arm meets the inner edge of the other + const xBC = hc - xB + const yBC = xBC * ta + const yC = yBC + yB + // Mirror points for upper-right quadrant + const xD = w - xB + const xE = w - xA + // xF: where the arm inner edge meets vc (center y) + const yFE = vc - yA + const xFE = yFE / ta + const xF = xE - xFE + const xL = xA + xFE + // Bottom half mirrors + const yG = h - yA + const yH = h - yB + const yI = h - yC + return [ + `M${xA},${yA}`, + `L${xB},${yB}`, + `L${hc},${yC}`, + `L${xD},${yB}`, + `L${xE},${yA}`, + `L${xF},${vc}`, + `L${xE},${yG}`, + `L${xD},${yH}`, + `L${hc},${yI}`, + `L${xB},${yH}`, + `L${xA},${yG}`, + `L${xL},${vc}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('mathDivide', (w, h, adjustments) => { + const adj1 = adjustments?.get('adj1') ?? 23520 + const adj2 = adjustments?.get('adj2') ?? 5880 + const adj3 = adjustments?.get('adj3') ?? 
11760 + + const a1 = Math.min(Math.max(adj1, 1000), 36745) + const maxAdj3 = Math.min((73490 - a1) / 4, (36745 * w) / Math.max(h, 1)) + const a3 = Math.min(Math.max(adj3, 1000), maxAdj3) + const maxAdj2 = 73490 - 4 * a3 - a1 + const a2 = Math.min(Math.max(adj2, 0), maxAdj2) + + const hc = w / 2 + const vc = h / 2 + const dy1 = (h * a1) / 200000 + const yg = (h * a2) / 100000 + const rad = (h * a3) / 100000 + const dx1 = (w * 73490) / 200000 + const y3 = vc - dy1 + const y4 = vc + dy1 + const y2 = y3 - (yg + rad) + const y1 = y2 - rad + const y5 = h - y1 + const x1 = hc - dx1 + const x3 = hc + dx1 + + return [ + // Top dot + `M${hc + rad},${y1 + rad} A${rad},${rad} 0 1,1 ${hc - rad},${y1 + rad} A${rad},${rad} 0 1,1 ${hc + rad},${y1 + rad} Z`, + // Bottom dot + `M${hc + rad},${y5 - rad} A${rad},${rad} 0 1,1 ${hc - rad},${y5 - rad} A${rad},${rad} 0 1,1 ${hc + rad},${y5 - rad} Z`, + // Bar + `M${x1},${y3} L${x3},${y3} L${x3},${y4} L${x1},${y4} Z`, + ].join(' ') +}) + +presetShapes.set('mathEqual', (w, h, adjustments) => { + // OOXML: adj1=23520 (bar thickness, max 36745), adj2=11760 (gap, max 100000-2*a1) + const adj1Raw = adjustments?.get('adj1') ?? 23520 + const adj2Raw = adjustments?.get('adj2') ?? 
11760 + const a1 = Math.min(Math.max(adj1Raw, 0), 36745) + const mAdj2 = 100000 - a1 * 2 + const a2 = Math.min(Math.max(adj2Raw, 0), Math.max(mAdj2, 0)) + const dy1 = (h * a1) / 100000 + const dy2 = (h * a2) / 200000 + const dx1 = (w * 73490) / 200000 + const hc = w / 2 + const vc = h / 2 + const y2 = vc - dy2 // center of top bar + const y3 = vc + dy2 // center of bottom bar + const y1 = y2 - dy1 // top of top bar + const y4 = y3 + dy1 // bottom of bottom bar + const x1 = hc - dx1 + const x2 = hc + dx1 + return [ + `M${x1},${y1} L${x2},${y1} L${x2},${y2} L${x1},${y2} Z`, + `M${x1},${y3} L${x2},${y3} L${x2},${y4} L${x1},${y4} Z`, + ].join(' ') +}) + +presetShapes.set('mathNotEqual', (w, h, adjustments) => { + // Follow OOXML mathNotEqual geometry (single closed contour), which keeps + // bar thickness/slash width and intersections aligned with PowerPoint. + const adj1Raw = adjustments?.get('adj1') ?? 23520 + const adj2Raw = adjustments?.get('adj2') + const adj3Raw = adjustments?.get('adj3') ?? 
11760 + + const hc = w / 2 + const vc = h / 2 + const hd2 = h / 2 + + const a1 = Math.min(Math.max(adj1Raw, 0), 50000) + const crAng = (() => { + if (adj2Raw === undefined) return (110 * Math.PI) / 180 + const rad = ((adj2Raw / 60000) * Math.PI) / 180 + const min = (70 * Math.PI) / 180 + const max = (110 * Math.PI) / 180 + return Math.min(Math.max(rad, min), max) + })() + + const maxAdj3 = 100000 - a1 * 2 + const a3 = Math.min(Math.max(adj3Raw, 0), maxAdj3) + + const dy1 = (h * a1) / 100000 + const dy2 = (h * a3) / 200000 + const dx1 = (w * 73490) / 200000 + const x1 = hc - dx1 + const x8 = hc + dx1 + const y2 = vc - dy2 + const y3 = vc + dy2 + const y1 = y2 - dy1 + const y4 = y3 + dy1 + + const cadj2 = crAng - Math.PI / 2 + const xadj2 = hd2 * Math.tan(cadj2) + const len = Math.hypot(xadj2, hd2) || 1 + const bhw = (len * dy1) / hd2 + const bhw2 = bhw / 2 + const x7 = hc + xadj2 - bhw2 + const x6 = x7 - (xadj2 * y1) / hd2 + const x5 = x7 - (xadj2 * y2) / hd2 + const x4 = x7 - (xadj2 * y3) / hd2 + const x3 = x7 - (xadj2 * y4) / hd2 + const rx7 = x7 + bhw + const rx6 = x6 + bhw + const rx5 = x5 + bhw + const rx4 = x4 + bhw + const rx3 = x3 + bhw + + const dx7 = (dy1 * hd2) / len + const rx = cadj2 > 0 ? x7 + dx7 : rx7 + const lx = cadj2 > 0 ? x7 : rx7 - dx7 + const dy3 = (dy1 * xadj2) / len + const ry = cadj2 > 0 ? dy3 : 0 + const ly = cadj2 > 0 ? 
0 : -dy3 + const dlx = w - rx + const drx = w - lx + const dly = h - ry + const dry = h - ly + + return [ + `M${x1},${y1}`, + `L${x6},${y1}`, + `L${lx},${ly}`, + `L${rx},${ry}`, + `L${rx6},${y1}`, + `L${x8},${y1}`, + `L${x8},${y2}`, + `L${rx5},${y2}`, + `L${rx4},${y3}`, + `L${x8},${y3}`, + `L${x8},${y4}`, + `L${rx3},${y4}`, + `L${drx},${dry}`, + `L${dlx},${dly}`, + `L${x3},${y4}`, + `L${x1},${y4}`, + `L${x1},${y3}`, + `L${x4},${y3}`, + `L${x5},${y2}`, + `L${x1},${y2}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('round1Rect', (w, h, adjustments) => { + const a = adj(adjustments, 'adj', 16667) + const r = Math.min(w, h) * a + return [`M0,0`, `L${w - r},0`, `A${r},${r} 0 0,1 ${w},${r}`, `L${w},${h}`, `L0,${h}`, 'Z'].join( + ' ' + ) +}) + +presetShapes.set('round2SameRect', (w, h, adjustments) => { + const a1 = adj(adjustments, 'adj1', 16667) + const a2 = adj(adjustments, 'adj2', 0) + const r1 = Math.min(w, h) * a1 + const r2 = Math.min(w, h) * a2 + return [ + `M${r1},0`, + `L${w - r1},0`, + `A${r1},${r1} 0 0,1 ${w},${r1}`, + `L${w},${h - r2}`, + `A${r2},${r2} 0 0,1 ${w - r2},${h}`, + `L${r2},${h}`, + `A${r2},${r2} 0 0,1 0,${h - r2}`, + `L0,${r1}`, + `A${r1},${r1} 0 0,1 ${r1},0`, + 'Z', + ].join(' ') +}) + +presetShapes.set('round2DiagRect', (w, h, adjustments) => { + const a1 = adj(adjustments, 'adj1', 16667) + const a2 = adj(adjustments, 'adj2', 0) + const r1 = Math.min(w, h) * a1 + const r2 = Math.min(w, h) * a2 + return [ + `M${r1},0`, + `L${w},0`, + `L${w},${h - r2}`, + `A${r2},${r2} 0 0,1 ${w - r2},${h}`, + `L0,${h}`, + `L0,${r1}`, + `A${r1},${r1} 0 0,1 ${r1},0`, + 'Z', + ].join(' ') +}) + +presetShapes.set('snip1Rect', (w, h, adjustments) => { + const a = adj(adjustments, 'adj', 16667) + const d = Math.min(w, h) * a + return `M0,0 L${w - d},0 L${w},${d} L${w},${h} L0,${h} Z` +}) + +presetShapes.set('snip2SameRect', (w, h, adjustments) => { + const a1 = adj(adjustments, 'adj1', 16667) + const a2 = adj(adjustments, 'adj2', 0) + const d1 = Math.min(w, h) * a1 + 
const d2 = Math.min(w, h) * a2 + return `M${d1},0 L${w - d1},0 L${w},${d1} L${w},${h - d2} L${w - d2},${h} L${d2},${h} L0,${h - d2} L0,${d1} Z` +}) + +presetShapes.set('snip2DiagRect', (w, h, adjustments) => { + // OOXML spec: diagonal snipped rectangle. adj1=top-left/bottom-right, adj2=top-right/bottom-left + const ss = Math.min(w, h) + const a1 = Math.min(Math.max(adjustments?.get('adj1') ?? 0, 0), 50000) + const a2 = Math.min(Math.max(adjustments?.get('adj2') ?? 16667, 0), 50000) + const lx1 = (ss * a1) / 100000 + const lx2 = w - lx1 + const ly1 = h - lx1 + const rx1 = (ss * a2) / 100000 + const rx2 = w - rx1 + const ry1 = h - rx1 + return `M${lx1},0 L${rx2},0 L${w},${rx1} L${w},${ly1} L${lx2},${h} L${rx1},${h} L0,${ry1} L0,${lx1} Z` +}) + +presetShapes.set('snipRoundRect', (w, h, adjustments) => { + const a1 = adj(adjustments, 'adj1', 16667) + const a2 = adj(adjustments, 'adj2', 16667) + const r = Math.min(w, h) * a1 + const d = Math.min(w, h) * a2 + return [ + `M${r},0`, + `L${w - d},0`, + `L${w},${d}`, + `L${w},${h}`, + `L0,${h}`, + `L0,${r}`, + `A${r},${r} 0 0,1 ${r},0`, + 'Z', + ].join(' ') +}) + +presetShapes.set('bevel', (w, h, adjustments) => { + const a = adj(adjustments, 'adj', 12500) + const t = Math.min(w, h) * a + return [ + // Outer + `M0,0 L${w},0 L${w},${h} L0,${h} Z`, + // Inner + `M${t},${t} L${t},${h - t} L${w - t},${h - t} L${w - t},${t} Z`, + // Connecting triangles (top) + `M0,0 L${w},0 L${w - t},${t} L${t},${t} Z`, + // Right + `M${w},0 L${w},${h} L${w - t},${h - t} L${w - t},${t} Z`, + // Bottom + `M${w},${h} L0,${h} L${t},${h - t} L${w - t},${h - t} Z`, + // Left + `M0,${h} L0,0 L${t},${t} L${t},${h - t} Z`, + ].join(' ') +}) + +presetShapes.set('foldedCorner', (w, h, adjustments) => { + const a = adj(adjustments, 'adj', 16667) + const fold = Math.min(w, h) * a * 0.7 + return [ + `M0,0 L${w},0 L${w},${h} L0,${h} Z`, + // Fold triangle + `M${w - fold},${h} L${w},${h} L${w},${h - fold}`, + ].join(' ') +}) + +// smileyFace is implemented as 
multiPathPreset (see multiPathPresets below) + +presetShapes.set('sun', (w, h, adjustments) => { + // OOXML spec: adj default=25000, pinned 12500..46875 + const adjRaw = adjustments?.get('adj') ?? 25000 + const a = Math.min(Math.max(adjRaw, 12500), 46875) + const g0 = 50000 - a + // OOXML guide formulas + const g1 = (g0 * 30274) / 32768 + const g2 = (g0 * 12540) / 32768 + const _g3 = g1 + 50000 + const _g4 = g2 + 50000 + const g5 = 50000 - g1 + const g6 = 50000 - g2 + const g7 = (g0 * 23170) / 32768 + const g8 = 50000 + g7 + const g9 = 50000 - g7 + const g10 = (g5 * 3) / 4 + const g11 = (g6 * 3) / 4 + const g12 = g10 + 3662 + const g13 = g11 + 3662 + const g14 = g11 + 12500 + const g15 = 100000 - g10 + const g16 = 100000 - g12 + const g17 = 100000 - g13 + const g18 = 100000 - g14 + // Pixel coordinates + const hc = w / 2 + const vc = h / 2 + const ox1 = (w * 18436) / 21600 + const oy1 = (h * 3163) / 21600 + const ox2 = (w * 3163) / 21600 + const oy2 = (h * 18436) / 21600 + const s = (pct: number, dim: number) => (dim * pct) / 100000 + const _x8 = s(g8, w) + const _x9 = s(g9, w) + const x10 = s(g10, w) + const x12 = s(g12, w) + const x13 = s(g13, w) + const x14 = s(g14, w) + const x15 = s(g15, w) + const x16 = s(g16, w) + const x17 = s(g17, w) + const x18 = s(g18, w) + const wR = s(g0, w) + const hR = s(g0, h) + const _y8 = s(g8, h) + const _y9 = s(g9, h) + const y10 = s(g10, h) + const y12 = s(g12, h) + const y13 = s(g13, h) + const y14 = s(g14, h) + const y15 = s(g15, h) + const y16 = s(g16, h) + const y17 = s(g17, h) + const y18 = s(g18, h) + const x19 = s(a, w) + return [ + // Ray 0: right + `M${w},${vc} L${x15},${y18} L${x15},${y14} Z`, + // Ray 1: top-right + `M${ox1},${oy1} L${x16},${y13} L${x17},${y12} Z`, + // Ray 2: top + `M${hc},0 L${x18},${y10} L${x14},${y10} Z`, + // Ray 3: top-left + `M${ox2},${oy1} L${x13},${y12} L${x12},${y13} Z`, + // Ray 4: left + `M0,${vc} L${x10},${y14} L${x10},${y18} Z`, + // Ray 5: bottom-left + `M${ox2},${oy2} L${x12},${y17} 
L${x13},${y16} Z`, + // Ray 6: bottom + `M${hc},${h} L${x14},${y15} L${x18},${y15} Z`, + // Ray 7: bottom-right + `M${ox1},${oy2} L${x17},${y16} L${x16},${y17} Z`, + // Center ellipse (arcTo from x19,vc with wR,hR, startAngle=180°, sweep=360°) + `M${x19},${vc}`, + `A${wR},${hR} 0 1,1 ${x19 + 2 * wR},${vc}`, + `A${wR},${hR} 0 1,1 ${x19},${vc}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('moon', (w, h, adjustments) => { + if (w <= 0 || h <= 0) return `M0,0 L${w},0 L${w},${h} L0,${h} Z` + // OOXML moon: outer semicircle (rx=w, ry=h/2) + inner semicircle (rx=g18w, ry=dy1). + // Both arcs share endpoints (w,0) and (w,h). Inner ellipse centered at (g0w+g18w, h/2). + const ss = Math.min(w, h) + const hd2 = h / 2 + const a = Math.min(Math.max(adjustments?.get('adj') ?? 50000, 0), 87500) + const g0 = (ss * a) / 100000 + const g1 = ss - g0 + if (g1 <= 0) return `M0,0 L${w},0 L${w},${h} L0,${h} Z` + const g0w = (g0 * w) / ss + const g5 = (2 * ss * ss - g0 * g0) / g1 + const g6w = ((g5 - g0) * w) / ss + const g8 = g5 / 2 - g0 + const dy1 = (g8 * hd2) / ss + const g18w = (g6w - g0w) / 2 + return [ + `M${w},${h}`, + `A${w},${hd2} 0 0,1 ${w},0`, // outer: (w,h) → left semicircle → (w,0) + `A${g18w},${dy1} 0 0,0 ${w},${h}`, // inner: (w,0) → concave arc → (w,h) + 'Z', + ].join(' ') +}) + +presetShapes.set('lightningBolt', (w, h) => { + // Calibrated against OOXML preset rendering (PowerPoint PDF export): + // the old simplified 7-point bolt was too wide and lacked the inner notches. + // This normalized 11-point contour follows the default lightningBolt geometry. 
+ return [ + `M${w * 0.3895},${h * 0.0}`, + `L${w * 0.0},${h * 0.1821}`, + `L${w * 0.3425},${h * 0.3845}`, + `L${w * 0.2265},${h * 0.4452}`, + `L${w * 0.5497},${h * 0.6391}`, + `L${w * 0.453},${h * 0.683}`, + `L${w * 0.9972},${h * 0.9983}`, + `L${w * 0.6796},${h * 0.5919}`, + `L${w * 0.7624},${h * 0.5514}`, + `L${w * 0.5138},${h * 0.3153}`, + `L${w * 0.5939},${h * 0.2816}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('bracketPair', (w, h, adjustments) => { + // OOXML: adj=16667 (max 50000), radius = ss * a / 100000 + const ss = Math.min(w, h) + const a = Math.min(Math.max(adjRaw(adjustments, 'adj', 16667), 0), 50000) + const r = (ss * a) / 100000 + const x2 = w - r + const y2 = h - r + return [ + // Left bracket: bottom-left arc → vertical → top-left arc + `M${r},${h}`, + `A${r},${r} 0 0,1 0,${y2}`, + `L0,${r}`, + `A${r},${r} 0 0,1 ${r},0`, + // Right bracket: top-right arc → vertical → bottom-right arc + `M${x2},0`, + `A${r},${r} 0 0,1 ${w},${r}`, + `L${w},${y2}`, + `A${r},${r} 0 0,1 ${x2},${h}`, + ].join(' ') +}) + +presetShapes.set('bracePair', (w, h, adjustments) => { + const a = adj(adjustments, 'adj', 8333) + const r = Math.min(w, h) * a + const cy = h / 2 + return [ + // Left brace + `M${r * 2},0`, + `A${r},${r} 0 0,0 ${r},${r}`, + `L${r},${cy - r}`, + `A${r},${r} 0 0,1 0,${cy}`, + `A${r},${r} 0 0,1 ${r},${cy + r}`, + `L${r},${h - r}`, + `A${r},${r} 0 0,0 ${r * 2},${h}`, + // Right brace + `M${w - r * 2},0`, + `A${r},${r} 0 0,1 ${w - r},${r}`, + `L${w - r},${cy - r}`, + `A${r},${r} 0 0,0 ${w},${cy}`, + `A${r},${r} 0 0,0 ${w - r},${cy + r}`, + `L${w - r},${h - r}`, + `A${r},${r} 0 0,1 ${w - r * 2},${h}`, + ].join(' ') +}) + +presetShapes.set('leftBracket', (w, h, adjustments) => { + const ss = Math.min(w, h) + const maxAdj = ss > 0 ? (50000 * h) / ss : 0 + const a = Math.max(0, Math.min(adjustments?.get('adj') ?? 
8333, maxAdj)) + const y1 = (ss * a) / 100000 + const toDeg = (ooxmlAng: number) => ooxmlAng / 60000 + const arcFrom = ( + x0: number, + y0: number, + rx: number, + ry: number, + stAng: number, + swAng: number + ) => { + const st = (toDeg(stAng) * Math.PI) / 180 + const sw = (toDeg(swAng) * Math.PI) / 180 + const cx = x0 - rx * Math.cos(st) + const cy = y0 - ry * Math.sin(st) + const x1 = cx + rx * Math.cos(st + sw) + const y1p = cy + ry * Math.sin(st + sw) + const large = Math.abs(toDeg(swAng)) > 180 ? 1 : 0 + const sweep = swAng >= 0 ? 1 : 0 + return { cmd: `A${rx},${ry} 0 ${large},${sweep} ${x1},${y1p}`, x: x1, y: y1p } + } + + const a1 = arcFrom(w, h, w, y1, 5400000, 5400000) // cd4, cd4 + const a2 = arcFrom(0, y1, w, y1, 10800000, 5400000) // cd2, cd4 + return [`M${w},${h}`, a1.cmd, `L0,${y1}`, a2.cmd].join(' ') +}) + +presetShapes.set('rightBracket', (w, h, adjustments) => { + const ss = Math.min(w, h) + const maxAdj = ss > 0 ? (50000 * h) / ss : 0 + const a = Math.max(0, Math.min(adjustments?.get('adj') ?? 8333, maxAdj)) + const y1 = (ss * a) / 100000 + const y2 = h - y1 + const toDeg = (ooxmlAng: number) => ooxmlAng / 60000 + const arcFrom = ( + x0: number, + y0: number, + rx: number, + ry: number, + stAng: number, + swAng: number + ) => { + const st = (toDeg(stAng) * Math.PI) / 180 + const sw = (toDeg(swAng) * Math.PI) / 180 + const cx = x0 - rx * Math.cos(st) + const cy = y0 - ry * Math.sin(st) + const x1 = cx + rx * Math.cos(st + sw) + const y1p = cy + ry * Math.sin(st + sw) + const large = Math.abs(toDeg(swAng)) > 180 ? 1 : 0 + const sweep = swAng >= 0 ? 
1 : 0 + return { cmd: `A${rx},${ry} 0 ${large},${sweep} ${x1},${y1p}`, x: x1, y: y1p } + } + + const a1 = arcFrom(0, 0, w, y1, 16200000, 5400000) // 3cd4, cd4 + const a2 = arcFrom(w, y2, w, y1, 0, 5400000) // 0, cd4 + return [`M0,0`, a1.cmd, `L${w},${y2}`, a2.cmd].join(' ') +}) + +presetShapes.set('leftBrace', (w, h, adjustments) => { + const ss = Math.min(w, h) + const a2 = Math.max(0, Math.min(adjustments?.get('adj2') ?? 50000, 100000)) + const q1 = 100000 - a2 + const q2 = Math.min(q1, a2) + const q3 = q2 / 2 + const maxAdj1 = ss > 0 ? (q3 * h) / ss : 0 + const a1 = Math.max(0, Math.min(adjustments?.get('adj1') ?? 8333, maxAdj1)) + const y1 = (ss * a1) / 100000 + const y3 = (h * a2) / 100000 + const y4 = y3 + y1 + const wd2 = w / 2 + const hc = w / 2 + const toDeg = (ooxmlAng: number) => ooxmlAng / 60000 + const arcFrom = ( + x0: number, + y0: number, + rx: number, + ry: number, + stAng: number, + swAng: number + ) => { + const st = (toDeg(stAng) * Math.PI) / 180 + const sw = (toDeg(swAng) * Math.PI) / 180 + const cx = x0 - rx * Math.cos(st) + const cy = y0 - ry * Math.sin(st) + const x1 = cx + rx * Math.cos(st + sw) + const y1p = cy + ry * Math.sin(st + sw) + const large = Math.abs(toDeg(swAng)) > 180 ? 1 : 0 + const sweep = swAng >= 0 ? 1 : 0 + return { cmd: `A${rx},${ry} 0 ${large},${sweep} ${x1},${y1p}`, x: x1, y: y1p } + } + + let x = w + let y = h + const aTop = arcFrom(x, y, wd2, y1, 5400000, 5400000) // cd4, cd4 + x = aTop.x + y = aTop.y + const aMid1 = arcFrom(hc, y4, wd2, y1, 0, -5400000) + const aMid2 = arcFrom(aMid1.x, aMid1.y, wd2, y1, 5400000, -5400000) + const aBot = arcFrom(hc, y1, wd2, y1, 10800000, 5400000) // cd2, cd4 + return [ + `M${w},${h}`, + aTop.cmd, + `L${hc},${y4}`, + aMid1.cmd, + aMid2.cmd, + `L${hc},${y1}`, + aBot.cmd, + ].join(' ') +}) + +presetShapes.set('rightBrace', (w, h, adjustments) => { + const ss = Math.min(w, h) + const a2 = Math.max(0, Math.min(adjustments?.get('adj2') ?? 
50000, 100000)) + const q1 = 100000 - a2 + const q2 = Math.min(q1, a2) + const q3 = q2 / 2 + const maxAdj1 = ss > 0 ? (q3 * h) / ss : 0 + const a1 = Math.max(0, Math.min(adjustments?.get('adj1') ?? 8333, maxAdj1)) + const y1 = (ss * a1) / 100000 + const y3 = (h * a2) / 100000 + const y2 = y3 - y1 + const y4 = h - y1 + const wd2 = w / 2 + const hc = w / 2 + const toDeg = (ooxmlAng: number) => ooxmlAng / 60000 + const arcFrom = ( + x0: number, + y0: number, + rx: number, + ry: number, + stAng: number, + swAng: number + ) => { + const st = (toDeg(stAng) * Math.PI) / 180 + const sw = (toDeg(swAng) * Math.PI) / 180 + const cx = x0 - rx * Math.cos(st) + const cy = y0 - ry * Math.sin(st) + const x1 = cx + rx * Math.cos(st + sw) + const y1p = cy + ry * Math.sin(st + sw) + const large = Math.abs(toDeg(swAng)) > 180 ? 1 : 0 + const sweep = swAng >= 0 ? 1 : 0 + return { cmd: `A${rx},${ry} 0 ${large},${sweep} ${x1},${y1p}`, x: x1, y: y1p } + } + + const aTop = arcFrom(0, 0, wd2, y1, 16200000, 5400000) // 3cd4, cd4 + const aMid1 = arcFrom(hc, y2, wd2, y1, 10800000, -5400000) // cd2,-cd4 + const aMid2 = arcFrom(aMid1.x, aMid1.y, wd2, y1, 16200000, -5400000) //3cd4,-cd4 + const aBot = arcFrom(hc, y4, wd2, y1, 0, 5400000) //0,cd4 + return [`M0,0`, aTop.cmd, `L${hc},${y2}`, aMid1.cmd, aMid2.cmd, `L${hc},${y4}`, aBot.cmd].join( + ' ' + ) +}) + +// ===== Action Buttons ===== +// Action buttons are multi-path shapes: background rect + icon with darken fill + icon outline + rect outline. +// OOXML spec uses ss*3/8 as the icon half-size (dx2), with the icon centred at (hc, vc). +// Shapes with multiPathPresets entries below get proper 3D treatment. Remaining shapes +// fall back to the legacy actionButtonIcons overlay (single flat icon path). 
+ +presetShapes.set('actionButtonBlank', (w, h) => `M0,0 L${w},0 L${w},${h} L0,${h} Z`) + +// Fallback rectangle for action buttons without multiPathPresets entry yet +// actionButtonSound fallback removed — uses multiPathPresets entry below + +// Multi-path action button presets are registered after the multiPathPresets Map +// declaration (see below in the multiPathPresets section). + +// --------------------------------------------------------------------------- +// Action button icon paths (rendered as a second path with contrasting fill) +// --------------------------------------------------------------------------- +const actionButtonIcons = new Map<string, (w: number, h: number) => string>() + +// actionButtonHome icon removed — uses multiPathPresets entry below + +actionButtonIcons.set('actionButtonForwardNext', (w, h) => { + // Right-pointing triangle (▶) + const cx = w / 2 + const cy = h / 2 + const s = Math.min(w, h) * 0.3 + return `M${cx - s * 0.5},${cy - s} L${cx + s},${cy} L${cx - s * 0.5},${cy + s} Z` +}) + +actionButtonIcons.set('actionButtonBackPrevious', (w, h) => { + // Left-pointing triangle (◀) + const cx = w / 2 + const cy = h / 2 + const s = Math.min(w, h) * 0.3 + return `M${cx + s * 0.5},${cy - s} L${cx - s},${cy} L${cx + s * 0.5},${cy + s} Z` +}) + +actionButtonIcons.set('actionButtonReturn', (w, h) => { + // Curved return arrow (↩) — shaft goes right at bottom, curves UP at right end, + // returns left at top with arrowhead pointing left (standard PowerPoint icon). 
+ const cx = w / 2 + const cy = h / 2 + const s = Math.min(w, h) * 0.28 + const thick = s * 0.22 // shaft thickness + const bottomY = cy + s * 0.4 + const topY = cy - s * 0.4 + const leftX = cx - s * 0.6 + const rightX = cx + s * 0.6 + const r = (bottomY - topY) / 2 // semicircle radius + return [ + // Outer edge: bottom-left → right → arc up → left to arrowhead junction + `M${leftX},${bottomY}`, + `L${rightX},${bottomY}`, + `A${r},${r} 0 0,1 ${rightX},${topY}`, + `L${leftX + s * 0.15},${topY}`, + // Inner edge: top → right → arc down → bottom-left + `L${leftX + s * 0.15},${topY + thick}`, + `L${rightX - thick * 0.3},${topY + thick}`, + `A${r - thick},${r - thick} 0 0,0 ${rightX - thick * 0.3},${bottomY - thick}`, + `L${leftX},${bottomY - thick}`, + `Z`, + // Arrowhead pointing left at top-left + `M${leftX - s * 0.3},${topY + thick / 2}`, + `L${leftX + s * 0.15},${topY - s * 0.2}`, + `L${leftX + s * 0.15},${topY + thick + s * 0.2}`, + `Z`, + ].join(' ') +}) + +actionButtonIcons.set('actionButtonBeginning', (w, h) => { + // Skip-to-beginning (|◀) + const cx = w / 2 + const cy = h / 2 + const s = Math.min(w, h) * 0.28 + return [ + // Left bar + `M${cx - s},${cy - s} L${cx - s + s * 0.2},${cy - s} L${cx - s + s * 0.2},${cy + s} L${cx - s},${cy + s} Z`, + // Left-pointing triangle + `M${cx + s},${cy - s} L${cx - s + s * 0.35},${cy} L${cx + s},${cy + s} Z`, + ].join(' ') +}) + +actionButtonIcons.set('actionButtonEnd', (w, h) => { + // Skip-to-end (▶|) + const cx = w / 2 + const cy = h / 2 + const s = Math.min(w, h) * 0.28 + return [ + // Right bar + `M${cx + s - s * 0.2},${cy - s} L${cx + s},${cy - s} L${cx + s},${cy + s} L${cx + s - s * 0.2},${cy + s} Z`, + // Right-pointing triangle + `M${cx - s},${cy - s} L${cx + s - s * 0.35},${cy} L${cx - s},${cy + s} Z`, + ].join(' ') +}) + +// actionButtonHelp icon removed — uses multiPathPresets entry below + +actionButtonIcons.set('actionButtonInformation', (w, h) => { + // Info icon (i) + const cx = w / 2 + const cy = h / 2 + 
const s = Math.min(w, h) * 0.28 + return [ + // Dot + `M${cx - s * 0.1},${cy - s * 0.65} L${cx + s * 0.1},${cy - s * 0.65} L${cx + s * 0.1},${cy - s * 0.4} L${cx - s * 0.1},${cy - s * 0.4} Z`, + // Stem + `M${cx - s * 0.12},${cy - s * 0.2} L${cx + s * 0.12},${cy - s * 0.2} L${cx + s * 0.12},${cy + s * 0.65} L${cx - s * 0.12},${cy + s * 0.65} Z`, + ].join(' ') +}) + +actionButtonIcons.set('actionButtonDocument', (w, h) => { + // Document with folded corner + const cx = w / 2 + const cy = h / 2 + const s = Math.min(w, h) * 0.28 + const dx = s * 0.7 + const dy = s + const fold = s * 0.3 + return [ + `M${cx - dx},${cy - dy}`, + `L${cx + dx - fold},${cy - dy} L${cx + dx},${cy - dy + fold}`, + `L${cx + dx},${cy + dy} L${cx - dx},${cy + dy} Z`, + `M${cx + dx - fold},${cy - dy} L${cx + dx - fold},${cy - dy + fold} L${cx + dx},${cy - dy + fold}`, + ].join(' ') +}) + +// actionButtonSound icon removed — uses multiPathPresets entry below + +// actionButtonMovie icon is now rendered via multiPathPresets (see below). + +/** + * Get the SVG path for the icon overlay of an action button. + * Returns undefined if the shape is not an action button or is actionButtonBlank. + */ +export function getActionButtonIconPath( + shapeType: string, + w: number, + h: number +): string | undefined { + const key = shapeType.toLowerCase() + const generator = actionButtonIcons.get(key) ?? 
actionButtonIcons.get(shapeType) + return generator?.(w, h) +} + +// ===== Aliases and common alternative names ===== + +// Some shapes are known by multiple names in different OOXML versions +// flowChartOfflineStorage: registered as multiPathPreset (see below) + +// ribbon is implemented as multiPathPreset (see multiPathPresets below) + +presetShapes.set('wave', (w, h, adjustments) => { + // OOXML: adj1=12500 (max 20000), adj2=0 (phase shift, range -10000..10000) + const a1 = Math.min(Math.max(adjRaw(adjustments, 'adj1', 12500), 0), 20000) + const a2 = Math.min(Math.max(adjRaw(adjustments, 'adj2', 0), -10000), 10000) + const y1 = (h * a1) / 100000 + const dy2 = (y1 * 10) / 3 + const y2 = y1 - dy2 // control above crest + const y3 = y1 + dy2 // control below crest + const y4 = h - y1 // bottom wave y + const y5 = y4 - dy2 + const y6 = y4 + dy2 + // Phase shift + const of2 = (w * a2) / 50000 + const dx2 = of2 < 0 ? 0 : of2 + const dx5 = of2 < 0 ? of2 : 0 + const x2 = -dx2 + const x5 = w - dx5 + const dx3 = (x5 - x2) / 3 + const x3 = x2 + dx3 + const x4 = (x3 + x5) / 2 + const x6 = dx5 + const x10 = w + dx2 + const x7 = x6 + (x10 - x6) / 3 + const x8 = (x7 + x10) / 2 + return [ + `M${x2},${y1}`, + `C${x3},${y2} ${x4},${y3} ${x5},${y1}`, + `L${x10},${y4}`, + `C${x8},${y6} ${x7},${y5} ${x6},${y4}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('doubleWave', (w, h, adjustments) => { + // OOXML: adj1=6250 (max 12500), adj2=0 (phase shift) + const a1 = Math.min(Math.max(adjRaw(adjustments, 'adj1', 6250), 0), 12500) + const a2 = Math.min(Math.max(adjRaw(adjustments, 'adj2', 0), -10000), 10000) + const y1 = (h * a1) / 100000 + const dy2 = (y1 * 10) / 3 + const y2 = y1 - dy2 + const y3 = y1 + dy2 + const y4 = h - y1 + const y5 = y4 - dy2 + const y6 = y4 + dy2 + const of2 = (w * a2) / 50000 + const dx2 = of2 < 0 ? 0 : of2 + const dx8 = of2 < 0 ? 
of2 : 0 + const x2 = -dx2 + const x8 = w - dx8 + const dx3 = (x8 - x2) / 6 + const x3 = x2 + dx3 + const dx4 = (x8 - x2) / 3 + const x4 = x2 + dx4 + const x5 = (x2 + x8) / 2 + const x6 = x5 + dx3 + const x7 = (x6 + x8) / 2 + const x9 = dx8 + const x15 = w + dx2 + const dx3b = (x15 - x9) / 6 + const x10 = x9 + dx3b + const x11 = x9 + (x15 - x9) / 3 + const x12 = (x9 + x15) / 2 + const x13 = x12 + dx3b + const x14 = (x13 + x15) / 2 + return [ + `M${x2},${y1}`, + `C${x3},${y2} ${x4},${y3} ${x5},${y1}`, + `C${x6},${y2} ${x7},${y3} ${x8},${y1}`, + `L${x15},${y4}`, + `C${x14},${y6} ${x13},${y5} ${x12},${y4}`, + `C${x11},${y6} ${x10},${y5} ${x9},${y4}`, + 'Z', + ].join(' ') +}) + +// verticalScroll and horizontalScroll are implemented as multi-path presets +// (see multiPathPresets below) for accurate OOXML rendering with darkenLess shadows. + +presetShapes.set('irregularSeal1', (w, h) => { + // OOXML spec: exact coordinates on 21600x21600 grid + const sx = (x: number) => (w * x) / 21600 + const sy = (y: number) => (h * y) / 21600 + return [ + `M${sx(10800)},${sy(5800)}`, + `L${sx(14522)},0`, + `L${sx(14155)},${sy(5325)}`, + `L${sx(18380)},${sy(4457)}`, + `L${sx(16702)},${sy(7315)}`, + `L${sx(21097)},${sy(8137)}`, + `L${sx(17607)},${sy(10475)}`, + `L${sx(21600)},${sy(13290)}`, + `L${sx(16837)},${sy(12942)}`, + `L${sx(18145)},${sy(18095)}`, + `L${sx(14020)},${sy(14457)}`, + `L${sx(13247)},${sy(19737)}`, + `L${sx(10532)},${sy(14935)}`, + `L${sx(8485)},${sy(21600)}`, + `L${sx(7715)},${sy(15627)}`, + `L${sx(4762)},${sy(17617)}`, + `L${sx(5667)},${sy(13937)}`, + `L${sx(135)},${sy(14587)}`, + `L${sx(3722)},${sy(11775)}`, + `L0,${sy(8615)}`, + `L${sx(4627)},${sy(7617)}`, + `L${sx(370)},${sy(2295)}`, + `L${sx(7312)},${sy(6320)}`, + `L${sx(8352)},${sy(2295)}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('irregularSeal2', (w, h) => { + // Office-like irregularSeal2 coordinates (21600 design grid). 
+ return [ + `M${(w * 11462) / 21600},${(h * 4342) / 21600}`, + `L${(w * 14790) / 21600},0`, + `L${(w * 14525) / 21600},${(h * 5777) / 21600}`, + `L${(w * 18007) / 21600},${(h * 3172) / 21600}`, + `L${(w * 16380) / 21600},${(h * 6532) / 21600}`, + `L${w},${(h * 6645) / 21600}`, + `L${(w * 16985) / 21600},${(h * 9402) / 21600}`, + `L${(w * 18270) / 21600},${(h * 11290) / 21600}`, + `L${(w * 16380) / 21600},${(h * 12310) / 21600}`, + `L${(w * 18877) / 21600},${(h * 15632) / 21600}`, + `L${(w * 14640) / 21600},${(h * 14350) / 21600}`, + `L${(w * 14942) / 21600},${(h * 17370) / 21600}`, + `L${(w * 12180) / 21600},${(h * 15935) / 21600}`, + `L${(w * 11612) / 21600},${(h * 18842) / 21600}`, + `L${(w * 9872) / 21600},${(h * 17370) / 21600}`, + `L${(w * 8700) / 21600},${(h * 19712) / 21600}`, + `L${(w * 7527) / 21600},${(h * 18125) / 21600}`, + `L${(w * 4917) / 21600},${h}`, + `L${(w * 4805) / 21600},${(h * 18240) / 21600}`, + `L${(w * 1285) / 21600},${(h * 17825) / 21600}`, + `L${(w * 3330) / 21600},${(h * 15370) / 21600}`, + `L0,${(h * 12877) / 21600}`, + `L${(w * 3935) / 21600},${(h * 11592) / 21600}`, + `L${(w * 1172) / 21600},${(h * 8270) / 21600}`, + `L${(w * 5372) / 21600},${(h * 7817) / 21600}`, + `L${(w * 4502) / 21600},${(h * 3625) / 21600}`, + `L${(w * 8550) / 21600},${(h * 6382) / 21600}`, + `L${(w * 9722) / 21600},${(h * 1887) / 21600}`, + 'Z', + ].join(' ') +}) + +presetShapes.set('teardrop', (w, h) => { + const rx = w / 2 + const ry = h / 2 + return [`M${w},${ry}`, `A${rx},${ry} 0 1,1 ${rx},0`, `L${w},0`, `L${w},${ry}`, 'Z'].join(' ') +}) + +presetShapes.set('pie', (w, h, adjustments) => { + // OOXML pie: adj1 = start angle, adj2 = end angle (60000ths of a degree). Sweep clockwise from start to end. + // OOXML angles are "visual" (geometric) — must convert to parametric for ellipses (rx≠ry). + const adj1Raw = adjustments?.get('adj1') ?? 0 + const adj2Raw = adjustments?.get('adj2') ?? 
16200000 // 270° end default + const startDeg = (adj1Raw / 60000) % 360 + const endDeg = (adj2Raw / 60000) % 360 + let sweepDeg = (((endDeg - startDeg) % 360) + 360) % 360 + if (sweepDeg === 0 && startDeg !== endDeg) sweepDeg = 360 + const rx = w / 2 + const ry = h / 2 + const toRad = (d: number) => (d * Math.PI) / 180 + const visualToParam = (deg: number) => + Math.atan2(Math.sin(toRad(deg)) / ry, Math.cos(toRad(deg)) / rx) + const startParam = visualToParam(startDeg) + const endParam = visualToParam(endDeg) + const x1 = rx + rx * Math.cos(startParam) + const y1 = ry + ry * Math.sin(startParam) + const x2 = rx + rx * Math.cos(endParam) + const y2 = ry + ry * Math.sin(endParam) + const largeArc = sweepDeg > 180 ? 1 : 0 + return [`M${rx},${ry}`, `L${x1},${y1}`, `A${rx},${ry} 0 ${largeArc},1 ${x2},${y2}`, 'Z'].join(' ') +}) + +presetShapes.set('pieWedge', (w, h) => { + // OOXML: Quarter-ellipse pie wedge. Center at (w, h), radii = (w, h). + // Arc from 180° sweeping 90° CW: starts at (0, h), ends at (w, 0). + // The arc bulges toward the upper-left. + return [`M0,${h}`, `A${w},${h} 0 0,1 ${w},0`, `L${w},${h}`, 'Z'].join(' ') +}) + +presetShapes.set('arc', (w, h, adjustments) => { + // OOXML arc: adj1/adj2 are angles in 60000ths of a degree + // OOXML angles are "visual" (geometric) — must convert to parametric for ellipses (rx≠ry). + const adj1Raw = adjustments?.get('adj1') ?? 16200000 // default 270° + const adj2Raw = adjustments?.get('adj2') ?? 
0 // default 0° + const startDeg = adj1Raw / 60000 + const endDeg = adj2Raw / 60000 + const rx = w / 2 + const ry = h / 2 + const toRad = (d: number) => (d * Math.PI) / 180 + const visualToParam = (deg: number) => + Math.atan2(Math.sin(toRad(deg)) / ry, Math.cos(toRad(deg)) / rx) + const startParam = visualToParam(startDeg) + const endParam = visualToParam(endDeg) + const x1 = rx + rx * Math.cos(startParam) + const y1 = ry + ry * Math.sin(startParam) + const x2 = rx + rx * Math.cos(endParam) + const y2 = ry + ry * Math.sin(endParam) + let sweepDeg = (((endDeg - startDeg) % 360) + 360) % 360 + if (sweepDeg === 0 && startDeg !== endDeg) sweepDeg = 360 + const largeArc = sweepDeg > 180 ? 1 : 0 + return `M${x1},${y1} A${rx},${ry} 0 ${largeArc},1 ${x2},${y2}` +}) + +presetShapes.set('chord', (w, h, adjustments) => { + // OOXML chord: arc + chord line. Spec uses ellipse (arcTo wR="wd2" hR="hd2") per presetShapeDefinitions. + // OOXML angles are "visual" (geometric) angles — the angle of the ray from center to the point. + // For ellipses (rx≠ry), convert to parametric angle: t = atan2(sin(θ)/ry, cos(θ)/rx) + const adj1Raw = adjustments?.get('adj1') ?? 2700000 // default 45° + const adj2Raw = adjustments?.get('adj2') ?? 
16200000 // default 270° + const startDeg = adj1Raw / 60000 + const endDeg = adj2Raw / 60000 + const cx = w / 2 + const cy = h / 2 + const rx = w / 2 + const ry = h / 2 + const toRad = (d: number) => (d * Math.PI) / 180 + // Convert OOXML visual angles to parametric angles on the ellipse + const visualToParam = (deg: number) => + Math.atan2(Math.sin(toRad(deg)) / ry, Math.cos(toRad(deg)) / rx) + const startParam = visualToParam(startDeg) + const endParam = visualToParam(endDeg) + const x1 = cx + rx * Math.cos(startParam) + const y1 = cy + ry * Math.sin(startParam) + const x2 = cx + rx * Math.cos(endParam) + const y2 = cy + ry * Math.sin(endParam) + // Use OOXML visual sweep to determine large-arc-flag + let sweepDeg = (((endDeg - startDeg) % 360) + 360) % 360 + if (sweepDeg === 0 && startDeg !== endDeg) sweepDeg = 360 + // When adj1 == adj2, the chord covers the full ellipse (360° sweep) + if (sweepDeg === 0) { + return `M${cx - rx},${cy} A${rx},${ry} 0 1,1 ${cx + rx},${cy} A${rx},${ry} 0 1,1 ${cx - rx},${cy} Z` + } + const largeArc = sweepDeg > 180 ? 1 : 0 + return `M${x1},${y1} A${rx},${ry} 0 ${largeArc},1 ${x2},${y2} Z` +}) + +presetShapes.set('funnel', (w, h) => { + // OOXML funnel: top rim ellipse arc + tapered sides + bottom spout arc + inset top ellipse. + // From presetShapeDefinitions.xml (ECMA-376). + const ss = Math.min(w, h) + const wd2 = w / 2 + const hd4 = h / 4 + const hc = w / 2 + const b = h + + const d = ss / 20 // inset margin + const rw2 = wd2 - d // inset top-ellipse x-radius + const rh2 = hd4 - d // inset top-ellipse y-radius + + // Angle (in radians) where funnel sides are tangent to top ellipse. 
+ // OOXML: t1 = cos(wd2, 480000), t2 = sin(hd4, 480000) → da = atan2(t1, t2) + // 480000 = 8° in 60000ths of a degree + const ang8 = (8 * Math.PI) / 180 + const t1 = wd2 * Math.cos(ang8) + const t2 = hd4 * Math.sin(ang8) + const da = Math.atan2(t2, t1) // radians + + // Angles for the top rim arc (OOXML convention: sweep from stAng1 by swAng1) + const stAng1 = Math.PI - da // cd2 - da + const swAng1 = Math.PI + 2 * da // cd2 + 2*da + + // Sweep for the bottom spout arc + const swAng3 = Math.PI - 2 * da // cd2 - 2*da + + // Bottom spout ellipse radii: 1/4 of top ellipse + const rw3 = wd2 / 4 + const rh3 = hd4 / 4 + + // Start point on top ellipse at stAng1 (visual angle → ellipse point) + // OOXML uses: n = (wR*hR) / mod(cos(hR,ang), sin(wR,ang), 0), then x = hc + cos(n,ang), y = hd4 + sin(n,ang) + // This is equivalent to the parametric ellipse point at the "visual" angle. + const ct1 = hd4 * Math.cos(stAng1) + const st1 = wd2 * Math.sin(stAng1) + const m1 = Math.sqrt(ct1 * ct1 + st1 * st1) + const n1 = (wd2 * hd4) / m1 + const dx1 = n1 * Math.cos(stAng1) + const dy1 = n1 * Math.sin(stAng1) + const x1 = hc + dx1 + const y1 = hd4 + dy1 + + // End point of top arc (at stAng1 + swAng1 = pi + da) + const endAng1 = stAng1 + swAng1 + const ct1e = hd4 * Math.cos(endAng1) + const st1e = wd2 * Math.sin(endAng1) + const m1e = Math.sqrt(ct1e * ct1e + st1e * st1e) + const n1e = (wd2 * hd4) / m1e + const dx1e = n1e * Math.cos(endAng1) + const dy1e = n1e * Math.sin(endAng1) + const x1e = hc + dx1e + const y1e = hd4 + dy1e + + // Point on spout ellipse at angle da + const vc3 = b - rh3 // vertical center of spout ellipse + const ct3 = rh3 * Math.cos(da) + const st3 = rw3 * Math.sin(da) + const m3 = Math.sqrt(ct3 * ct3 + st3 * st3) + const n3 = (rw3 * rh3) / m3 + const dx3 = n3 * Math.cos(da) + const dy3 = n3 * Math.sin(da) + const x3 = hc + dx3 + const y2 = vc3 + dy3 + + // End point of spout arc (at da + swAng3) + const endAng3 = da + swAng3 + const ct3e = rh3 * 
Math.cos(endAng3) + const st3e = rw3 * Math.sin(endAng3) + const m3e = Math.sqrt(ct3e * ct3e + st3e * st3e) + const n3e = (rw3 * rh3) / m3e + const dx3e = n3e * Math.cos(endAng3) + const dy3e = n3e * Math.sin(endAng3) + const x3e = hc + dx3e + const y2e = vc3 + dy3e + + // Determine arc flags + const swDeg1 = (swAng1 * 180) / Math.PI + const largeArc1 = Math.abs(swDeg1) > 180 ? 1 : 0 + const sweep1 = swAng1 > 0 ? 1 : 0 + + const swDeg3 = (swAng3 * 180) / Math.PI + const largeArc3 = Math.abs(swDeg3) > 180 ? 1 : 0 + const sweep3 = swAng3 > 0 ? 1 : 0 + + // Sub-path 1: Funnel body (top arc → line to spout → spout arc → close) + const body = [ + `M${x1},${y1}`, + `A${wd2},${hd4} 0 ${largeArc1},${sweep1} ${x1e},${y1e}`, + `L${x3},${y2}`, + `A${rw3},${rh3} 0 ${largeArc3},${sweep3} ${x3e},${y2e}`, + 'Z', + ].join(' ') + + // Sub-path 2: Inset top ellipse (full ellipse, counter-clockwise for even-odd hole) + const x2 = wd2 - rw2 // leftmost point of inset ellipse + const x2r = wd2 + rw2 // rightmost point + const inset = [ + `M${x2},${hd4}`, + `A${rw2},${rh2} 0 1,0 ${x2r},${hd4}`, + `A${rw2},${rh2} 0 1,0 ${x2},${hd4}`, + 'Z', + ].join(' ') + + return `${body} ${inset}` +}) + +// ===== Fallback ===== + +/** + * Get the SVG path for a preset shape, falling back to a simple rectangle + * if the shape type is not implemented. + */ +// --------------------------------------------------------------------------- +// Preset shape overlays — additional paths for 3D-like shapes (lighter top face, etc.) 
+// --------------------------------------------------------------------------- + +export interface PresetOverlay { + /** SVG path d-attribute for the overlay */ + path: string + /** Fill modifier: 'lighten' brightens the base fill */ + fillModifier: 'lighten' +} + +export type PresetOverlayGenerator = ( + w: number, + h: number, + adjustments?: Map<string, number> +) => PresetOverlay[] + +const presetOverlays: Map<string, PresetOverlayGenerator> = new Map() + +presetOverlays.set('can', (w, h) => { + const ry = h * 0.1 + const rx = w / 2 + return [ + { + path: [`M0,${ry}`, `A${rx},${ry} 0 0,1 ${w},${ry}`, `A${rx},${ry} 0 0,1 0,${ry}`, 'Z'].join( + ' ' + ), + fillModifier: 'lighten', + }, + ] +}) + +/** + * Get overlay paths for a preset shape (3D top faces, etc.). + * Returns empty array if the shape has no overlays. + */ +export function getPresetOverlays( + shapeType: string, + w: number, + h: number, + adjustments?: Map<string, number> +): PresetOverlay[] { + const key = shapeType.toLowerCase() + const gen = presetOverlays.get(key) ?? presetOverlays.get(shapeType) + return gen ? gen(w, h, adjustments) : [] +} + +// --------------------------------------------------------------------------- +// Multi-path preset shapes — complex shapes with multiple SVG paths +// Each path has its own fill modifier and stroke behavior, matching OOXML spec. +// --------------------------------------------------------------------------- + +/** A single sub-path within a multi-path preset shape. 
*/ +export interface PresetSubPath { + /** SVG path d-attribute string */ + d: string + /** + * Fill behavior: + * - 'norm': use the shape's normal fill + * - 'darken': darken the base fill (multiply with ~60% gray) + * - 'darkenLess': slightly darken (multiply with ~80% gray) + * - 'lighten': lighten the base fill + * - 'lightenLess': slightly lighten + * - 'none': no fill (stroke-only detail lines) + */ + fill: 'norm' | 'darken' | 'darkenLess' | 'lighten' | 'lightenLess' | 'none' + /** Whether this path should have a stroke (default true) */ + stroke: boolean + /** Optional stroke width multiplier for detail lines that should render lighter than the outline. */ + strokeWidthScale?: number + /** Restrict visibility of this detail path to a stroke band around the main outline path. */ + maskToMainOutline?: boolean + /** Optional scale for the outline-band mask stroke width. */ + maskStrokeScale?: number + /** Restrict visibility of this detail path to the band between the main outline and an inset-scaled outline. 
*/ + maskToMainOutlineBandScale?: number +} + +type MultiPathPresetGenerator = ( + w: number, + h: number, + adjustments?: Map +) => PresetSubPath[] + +const multiPathPresets: Map = new Map() + +// ===== Action Button multi-path presets (OOXML spec-accurate) ===== +// Common helper: OOXML action button guide values +function _abGuides(w: number, h: number) { + const ss = Math.min(w, h) + const hc = w / 2 + const vc = h / 2 + const dx2 = (ss * 3) / 8 // icon half-extent + return { + ss, + hc, + vc, + dx2, + g9: vc - dx2, + g10: vc + dx2, + g11: hc - dx2, + g12: hc + dx2, + g13: (ss * 3) / 4, + } +} +const _rect = (w: number, h: number) => `M0,0 L${w},0 L${w},${h} L0,${h} Z` + +// actionButtonForwardNext (VBA 0130): right-pointing triangle ▶ +multiPathPresets.set('actionButtonForwardNext', (w, h) => { + const { g9, g10, g11, g12, vc } = _abGuides(w, h) + const tri = `M${g12},${vc} L${g11},${g9} L${g11},${g10} Z` + return [ + { d: `${_rect(w, h)} ${tri}`, fill: 'norm', stroke: false }, + { d: tri, fill: 'darken', stroke: false }, + { d: tri, fill: 'none', stroke: true }, + { d: _rect(w, h), fill: 'none', stroke: true }, + ] +}) + +multiPathPresets.set('actionButtonForward', (w, h) => { + const forwardNext = multiPathPresets.get('actionButtonForwardNext') + return forwardNext ? 
forwardNext(w, h) : [] +}) + +// actionButtonBackPrevious (VBA 0129): left-pointing triangle ◀ +multiPathPresets.set('actionButtonBackPrevious', (w, h) => { + const { g9, g10, g11, g12, vc } = _abGuides(w, h) + const tri = `M${g11},${vc} L${g12},${g9} L${g12},${g10} Z` + return [ + { d: `${_rect(w, h)} ${tri}`, fill: 'norm', stroke: false }, + { d: tri, fill: 'darken', stroke: false }, + { d: tri, fill: 'none', stroke: true }, + { d: _rect(w, h), fill: 'none', stroke: true }, + ] +}) + +// actionButtonBeginning (VBA 0131): |◀ skip-to-start +multiPathPresets.set('actionButtonBeginning', (w, h) => { + const { g9, g10, g11, g12, g13, vc } = _abGuides(w, h) + const g14 = g13 / 8 + const g15 = g13 / 4 + const g16 = g11 + g14 + const g17 = g11 + g15 + const tri = `M${g17},${vc} L${g12},${g9} L${g12},${g10} Z` + const bar = `M${g16},${g9} L${g11},${g9} L${g11},${g10} L${g16},${g10} Z` + const icon = `${tri} ${bar}` + return [ + { d: `${_rect(w, h)} ${icon}`, fill: 'norm', stroke: false }, + { d: icon, fill: 'darken', stroke: false }, + { d: icon, fill: 'none', stroke: true }, + { d: _rect(w, h), fill: 'none', stroke: true }, + ] +}) + +// actionButtonEnd (VBA 0132): ▶| skip-to-end +multiPathPresets.set('actionButtonEnd', (w, h) => { + const { g9, g10, g11, g12, g13, vc } = _abGuides(w, h) + const g14 = (g13 * 3) / 4 + const g15 = (g13 * 7) / 8 + const g16 = g11 + g14 + const g17 = g11 + g15 + const tri = `M${g16},${vc} L${g11},${g9} L${g11},${g10} Z` + const bar = `M${g17},${g9} L${g12},${g9} L${g12},${g10} L${g17},${g10} Z` + const icon = `${tri} ${bar}` + return [ + { d: `${_rect(w, h)} ${icon}`, fill: 'norm', stroke: false }, + { d: icon, fill: 'darken', stroke: false }, + { d: icon, fill: 'none', stroke: true }, + { d: _rect(w, h), fill: 'none', stroke: true }, + ] +}) + +// actionButtonReturn (VBA 0133): curved return arrow ↩ +// OOXML spec: 4 paths – bg+icon cutout (norm), icon fill (darken), icon outline (stroke), rect outline (stroke) +// Fill paths use inner arcs 
multiPathPresets.set('actionButtonReturn', (w, h) => {
  const { g9, g10, g11, g12, g13, hc, vc: _vcR } = _abGuides(w, h)
  // OOXML guide fractions of the icon extent g13
  const g14 = (g13 * 7) / 8
  const g15 = (g13 * 3) / 4
  const g16 = (g13 * 5) / 8
  const g17 = (g13 * 3) / 8 // outer arc radius
  const g18 = g13 / 4
  const g27 = g13 / 8 // inner arc radius
  // Absolute coordinates: gN = icon origin + fraction
  const g19 = g9 + g15
  const g20 = g9 + g16
  const g21 = g9 + g18
  const g22 = g11 + g14
  const g23 = g11 + g15
  const g24 = g11 + g16
  const g25 = g11 + g17
  const g26 = g11 + g18

  // Fill icon path (OOXML paths 0 & 1 share this geometry):
  // arrow head at top-left, then the looped tail drawn with two inner (g27)
  // and two outer (g17) quarter-circle arcs. Arc endpoints below were derived
  // from the spec's arcTo (center = current point projected back by the start
  // angle; endpoint = center + radius at start+sweep angle).
  const fillIcon = [
    `M${g12},${g21}`,
    `L${g23},${g9}`,
    `L${hc},${g21}`,
    `L${g24},${g21}`,
    `L${g24},${g20}`,
    `A${g27},${g27} 0 0,1 ${g24 - g27},${g19}`, // inner bottom-right corner (0°→90°)
    `L${g25},${g19}`, // across inner bottom
    `A${g27},${g27} 0 0,1 ${g26},${g20}`, // inner bottom-left corner (90°→180°)
    `L${g26},${g21}`,
    `L${g11},${g21}`,
    `L${g11},${g20}`,
    `A${g17},${g17} 0 0,0 ${g25},${g10}`, // outer bottom-left curve (180°→90°, CCW)
    `L${hc},${g10}`, // across outer bottom
    `A${g17},${g17} 0 0,0 ${hc + g17},${g10 - g17}`, // outer bottom-right curve (90°→0°, CCW)
    `L${g22},${g21}`,
    `Z`,
  ].join(' ')

  // Outline path (OOXML path 2): traces the same shape with the arc winding
  // reversed so the stroke follows the visible silhouette. Note g22 - g17
  // simplifies to g11 + g13/2 (since g14 - g17 = g13/2), hence the literal
  // expression for arc A's endpoint.
  const outline = [
    `M${g12},${g21}`,
    `L${g22},${g21}`,
    `L${g22},${g20}`,
    `A${g17},${g17} 0 0,1 ${g11 + g13 / 2},${g10}`, // arc A: outer bottom-right (0°→90°)
    `L${g25},${g10}`, // across outer bottom
    `A${g17},${g17} 0 0,1 ${g11},${g20}`, // arc B: outer bottom-left (90°→180°)
    `L${g11},${g21}`,
    `L${g26},${g21}`,
    `L${g26},${g20}`,
    `A${g27},${g27} 0 0,0 ${g25},${g19}`, // arc C: inner bottom-left (180°→90°, CCW)
    `L${hc},${g19}`, // across inner bottom
    `A${g27},${g27} 0 0,0 ${g24},${g20}`, // arc D: inner bottom-right (90°→0°, CCW)
    `L${g24},${g21}`,
    `L${hc},${g21}`,
    `L${g23},${g9}`,
    `Z`,
  ].join(' ')

  return [
    { d: `${_rect(w, h)} ${fillIcon}`, fill: 'norm', stroke: false },
    { d: fillIcon, fill: 'darken', stroke: false },
    { d: outline, fill: 'none', stroke: true },
    { d: _rect(w, h), fill: 'none', stroke: true },
  ]
})

// actionButtonSound (VBA 0135): speaker icon with 3 sound wave lines
// OOXML spec: 4 paths – bg+speaker cutout (norm), speaker fill (darken), speaker outline+waves (stroke), rect outline (stroke)
multiPathPresets.set('actionButtonSound', (w, h) => {
  const { g9, g10, g11, g12, g13, hc: _hcS, vc } = _abGuides(w, h)
  // Guide calculations from OOXML presetShapeDefinitions.xml
  const g14 = g13 / 8
  const g15 = (g13 * 5) / 16
  const g16 = (g13 * 5) / 8
  const g17 = (g13 * 11) / 16
  const g18 = (g13 * 3) / 4
  const g19 = (g13 * 7) / 8

  // Absolute positions
  const g20 = g9 + g14
  const g21 = g9 + g15
  const g22 = g9 + g17
  const g23 = g9 + g19
  const g24 = g11 + g15
  const g25 = g11 + g16
  const g26 = g11 + g18

  // Speaker shape (pentagon-like: box on the left, cone flaring to the right)
  const speaker = `M${g11},${g21} L${g11},${g22} L${g24},${g22} L${g25},${g10} L${g25},${g9} L${g24},${g21} Z`

  // Outline path: same speaker traced with the opposite winding so the stroke
  // direction matches the OOXML outline sub-path.
  const speakerOutline = `M${g11},${g21} L${g24},${g21} L${g25},${g9} L${g25},${g10} L${g24},${g22} L${g11},${g22} Z`

  // Three sound-wave strokes fanning out to the right edge of the icon.
  const waveLine1 = `M${g26},${g21} L${g12},${g20}` // top-right diagonal
  const waveLine2 = `M${g26},${vc} L${g12},${vc}` // middle horizontal
  const waveLine3 = `M${g26},${g22} L${g12},${g23}` // bottom-right diagonal

  const outlineWithWaves = `${speakerOutline} ${waveLine1} ${waveLine2} ${waveLine3}`

  return [
    { d: `${_rect(w, h)} ${speaker}`, fill: 'norm', stroke: false },
    { d: speaker, fill: 'darken', stroke: false },
    { d: outlineWithWaves, fill: 'none', stroke: true },
    { d: _rect(w, h), fill: 'none', stroke: true },
  ]
})

// actionButtonInformation (VBA 0128): circle with letter-"i" glyph inside
multiPathPresets.set('actionButtonInformation', (w, h) => {
  const { g9, g10, g11, g13, hc, vc: _vcI, dx2 } = _abGuides(w, h)
  // Guide fractions ("v" suffix distinguishes these from shared gN names)
  const g14 = g13 / 32
  const g17v = (g13 * 5) / 16
  const g18v = (g13 * 3) / 8
  const g19v = (g13 * 13) / 32
  const g20v = (g13 * 19) / 32
  const g22v = (g13 * 11) / 16
  const g23v = (g13 * 13) / 16
  const g24v = (g13 * 7) / 8
  const g38 = (g13 * 3) / 32 // dot radius
  // Absolute y positions (dot top, "i" body top/steps/bottom)
  const y25 = g9 + g14
  const y28 = g9 + g17v
  const y29 = g9 + g18v
  const y30 = g9 + g23v
  const y31 = g9 + g24v
  // Absolute x positions of the serif "i" body
  const x32 = g11 + g17v
  const x34 = g11 + g19v
  const x35 = g11 + g20v
  const x37 = g11 + g22v
  // Enclosing circle, dot, and serif "i" body — circles drawn as two half-arcs
  const circle = `M${hc},${g9} A${dx2},${dx2} 0 1,1 ${hc},${g10} A${dx2},${dx2} 0 1,1 ${hc},${g9} Z`
  const dot = `M${hc},${y25} A${g38},${g38} 0 1,1 ${hc},${y25 + g38 * 2} A${g38},${g38} 0 1,1 ${hc},${y25} Z`
  const iBody = `M${x32},${y28} L${x37},${y28} L${x37},${y29} L${x35},${y29} L${x35},${y30} L${x37},${y30} L${x37},${y31} L${x32},${y31} L${x32},${y30} L${x34},${y30} L${x34},${y29} L${x32},${y29} Z`
  const iconInner = `${dot} ${iBody}`
  return [
    { d: `${_rect(w, h)} ${circle}`, fill: 'norm', stroke: false },
    { d: `${circle} ${iconInner}`, fill: 'darken', stroke: false },
    { d: iconInner, fill: 'lighten', stroke: false },
    { d: `${circle} ${iconInner}`, fill: 'none', stroke: true },
    { d: _rect(w, h), fill: 'none', stroke: true },
  ]
})

// actionButtonHome (VBA 0126): house icon with chimney and door
// OOXML spec: 5 paths – bg+house cutout (norm), walls+chimney (darkenLess), roof+door (darken),
// icon outline (stroke), rect outline (stroke)
multiPathPresets.set('actionButtonHome', (w, h) => {
  const { g9, g10, g11, g12, g13, hc, vc } = _abGuides(w, h)
  // Guide calculations from OOXML presetShapeDefinitions.xml
  const g14 = g13 / 16
  const g15 = g13 / 8
  const g16 = (g13 * 3) / 16
  const g17 = (g13 * 5) / 16
  const g18 = (g13 * 7) / 16
  const g19 = (g13 * 9) / 16
  const g20 = (g13 * 11) / 16
  const g21 = (g13 * 3) / 4
  const g22 = (g13 * 13) / 16
  const g23 = (g13 * 7) / 8

  // Absolute positions
  const g24 = g9 + g14
  const g25 = g9 + g16
  const g26 = g9 + g17
  const g27 = g9 + g21
  const g28 = g11 + g15
  const g29 = g11 + g18
  const g30 = g11 + g19
  const g31 = g11 + g20
  const g32 = g11 + g22
  const g33 = g11 + g23

  // Path 0: background rect + full house outline cutout (norm, no stroke)
  // House outline: roof triangle → right side → chimney → left side → base
  const houseOutline =
    `M${hc},${g9} ` +
    `L${g11},${vc} L${g28},${vc} L${g28},${g10} L${g33},${g10} L${g33},${vc} L${g12},${vc} ` +
    `L${g32},${g26} L${g32},${g24} L${g31},${g24} L${g31},${g25} Z`

  // Path 1: walls + chimney (darkenLess, no stroke)
  // Sub-path 1: chimney bar
  const chimney = `M${g32},${g26} L${g32},${g24} L${g31},${g24} L${g31},${g25} Z`
  // Sub-path 2: house body (walls) with door cutout
  const walls = `M${g28},${vc} L${g28},${g10} L${g29},${g10} L${g29},${g27} L${g30},${g27} L${g30},${g10} L${g33},${g10} L${g33},${vc} Z`

  // Path 2: roof triangle + door (darken, no stroke)
  const roof = `M${hc},${g9} L${g11},${vc} L${g12},${vc} Z`
  const door = `M${g29},${g27} L${g30},${g27} L${g30},${g10} L${g29},${g10} Z`

  // Path 3: icon outline with all detail lines (none fill, stroke)
  const iconOutline =
    `M${hc},${g9} ` +
    `L${g31},${g25} L${g31},${g24} L${g32},${g24} L${g32},${g26} L${g12},${vc} ` +
    `L${g33},${vc} L${g33},${g10} L${g28},${g10} L${g28},${vc} L${g11},${vc} Z ` +
    // Chimney diagonal line
    `M${g31},${g25} L${g32},${g26} ` +
    // Horizontal eave line
    `M${g33},${vc} L${g28},${vc} ` +
    // Door outline
    `M${g29},${g10} L${g29},${g27} L${g30},${g27} L${g30},${g10}`

  return [
    { d: `${_rect(w, h)} ${houseOutline}`, fill: 'norm', stroke: false },
    { d: `${chimney} ${walls}`, fill: 'darkenLess', stroke: false },
    { d: `${roof} ${door}`, fill: 'darken', stroke: false },
    { d: iconOutline, fill: 'none', stroke: true },
    { d: _rect(w, h), fill: 'none', stroke: true },
  ]
})

// actionButtonHelp (VBA 0127): question mark "?" inside rectangle
// OOXML spec: 4 paths – bg+icon cutout (norm), icon fill (darken), icon outline (stroke), rect outline (stroke)
multiPathPresets.set('actionButtonHelp', (w, h) => {
  const { g9, g11, g13, hc, vc: _vcH } = _abGuides(w, h)
  // Guide calculations from OOXML presetShapeDefinitions.xml
  const g14 = g13 / 7
  const g15 = (g13 * 3) / 14
  const g16 = (g13 * 2) / 7
  const g19 = (g13 * 3) / 7
  const g20 = (g13 * 4) / 7
  const g21 = (g13 * 17) / 28
  const g23 = (g13 * 21) / 28
  const g24 = (g13 * 11) / 14
  const g41 = g13 / 14
  const g42 = (g13 * 3) / 28

  // Absolute positions
  const g27 = g9 + g16
  const g29 = g9 + g21
  const g30 = g9 + g23
  const g31 = g9 + g24
  const g33 = g11 + g15
  const g36 = g11 + g19
  const g37 = g11 + g20

  // Helper: OOXML arcTo → SVG arc segment.
  // OOXML arcTo starts from the current point at angle stDeg on the ellipse;
  // the center is therefore the current point projected back by the radius,
  // and the endpoint is the center plus the radius at stDeg+swDeg.
  const arcSeg = (
    curX: number,
    curY: number,
    wR: number,
    hR: number,
    stDeg: number,
    swDeg: number
  ) => {
    const stRad = (stDeg * Math.PI) / 180
    const endRad = ((stDeg + swDeg) * Math.PI) / 180
    const cx = curX - wR * Math.cos(stRad)
    const cy = curY - hR * Math.sin(stRad)
    const endX = cx + wR * Math.cos(endRad)
    const endY = cy + hR * Math.sin(endRad)
    const largeArc = Math.abs(swDeg) > 180 ? 1 : 0
    const sweep = swDeg > 0 ? 1 : 0
    return { endX, endY, svg: `A${wR},${hR} 0 ${largeArc},${sweep} ${endX},${endY}` }
  }

  // Build question mark path following OOXML arcTo sequence exactly
  // Start at (g33, g27)
  let cx = g33
  let cy = g27

  // Arc 1: wR=g16 hR=g16 stAng=180° swAng=180° (top semicircle, clockwise)
  const a1 = arcSeg(cx, cy, g16, g16, 180, 180)
  cx = a1.endX
  cy = a1.endY

  // Arc 2: wR=g14 hR=g15 stAng=0° swAng=90° (curve down right)
  const a2 = arcSeg(cx, cy, g14, g15, 0, 90)
  cx = a2.endX
  cy = a2.endY

  // Arc 3: wR=g41 hR=g42 stAng=270° swAng=-90° (small reverse curve)
  const a3 = arcSeg(cx, cy, g41, g42, 270, -90)
  // After arc 3 the spec issues straight lines to the stem:
  // lnTo (g37, g30), (g36, g30), (g36, g29), then the inner arcs back up.

  // Arc 4: wR=g14 hR=g15 stAng=180° swAng=90° (inner curve going up) — starts
  // from (g36, g29) per the spec, not from a3's endpoint
  const a4 = arcSeg(g36, g29, g14, g15, 180, 90)

  // Arc 5: wR=g41 hR=g42 stAng=90° swAng=-90° (small inner reverse curve)
  const a5 = arcSeg(a4.endX, a4.endY, g41, g42, 90, -90)

  // Arc 6: wR=g14 hR=g14 stAng=0° swAng=-180° (inner top semicircle, counter-clockwise)
  const a6 = arcSeg(a5.endX, a5.endY, g14, g14, 0, -180)

  // Bottom dot circle at (hc, g31) with radius g42 (two half-arcs)
  const dot = `M${hc},${g31} A${g42},${g42} 0 1,1 ${hc},${g31 + g42 * 2} A${g42},${g42} 0 1,1 ${hc},${g31} Z`

  // Question mark path (outer shape with arcs + stem + inner cutout arcs)
  const qMark =
    `M${g33},${g27} ` +
    `${a1.svg} ` +
    `${a2.svg} ` +
    `${a3.svg} ` +
    `L${g37},${g30} L${g36},${g30} L${g36},${g29} ` +
    `${a4.svg} ` +
    `${a5.svg} ` +
    `${a6.svg} Z`

  const icon = `${qMark} ${dot}`

  return [
    { d: `${_rect(w, h)} ${icon}`, fill: 'norm', stroke: false }, // Background with icon cutout
    { d: icon, fill: 'darken', stroke: false }, // Darkened icon fill
    { d: icon, fill: 'none', stroke: true }, // Icon outline
    { d: _rect(w, h), fill: 'none', stroke: true }, // Rect outline
  ]
})

// actionButtonDocument (VBA 0134): document with folded corner
multiPathPresets.set('actionButtonDocument', (w, h) => {
  // This preset uses its own narrower icon box (dx1 = ss*9/32) rather than
  // the shared _abGuides half-extent, matching the OOXML definition.
  const ss = Math.min(w, h)
  const hc = w / 2
  const vc = h / 2
  const dx2 = (ss * 3) / 8
  const dx1 = (ss * 9) / 32
  const g9 = vc - dx2
  const g10 = vc + dx2
  const g11 = hc - dx1
  const g12 = hc + dx1
  const g13 = (ss * 3) / 16 // fold size
  const g14 = g12 - g13 // fold left x
  const g15 = g9 + g13 // fold bottom y
  // Page with the top-right corner cut off, the triangular fold, and an
  // outline that retraces the fold crease.
  const doc = `M${g11},${g9} L${g14},${g9} L${g12},${g15} L${g12},${g10} L${g11},${g10} Z`
  const fold = `M${g14},${g9} L${g14},${g15} L${g12},${g15} Z`
  const outline = `${doc} M${g12},${g15} L${g14},${g15} L${g14},${g9}`
  return [
    { d: `${_rect(w, h)} ${doc}`, fill: 'norm', stroke: false },
    { d: doc, fill: 'darkenLess', stroke: false },
    { d: fold, fill: 'darken', stroke: false },
    { d: outline, fill: 'none', stroke: true },
    { d: _rect(w, h), fill: 'none', stroke: true },
  ]
})

// actionButtonMovie (VBA 0136): film strip / camera icon
multiPathPresets.set('actionButtonMovie', (w, h) => {
  const { g9, g11, g12, g13 } = _abGuides(w, h)
  // Guide values from OOXML presetShapeDefinitions.xml (fractions of g13 = ss*3/4,
  // expressed in the spec's 21600 coordinate space)
  const g14 = (g13 * 1455) / 21600
  const g15 = (g13 * 1905) / 21600
  const g16 = (g13 * 2325) / 21600
  const g17 = (g13 * 16155) / 21600
  const g18 = (g13 * 17010) / 21600
  const g19 = (g13 * 19335) / 21600
  const g20 = (g13 * 19725) / 21600
  const g21 = (g13 * 20595) / 21600
  const g22 = (g13 * 5280) / 21600
  const g23 = (g13 * 5730) / 21600
  const g24 = (g13 * 6630) / 21600
  const g25 = (g13 * 7492) / 21600
  const g26 = (g13 * 9067) / 21600
  const g27 = (g13 * 9555) / 21600
  const g28 = (g13 * 13342) / 21600
  const g29 = (g13 * 14580) / 21600
  const g30 = (g13 * 15592) / 21600
  // Composite guides: x = g11 + gN, y = g9 + gN
  const x31 = g11 + g14
  const x32 = g11 + g15
  const x33 = g11 + g16
  const x34 = g11 + g17
  const x35 = g11 + g18
  const x36 = g11 + g19
  const x37 = g11 + g20
  const x38 = g11 + g21
  const y39 = g9 + g22
  const y40 = g9 + g23
  const y41 = g9 + g24
  const y42 = g9 + g25
  const y43 = g9 + g26
  const y44 = g9 + g27
  const y45 = g9 + g28
  const y46 = g9 + g29
  const y47 = g9 + g30
  // Single closed polygon tracing the projector/film-strip silhouette
  const icon = [
    `M${g11},${y39}`,
    `L${g11},${y44}`,
    `L${x31},${y44}`,
    `L${x32},${y43}`,
    `L${x33},${y43}`,
    `L${x33},${y47}`,
    `L${x35},${y47}`,
    `L${x35},${y45}`,
    `L${x36},${y45}`,
    `L${x38},${y46}`,
    `L${g12},${y46}`,
    `L${g12},${y41}`,
    `L${x38},${y41}`,
    `L${x37},${y42}`,
    `L${x35},${y42}`,
    `L${x35},${y41}`,
    `L${x34},${y40}`,
    `L${x32},${y40}`,
    `L${x31},${y39}`,
    `Z`,
  ].join(' ')
  return [
    { d: `${_rect(w, h)} ${icon}`, fill: 'norm', stroke: false },
    { d: icon, fill: 'darken', stroke: false },
    { d: icon, fill: 'none', stroke: true },
    { d: _rect(w, h), fill: 'none', stroke: true },
  ]
})

// flowChartOfflineStorage (VBA 0139): inverted triangle with a short
// horizontal detail line near the apex
multiPathPresets.set('flowChartOfflineStorage', (w, h) => {
  const tri = `M0,0 L${w},0 L${w / 2},${h} Z`
  const lineY = (h * 4) / 5
  const line = `M${(w * 2) / 5},${lineY} L${(w * 3) / 5},${lineY}`
  return [
    { d: tri, fill: 'norm', stroke: false },
    { d: line, fill: 'none', stroke: true },
    { d: tri, fill: 'none', stroke: true },
  ]
})

// cube: front face (norm) + lightened top + darkened right side.
// adj = extrusion depth as a fraction of min(w, h); clamped to [0, 0.45].
multiPathPresets.set('cube', (w, h, adjustments) => {
  const a = Math.min(Math.max(adj(adjustments, 'adj', 25000), 0), 0.45)
  const depth = Math.min(w, h) * a
  const front = [
    `M0,${depth}`,
    `L${w - depth},${depth}`,
    `L${w - depth},${h}`,
    `L0,${h}`,
    'Z',
  ].join(' ')
  const top = [`M0,${depth}`, `L${depth},0`, `L${w},0`, `L${w - depth},${depth}`, 'Z'].join(' ')
  const right = [
    `M${w - depth},${depth}`,
    `L${w},0`,
    `L${w},${h - depth}`,
    `L${w - depth},${h}`,
    'Z',
  ].join(' ')
  return [
    { d: front, fill: 'norm', stroke: true },
    { d: top, fill: 'lightenLess', stroke: true },
    { d: right, fill: 'darkenLess', stroke: true },
  ]
})

multiPathPresets.set('bevel', (w, h, adjustments) => {
  // OOXML bevel: picture-frame shape with 4 beveled faces + center rect.
  // adj = bevel thickness (default 12500 = 12.5% of min(w,h)), clamped to [0, 0.45]
  const a = Math.min(Math.max(adj(adjustments, 'adj', 12500), 0), 0.45)
  const t = Math.min(w, h) * a
  // Center panel plus four trapezoidal frame faces, lit as if from top-left
  const inner = `M${t},${t} L${w - t},${t} L${w - t},${h - t} L${t},${h - t} Z`
  const top = `M0,0 L${w},0 L${w - t},${t} L${t},${t} Z`
  const bottom = `M0,${h} L${t},${h - t} L${w - t},${h - t} L${w},${h} Z`
  const left = `M0,0 L${t},${t} L${t},${h - t} L0,${h} Z`
  const right = `M${w},0 L${w},${h} L${w - t},${h - t} L${w - t},${t} Z`
  return [
    { d: inner, fill: 'norm', stroke: true },
    { d: top, fill: 'lightenLess', stroke: true },
    { d: right, fill: 'darkenLess', stroke: true },
    { d: bottom, fill: 'darken', stroke: true },
    { d: left, fill: 'lighten', stroke: true },
  ]
})
multiPathPresets.set('leftRightRibbon', (w, h, adjustments) => {
  // OOXML leftRightRibbon: 3-path shape (body + center fold shadow + stroke outline).
  // adj1=50000 (band height), adj2=50000 (notch width), adj3=16667 (wave amplitude).
  const ss = Math.min(w, h)
  const wd2 = w / 2
  const wd32 = w / 32
  const hc = w / 2
  const vc = h / 2

  // Clamp adjustments per the spec: a3 caps a1's range, a2 is capped by the
  // half-width minus the fold strip.
  const a3 = Math.min(Math.max((adjustments?.get('adj3') ?? 16667) / 100000, 0), 0.33333)
  const maxAdj1 = 1 - a3
  const a1 = Math.min(Math.max((adjustments?.get('adj1') ?? 50000) / 100000, 0), maxAdj1)
  const w1 = wd2 - wd32
  const maxAdj2 = w1 / ss
  const a2 = Math.min(Math.max((adjustments?.get('adj2') ?? 50000) / 100000, 0), maxAdj2)

  // Arrow-head inset and the left/right band y-coordinates (l* = left band,
  // r* = right band, mirrored vertically)
  const x1 = ss * a2
  const x4 = w - x1
  const dy1 = (h * a1) / 2
  const dy2 = (-h * a3) / 2

  const ly1 = vc + dy2 - dy1
  const ry4 = vc + dy1 - dy2
  const ly2 = ly1 + dy1
  const ry3 = h - ly2
  const ly4 = ly2 * 2
  const ry1 = h - ly4
  const ly3 = ly4 - ly1
  const ry2 = h - ly3

  // Center fold strip (width 2*wd32) and its arc radius
  const hR = (a3 * ss) / 4
  const x2 = hc - wd32
  const x3 = hc + wd32
  const y1 = ly1 + hR
  const y2 = ry2 - hR

  // Helper: compute OOXML arcTo → SVG arc segment (center derived from the
  // current point and start angle; endpoint at start+sweep angle)
  const arcTo = (
    curX: number,
    curY: number,
    wR: number,
    hRad: number,
    stDeg: number,
    swDeg: number
  ) => {
    const stRad = (stDeg * Math.PI) / 180
    const endRad = ((stDeg + swDeg) * Math.PI) / 180
    const cx = curX - wR * Math.cos(stRad)
    const cy = curY - hRad * Math.sin(stRad)
    const endX = cx + wR * Math.cos(endRad)
    const endY = cy + hRad * Math.sin(endRad)
    const largeArc = Math.abs(swDeg) > 180 ? 1 : 0
    const sweep = swDeg > 0 ? 1 : 0
    return { endX, endY, svg: `A${wR},${hRad} 0 ${largeArc},${sweep} ${endX},${endY}` }
  }

  // Path 1: Main body (fill, no stroke)
  const cx1 = hc
  const cy1 = ly1 // after lnTo (hc, ly1)
  const arc1a = arcTo(cx1, cy1, wd32, hR, 270, 180)
  const arc1b = arcTo(arc1a.endX, arc1a.endY, wd32, hR, 270, -180)
  const cx1c = hc
  const cy1c = ry4 // after lnTo (hc, ry4)
  const arc1c = arcTo(cx1c, cy1c, wd32, hR, 90, 90)

  const body = [
    `M0,${ly2}`,
    `L${x1},0`,
    `L${x1},${ly1}`,
    `L${hc},${ly1}`,
    arc1a.svg,
    arc1b.svg,
    `L${x4},${ry2}`,
    `L${x4},${ry1}`,
    `L${w},${ry3}`,
    `L${x4},${h}`,
    `L${x4},${ry4}`,
    `L${hc},${ry4}`,
    arc1c.svg,
    `L${x2},${ly3}`,
    `L${x1},${ly3}`,
    `L${x1},${ly4}`,
    'Z',
  ].join(' ')

  // Path 2: Center fold shadow (darkenLess, no stroke)
  const arc2a = arcTo(x3, y1, wd32, hR, 0, 90)
  const arc2b = arcTo(arc2a.endX, arc2a.endY, wd32, hR, 270, -180)

  const shadow = [`M${x3},${y1}`, arc2a.svg, arc2b.svg, `L${x3},${ry2}`, 'Z'].join(' ')

  // Path 3: Stroke outline (no fill) — same as body + interior fold lines
  const outline = [body, `M${x3},${y1} L${x3},${ry2}`, `M${x2},${y2} L${x2},${ly3}`].join(' ')

  return [
    { d: body, fill: 'norm', stroke: false },
    { d: shadow, fill: 'darkenLess', stroke: false },
    { d: outline, fill: 'none', stroke: true },
  ]
})
multiPathPresets.set('ellipseRibbon', (w, h, adjustments) => {
  // OOXML ellipseRibbon: ribbon with parabolic curved bottom edge
  // 3 paths: body (fill=norm), darkenLess shadow folds, outline (fill=none)
  // adj1 = band height, adj2 = center width, adj3 = curve amplitude (per-100000 units)
  const adj1 = adjustments?.get('adj1') ?? 25000
  const adj2 = adjustments?.get('adj2') ?? 50000
  const adj3 = adjustments?.get('adj3') ?? 12500

  // Clamp ranges per the spec; a3 is bounded below so the folds stay visible
  const a1 = Math.max(0, Math.min(adj1, 100000))
  const a2 = Math.max(25000, Math.min(adj2, 75000))
  const q10 = 100000 - a1
  const q11 = q10 / 2
  const q12 = a1 - q11
  const minAdj3 = Math.max(0, q12)
  const a3 = Math.max(minAdj3, Math.min(adj3, a1))

  // Key x-stations: fold inner edges (x2/x5), fold outer edges (x3/x4), notch tips (x6 / wd8)
  const dx2 = (w * a2) / 200000
  const x2 = w / 2 - dx2
  const x3 = x2 + w / 8
  const x4 = w - x3
  const x5 = w - x2
  const x6 = w - w / 8

  const dy1 = (h * a3) / 100000
  const f1 = w > 0 ? (4 * dy1) / w : 0
  // Parabola: p(x) = f1 * x * (1 - x/w) — 0 at both ends, dy1 at center
  const parab = (x: number) => f1 * (x - (x * x) / w)

  const y1 = parab(x3)
  const cx1 = x3 / 2
  const cy1 = f1 * cx1 // Bezier control (approximation of the spec's arc)
  const cx2 = w - cx1

  // q1 redefined: total fold height
  const q1 = (h * a1) / 100000
  const dy3 = q1 - dy1

  const q5 = parab(x2)
  const y3 = q5 + dy3

  const q6 = dy1 + dy3 - y3
  const q7 = q6 + dy1
  const cy3 = q7 + dy3

  const rh = h - q1

  const q8 = (dy1 * 14) / 16
  const y2 = (q8 + rh) / 2

  const y5 = q5 + rh
  const y6 = y3 + rh

  const cx4 = x2 / 2
  const cy4 = f1 * cx4 + rh
  const cx5 = w - cx4

  const cy6 = cy3 + rh

  const y7 = y1 + dy3
  const cy7 = q1 + q1 - y7

  const hc = w / 2
  const wd8 = w / 8

  // Path 1: body fill (stroke=false) — top curved edge, side tails with
  // V-notches, bottom curved edge (quadratic Beziers approximate the arcs)
  const body = [
    `M0,0`,
    `Q${cx1},${cy1} ${x3},${y1}`,
    `L${x2},${y3}`,
    `Q${hc},${cy3} ${x5},${y3}`,
    `L${x4},${y1}`,
    `Q${cx2},${cy1} ${w},0`,
    `L${x6},${y2}`,
    `L${w},${rh}`,
    `Q${cx5},${cy4} ${x5},${y5}`,
    `L${x5},${y6}`,
    `Q${hc},${cy6} ${x2},${y6}`,
    `L${x2},${y5}`,
    `Q${cx4},${cy4} 0,${rh}`,
    `L${wd8},${y2}`,
    `Z`,
  ].join(' ')

  // Path 2: darkenLess shadow folds (stroke=false)
  const shadow = [
    `M${x3},${y7}`,
    `L${x3},${y1}`,
    `L${x2},${y3}`,
    `Q${hc},${cy3} ${x5},${y3}`,
    `L${x4},${y1}`,
    `L${x4},${y7}`,
    `Q${hc},${cy7} ${x3},${y7}`,
    `Z`,
  ].join(' ')

  // Path 3: outline (fill=none) — body silhouette plus interior fold creases
  const outline = [
    `M0,0`,
    `Q${cx1},${cy1} ${x3},${y1}`,
    `L${x2},${y3}`,
    `Q${hc},${cy3} ${x5},${y3}`,
    `L${x4},${y1}`,
    `Q${cx2},${cy1} ${w},0`,
    `L${x6},${y2}`,
    `L${w},${rh}`,
    `Q${cx5},${cy4} ${x5},${y5}`,
    `L${x5},${y6}`,
    `Q${hc},${cy6} ${x2},${y6}`,
    `L${x2},${y5}`,
    `Q${cx4},${cy4} 0,${rh}`,
    `L${wd8},${y2}`,
    `Z`,
    `M${x2},${y5} L${x2},${y3}`,
    `M${x5},${y3} L${x5},${y5}`,
    `M${x3},${y1} L${x3},${y7}`,
    `M${x4},${y7} L${x4},${y1}`,
  ].join(' ')

  return [
    { d: body, fill: 'norm', stroke: false },
    { d: shadow, fill: 'darkenLess', stroke: false },
    { d: outline, fill: 'none', stroke: true },
  ]
})
multiPathPresets.set('ellipseRibbon2', (w, h, adjustments) => {
  // OOXML ellipseRibbon2: inverted ribbon with parabolic curved top edge
  // 3 paths: body (fill=norm), darkenLess shadow folds, outline (fill=none)
  // All y-values computed as b - value (measured from bottom); uN values are
  // the ellipseRibbon distances, yN = b - uN mirrors them vertically.
  const adj1 = adjustments?.get('adj1') ?? 25000
  const adj2 = adjustments?.get('adj2') ?? 50000
  const adj3 = adjustments?.get('adj3') ?? 12500

  // Clamp ranges per the spec (same scheme as ellipseRibbon)
  const a1 = Math.max(0, Math.min(adj1, 100000))
  const a2 = Math.max(25000, Math.min(adj2, 75000))
  const q10 = 100000 - a1
  const q11 = q10 / 2
  const q12 = a1 - q11
  const minAdj3 = Math.max(0, q12)
  const a3 = Math.max(minAdj3, Math.min(adj3, a1))

  const b = h
  const dx2 = (w * a2) / 200000
  const x2 = w / 2 - dx2
  const x3 = x2 + w / 8
  const x4 = w - x3
  const x5 = w - x2
  const x6 = w - w / 8

  const dy1 = (h * a3) / 100000
  const f1 = w > 0 ? (4 * dy1) / w : 0

  // u1 = parabola at x3
  const u1 = f1 * (x3 - (x3 * x3) / w)
  const y1 = b - u1

  const cx1 = x3 / 2
  const cu1 = f1 * cx1
  const cy1 = b - cu1
  const cx2 = w - cx1

  // q1 redefined: total fold height
  const q1 = (h * a1) / 100000
  const dy3 = q1 - dy1

  const q5 = f1 * (x2 - (x2 * x2) / w)
  const u3 = q5 + dy3
  const y3 = b - u3

  const q6 = dy1 + dy3 - u3
  const q7 = q6 + dy1
  const cu3 = q7 + dy3
  const cy3 = b - cu3

  const rh = b - q1

  const q8 = (dy1 * 14) / 16
  const u2 = (q8 + rh) / 2
  const y2 = b - u2

  const u5 = q5 + rh
  const y5 = b - u5

  const u6 = u3 + rh
  const y6 = b - u6

  const cx4 = x2 / 2
  const cu4 = f1 * cx4 + rh
  const cy4 = b - cu4
  const cx5 = w - cx4

  const cu6 = cu3 + rh
  const cy6 = b - cu6

  const u7 = u1 + dy3
  const y7 = b - u7
  const cu7 = q1 + q1 - u7
  const cy7 = b - cu7

  const hc = w / 2
  const wd8 = w / 8

  // Path 1: body fill (stroke=false)
  const body = [
    `M0,${b}`,
    `Q${cx1},${cy1} ${x3},${y1}`,
    `L${x2},${y3}`,
    `Q${hc},${cy3} ${x5},${y3}`,
    `L${x4},${y1}`,
    `Q${cx2},${cy1} ${w},${b}`,
    `L${x6},${y2}`,
    `L${w},${q1}`,
    `Q${cx5},${cy4} ${x5},${y5}`,
    `L${x5},${y6}`,
    `Q${hc},${cy6} ${x2},${y6}`,
    `L${x2},${y5}`,
    `Q${cx4},${cy4} 0,${q1}`,
    `L${wd8},${y2}`,
    `Z`,
  ].join(' ')

  // Path 2: darkenLess shadow folds (stroke=false)
  const shadow = [
    `M${x3},${y7}`,
    `L${x3},${y1}`,
    `L${x2},${y3}`,
    `Q${hc},${cy3} ${x5},${y3}`,
    `L${x4},${y1}`,
    `L${x4},${y7}`,
    `Q${hc},${cy7} ${x3},${y7}`,
    `Z`,
  ].join(' ')

  // Path 3: outline (fill=none) — silhouette traced in the reverse direction
  // of the body, plus interior fold creases
  const outline = [
    `M0,${b}`,
    `L${wd8},${y2}`,
    `L0,${q1}`,
    `Q${cx4},${cy4} ${x2},${y5}`,
    `L${x2},${y6}`,
    `Q${hc},${cy6} ${x5},${y6}`,
    `L${x5},${y5}`,
    `Q${cx5},${cy4} ${w},${q1}`,
    `L${x6},${y2}`,
    `L${w},${b}`,
    `Q${cx2},${cy1} ${x4},${y1}`,
    `L${x5},${y3}`,
    `Q${hc},${cy3} ${x2},${y3}`,
    `L${x3},${y1}`,
    `Q${cx1},${cy1} 0,${b}`,
    `Z`,
    `M${x2},${y3} L${x2},${y5}`,
    `M${x5},${y5} L${x5},${y3}`,
    `M${x3},${y7} L${x3},${y1}`,
    `M${x4},${y1} L${x4},${y7}`,
  ].join(' ')

  return [
    { d: body, fill: 'norm', stroke: false },
    { d: shadow, fill: 'darkenLess', stroke: false },
    { d: outline, fill: 'none', stroke: true },
  ]
})
4653 + const a = Math.max(-4653, Math.min(rawAdj, 4653)) + + // Eye positions (OOXML exact) + const x2 = (w * 6215) / 21600 + const x3 = (w * 13135) / 21600 + const y1 = (h * 7570) / 21600 + const wR = (w * 1125) / 21600 + const hR = (h * 1125) / 21600 + + // Smile curve positions (OOXML exact) + const x1 = (w * 4969) / 21699 + const x4 = (w * 16640) / 21600 + const y3 = (h * 16515) / 21600 + const dy2 = (h * a) / 100000 + const y2 = y3 - dy2 + const y4 = y3 + dy2 + const dy3 = (h * a) / 50000 + const y5 = y4 + dy3 + + // Path 1: face ellipse (fill=norm, stroke=false) — two half-arcs for full circle + const face = `M${w},${vc} A${wd2},${hd2} 0 1,1 0,${vc} A${wd2},${hd2} 0 1,1 ${w},${vc} Z` + + // Path 2: eyes (fill=darkenLess) — two small ellipses at OOXML positions (two half-arcs each) + const leftEye = `M${(x2 + wR).toFixed(2)},${y1.toFixed(2)} A${wR.toFixed(2)},${hR.toFixed(2)} 0 1,1 ${(x2 - wR).toFixed(2)},${y1.toFixed(2)} A${wR.toFixed(2)},${hR.toFixed(2)} 0 1,1 ${(x2 + wR).toFixed(2)},${y1.toFixed(2)} Z` + const rightEye = `M${(x3 + wR).toFixed(2)},${y1.toFixed(2)} A${wR.toFixed(2)},${hR.toFixed(2)} 0 1,1 ${(x3 - wR).toFixed(2)},${y1.toFixed(2)} A${wR.toFixed(2)},${hR.toFixed(2)} 0 1,1 ${(x3 + wR).toFixed(2)},${y1.toFixed(2)} Z` + + // Path 3: smile (fill=none) — quadratic Bezier (OOXML quadBezTo) + const smile = `M${x1.toFixed(2)},${y2.toFixed(2)} Q${hc.toFixed(2)},${y5.toFixed(2)} ${x4.toFixed(2)},${y2.toFixed(2)}` + + // Path 4: face outline (fill=none, stroke=true) — same as path 1 + const outline = `M${w},${vc} A${wd2},${hd2} 0 1,1 0,${vc} A${wd2},${hd2} 0 1,1 ${w},${vc} Z` + + return [ + { d: face, fill: 'norm', stroke: false }, + { d: `${leftEye} ${rightEye}`, fill: 'darkenLess', stroke: false }, + { d: smile, fill: 'none', stroke: true }, + { d: outline, fill: 'none', stroke: true }, + ] +}) + +multiPathPresets.set('foldedCorner', (w, h, adjustments) => { + const a = adj(adjustments, 'adj', 16667) + const fold = Math.min(w, h) * a * 0.7 + const body = 
`M0,0 L${w},0 L${w},${h - fold} L${w - fold},${h} L0,${h} Z` + const foldFace = `M${w - fold},${h} L${w - fold},${h - fold} L${w},${h - fold} Z` + const crease = `M${w - fold},${h} L${w - fold},${h - fold}` + return [ + { d: body, fill: 'norm', stroke: true }, + { d: foldFace, fill: 'darkenLess', stroke: false }, + { d: crease, fill: 'none', stroke: true }, + ] +}) + +multiPathPresets.set('can', (w, h, adjustments) => { + // OOXML: 3 paths — body (norm), top face (lighten), outline (stroke-only) + const ss = Math.min(w, h) + const maxAdj = (50000 * h) / ss + const a = Math.min(Math.max(adjustments?.get('adj') ?? 25000, 0), maxAdj) + const y1 = (ss * a) / 200000 + const y3 = h - y1 + const wd2 = w / 2 + const arcSeg = ( + curX: number, + curY: number, + wR: number, + hR: number, + stDeg: number, + swDeg: number + ) => { + const stRad = (stDeg * Math.PI) / 180 + const endRad = ((stDeg + swDeg) * Math.PI) / 180 + const cx = curX - wR * Math.cos(stRad) + const cy = curY - hR * Math.sin(stRad) + const endX = cx + wR * Math.cos(endRad) + const endY = cy + hR * Math.sin(endRad) + const largeArc = Math.abs(swDeg) > 180 ? 1 : 0 + const sweep = swDeg > 0 ? 
1 : 0 + return { endX, endY, svg: `A${wR},${hR} 0 ${largeArc},${sweep} ${endX},${endY}` } + } + // Path 1: Body (stroke:false, fill:norm) + const a1 = arcSeg(0, y1, wd2, y1, 180, -180) + const a2 = arcSeg(w, y3, wd2, y1, 0, 180) + const body = `M0,${y1} ${a1.svg} L${w},${y3} ${a2.svg} Z` + // Path 2: Top face (stroke:false, fill:lighten) + const a3 = arcSeg(0, y1, wd2, y1, 180, 180) + const a4 = arcSeg(a3.endX, a3.endY, wd2, y1, 0, 180) + const topFace = `M0,${y1} ${a3.svg} ${a4.svg} Z` + // Path 3: Outline (fill:none, stroke:true) + const a5 = arcSeg(w, y1, wd2, y1, 0, 180) + const a6 = arcSeg(a5.endX, a5.endY, wd2, y1, 180, 180) + const a7 = arcSeg(w, y3, wd2, y1, 0, 180) + const outline = `M${w},${y1} ${a5.svg} ${a6.svg} L${w},${y3} ${a7.svg} L0,${y1}` + return [ + { d: body, fill: 'norm', stroke: false }, + { d: topFace, fill: 'lighten', stroke: false }, + { d: outline, fill: 'none', stroke: true }, + ] +}) + +multiPathPresets.set('curvedrightarrow', (w, h, adjustments) => + buildCurvedArrowMultiPath('curvedRightArrow', w, h, adjustments) +) + +multiPathPresets.set('curvedleftarrow', (w, h, adjustments) => + buildCurvedArrowMultiPath('curvedLeftArrow', w, h, adjustments) +) + +multiPathPresets.set('curveduparrow', (w, h, adjustments) => + buildCurvedVerticalArrowMultiPath('curvedUpArrow', w, h, adjustments) +) + +multiPathPresets.set('curveddownarrow', (w, h, adjustments) => + buildCurvedVerticalArrowMultiPath('curvedDownArrow', w, h, adjustments) +) + +multiPathPresets.set('bordercallout1', (w, h, adjustments) => { + // OOXML: filled+stroked rectangle body + separate leader line (stroke-only). + const y1 = (h * (adjustments?.get('adj1') ?? 18750)) / 100000 + const x1 = (w * (adjustments?.get('adj2') ?? -8333)) / 100000 + const y2 = (h * (adjustments?.get('adj3') ?? 112500)) / 100000 + const x2 = (w * (adjustments?.get('adj4') ?? 
-38333)) / 100000 + return [ + { d: `M0,0 L${w},0 L${w},${h} L0,${h} Z`, fill: 'norm', stroke: true }, + { d: `M${x1},${y1} L${x2},${y2}`, fill: 'none', stroke: true }, + ] +}) + +multiPathPresets.set('accentcallout1', (w, h, adjustments) => { + // OOXML: filled rect + accent bar at x1 + 1-segment callout line + const y1 = (h * (adjustments?.get('adj1') ?? 18750)) / 100000 + const x1 = (w * (adjustments?.get('adj2') ?? -8333)) / 100000 + const y2 = (h * (adjustments?.get('adj3') ?? 112500)) / 100000 + const x2 = (w * (adjustments?.get('adj4') ?? -38333)) / 100000 + return [ + { d: `M0,0 L${w},0 L${w},${h} L0,${h} Z`, fill: 'norm', stroke: false }, + { d: `M${x1},0 L${x1},${h}`, fill: 'none', stroke: true }, + { d: `M${x1},${y1} L${x2},${y2}`, fill: 'none', stroke: true }, + ] +}) + +multiPathPresets.set('accentcallout2', (w, h, adjustments) => { + // OOXML: filled rect + accent bar at x1 + 2-segment callout line + const y1 = (h * (adjustments?.get('adj1') ?? 18750)) / 100000 + const x1 = (w * (adjustments?.get('adj2') ?? -8333)) / 100000 + const y2 = (h * (adjustments?.get('adj3') ?? 18750)) / 100000 + const x2 = (w * (adjustments?.get('adj4') ?? -16667)) / 100000 + const y3 = (h * (adjustments?.get('adj5') ?? 112500)) / 100000 + const x3 = (w * (adjustments?.get('adj6') ?? -46667)) / 100000 + return [ + { d: `M0,0 L${w},0 L${w},${h} L0,${h} Z`, fill: 'norm', stroke: false }, + { d: `M${x1},0 L${x1},${h}`, fill: 'none', stroke: true }, + { d: `M${x1},${y1} L${x2},${y2} L${x3},${y3}`, fill: 'none', stroke: true }, + ] +}) + +multiPathPresets.set('accentcallout3', (w, h, adjustments) => { + // OOXML: filled rect + accent bar at x1 + 3-segment callout line + const y1 = (h * (adjustments?.get('adj1') ?? 18750)) / 100000 + const x1 = (w * (adjustments?.get('adj2') ?? -8333)) / 100000 + const y2 = (h * (adjustments?.get('adj3') ?? 18750)) / 100000 + const x2 = (w * (adjustments?.get('adj4') ?? -16667)) / 100000 + const y3 = (h * (adjustments?.get('adj5') ?? 
100000)) / 100000 + const x3 = (w * (adjustments?.get('adj6') ?? -16667)) / 100000 + const y4 = (h * (adjustments?.get('adj7') ?? 112963)) / 100000 + const x4 = (w * (adjustments?.get('adj8') ?? -8333)) / 100000 + return [ + { d: `M0,0 L${w},0 L${w},${h} L0,${h} Z`, fill: 'norm', stroke: false }, + { d: `M${x1},0 L${x1},${h}`, fill: 'none', stroke: true }, + { d: `M${x1},${y1} L${x2},${y2} L${x3},${y3} L${x4},${y4}`, fill: 'none', stroke: true }, + ] +}) + +// --- callout1/2/3: filled rect (no stroke) + callout line segments --- +multiPathPresets.set('callout1', (w, h, adjustments) => { + const y1 = (h * (adjustments?.get('adj1') ?? 18750)) / 100000 + const x1 = (w * (adjustments?.get('adj2') ?? -8333)) / 100000 + const y2 = (h * (adjustments?.get('adj3') ?? 112500)) / 100000 + const x2 = (w * (adjustments?.get('adj4') ?? -38333)) / 100000 + return [ + { d: `M0,0 L${w},0 L${w},${h} L0,${h} Z`, fill: 'norm', stroke: false }, + { d: `M${x1},${y1} L${x2},${y2}`, fill: 'none', stroke: true }, + ] +}) + +multiPathPresets.set('callout2', (w, h, adjustments) => { + const y1 = (h * (adjustments?.get('adj1') ?? 18750)) / 100000 + const x1 = (w * (adjustments?.get('adj2') ?? -8333)) / 100000 + const y2 = (h * (adjustments?.get('adj3') ?? 18750)) / 100000 + const x2 = (w * (adjustments?.get('adj4') ?? -16667)) / 100000 + const y3 = (h * (adjustments?.get('adj5') ?? 112500)) / 100000 + const x3 = (w * (adjustments?.get('adj6') ?? -46667)) / 100000 + return [ + { d: `M0,0 L${w},0 L${w},${h} L0,${h} Z`, fill: 'norm', stroke: false }, + { d: `M${x1},${y1} L${x2},${y2} L${x3},${y3}`, fill: 'none', stroke: true }, + ] +}) + +multiPathPresets.set('callout3', (w, h, adjustments) => { + const y1 = (h * (adjustments?.get('adj1') ?? 18750)) / 100000 + const x1 = (w * (adjustments?.get('adj2') ?? -8333)) / 100000 + const y2 = (h * (adjustments?.get('adj3') ?? 18750)) / 100000 + const x2 = (w * (adjustments?.get('adj4') ?? -16667)) / 100000 + const y3 = (h * (adjustments?.get('adj5') ?? 
100000)) / 100000 + const x3 = (w * (adjustments?.get('adj6') ?? -16667)) / 100000 + const y4 = (h * (adjustments?.get('adj7') ?? 112963)) / 100000 + const x4 = (w * (adjustments?.get('adj8') ?? -8333)) / 100000 + return [ + { d: `M0,0 L${w},0 L${w},${h} L0,${h} Z`, fill: 'norm', stroke: false }, + { d: `M${x1},${y1} L${x2},${y2} L${x3},${y3} L${x4},${y4}`, fill: 'none', stroke: true }, + ] +}) + +// --- borderCallout2/3: filled+stroked rect + callout line segments --- +multiPathPresets.set('bordercallout2', (w, h, adjustments) => { + const y1 = (h * (adjustments?.get('adj1') ?? 18750)) / 100000 + const x1 = (w * (adjustments?.get('adj2') ?? -8333)) / 100000 + const y2 = (h * (adjustments?.get('adj3') ?? 18750)) / 100000 + const x2 = (w * (adjustments?.get('adj4') ?? -16667)) / 100000 + const y3 = (h * (adjustments?.get('adj5') ?? 112500)) / 100000 + const x3 = (w * (adjustments?.get('adj6') ?? -46667)) / 100000 + return [ + { d: `M0,0 L${w},0 L${w},${h} L0,${h} Z`, fill: 'norm', stroke: true }, + { d: `M${x1},${y1} L${x2},${y2} L${x3},${y3}`, fill: 'none', stroke: true }, + ] +}) + +multiPathPresets.set('bordercallout3', (w, h, adjustments) => { + const y1 = (h * (adjustments?.get('adj1') ?? 18750)) / 100000 + const x1 = (w * (adjustments?.get('adj2') ?? -8333)) / 100000 + const y2 = (h * (adjustments?.get('adj3') ?? 18750)) / 100000 + const x2 = (w * (adjustments?.get('adj4') ?? -16667)) / 100000 + const y3 = (h * (adjustments?.get('adj5') ?? 100000)) / 100000 + const x3 = (w * (adjustments?.get('adj6') ?? -16667)) / 100000 + const y4 = (h * (adjustments?.get('adj7') ?? 112963)) / 100000 + const x4 = (w * (adjustments?.get('adj8') ?? 
-8333)) / 100000 + return [ + { d: `M0,0 L${w},0 L${w},${h} L0,${h} Z`, fill: 'norm', stroke: true }, + { d: `M${x1},${y1} L${x2},${y2} L${x3},${y3} L${x4},${y4}`, fill: 'none', stroke: true }, + ] +}) + +// --- accentBorderCallout1/2/3: filled+stroked rect + accent bar + callout line --- +multiPathPresets.set('accentbordercallout1', (w, h, adjustments) => { + const y1 = (h * (adjustments?.get('adj1') ?? 18750)) / 100000 + const x1 = (w * (adjustments?.get('adj2') ?? -8333)) / 100000 + const y2 = (h * (adjustments?.get('adj3') ?? 112500)) / 100000 + const x2 = (w * (adjustments?.get('adj4') ?? -38333)) / 100000 + return [ + { d: `M0,0 L${w},0 L${w},${h} L0,${h} Z`, fill: 'norm', stroke: true }, + { d: `M${x1},0 L${x1},${h}`, fill: 'none', stroke: true }, + { d: `M${x1},${y1} L${x2},${y2}`, fill: 'none', stroke: true }, + ] +}) + +multiPathPresets.set('accentbordercallout2', (w, h, adjustments) => { + const y1 = (h * (adjustments?.get('adj1') ?? 18750)) / 100000 + const x1 = (w * (adjustments?.get('adj2') ?? -8333)) / 100000 + const y2 = (h * (adjustments?.get('adj3') ?? 18750)) / 100000 + const x2 = (w * (adjustments?.get('adj4') ?? -16667)) / 100000 + const y3 = (h * (adjustments?.get('adj5') ?? 112500)) / 100000 + const x3 = (w * (adjustments?.get('adj6') ?? -46667)) / 100000 + return [ + { d: `M0,0 L${w},0 L${w},${h} L0,${h} Z`, fill: 'norm', stroke: true }, + { d: `M${x1},0 L${x1},${h}`, fill: 'none', stroke: true }, + { d: `M${x1},${y1} L${x2},${y2} L${x3},${y3}`, fill: 'none', stroke: true }, + ] +}) + +multiPathPresets.set('accentbordercallout3', (w, h, adjustments) => { + const y1 = (h * (adjustments?.get('adj1') ?? 18750)) / 100000 + const x1 = (w * (adjustments?.get('adj2') ?? -8333)) / 100000 + const y2 = (h * (adjustments?.get('adj3') ?? 18750)) / 100000 + const x2 = (w * (adjustments?.get('adj4') ?? -16667)) / 100000 + const y3 = (h * (adjustments?.get('adj5') ?? 100000)) / 100000 + const x3 = (w * (adjustments?.get('adj6') ?? 
-16667)) / 100000 + const y4 = (h * (adjustments?.get('adj7') ?? 112963)) / 100000 + const x4 = (w * (adjustments?.get('adj8') ?? -8333)) / 100000 + return [ + { d: `M0,0 L${w},0 L${w},${h} L0,${h} Z`, fill: 'norm', stroke: true }, + { d: `M${x1},0 L${x1},${h}`, fill: 'none', stroke: true }, + { d: `M${x1},${y1} L${x2},${y2} L${x3},${y3} L${x4},${y4}`, fill: 'none', stroke: true }, + ] +}) + +// Chart placeholders: frame + guide lines. +// PowerPoint uses these as pre-chart placeholders (chartX / chartPlus / chartStar). +multiPathPresets.set('chartx', (w, h) => { + return [ + { d: `M0,0 L${w},0 L${w},${h} L0,${h} Z`, fill: 'norm', stroke: false }, + { d: `M0,0 L${w},${h} M${w},0 L0,${h}`, fill: 'none', stroke: true }, + ] +}) + +multiPathPresets.set('chartplus', (w, h) => { + const cx = w / 2 + const cy = h / 2 + return [ + { d: `M0,0 L${w},0 L${w},${h} L0,${h} Z`, fill: 'norm', stroke: false }, + { d: `M${cx},0 L${cx},${h} M0,${cy} L${w},${cy}`, fill: 'none', stroke: true }, + ] +}) + +multiPathPresets.set('chartstar', (w, h) => { + // OOXML: 3 guide paths — 2 diagonals + 1 vertical (no horizontal center line) + const cx = w / 2 + return [ + { d: `M0,0 L${w},0 L${w},${h} L0,${h} Z`, fill: 'norm', stroke: false }, + { + d: `M0,0 L${w},${h} M${w},0 L0,${h} M${cx},0 L${cx},${h}`, + fill: 'none', + stroke: true, + }, + ] +}) + +/** + * Helper: compute OOXML arcTo endpoint and SVG arc command from current position. 
+ * OOXML arcTo: center = curPos - radius*dir(stAng), endpoint = center + radius*dir(stAng+swAng) + * Returns { svgArc, endX, endY } + */ +function ooArcTo( + curX: number, + curY: number, + wR: number, + hR: number, + stAngDeg: number, + swAngDeg: number +): { svg: string; x: number; y: number } { + const stRad = (stAngDeg * Math.PI) / 180 + const cx = curX - wR * Math.cos(stRad) + const cy = curY - hR * Math.sin(stRad) + const endRad = ((stAngDeg + swAngDeg) * Math.PI) / 180 + const ex = cx + wR * Math.cos(endRad) + const ey = cy + hR * Math.sin(endRad) + const absSweep = Math.abs(swAngDeg) + const largeArc = absSweep > 180 ? 1 : 0 + const sweepFlag = swAngDeg >= 0 ? 1 : 0 + return { svg: `A${wR},${hR} 0 ${largeArc},${sweepFlag} ${ex},${ey}`, x: ex, y: ey } +} + +// --- ribbon (OOXML spec: 3 paths with arcTo, adj1=16667, adj2=50000) --- +// Ribbon with tails at top, front panel at bottom. Three paths: body, darkenLess folds, outline. +multiPathPresets.set('ribbon', (w, h, adjustments) => { + const adj1Raw = adjustments?.get('adj1') ?? 16667 + const adj2Raw = adjustments?.get('adj2') ?? 
50000 + const a1 = Math.min(Math.max(adj1Raw, 0), 33333) + const a2 = Math.min(Math.max(adj2Raw, 25000), 75000) + + const hc = w / 2 + const wd8 = w / 8 + const wd32 = w / 32 + const x10 = w - wd8 + const dx2 = (w * a2) / 200000 + const x2 = hc - dx2 + const x9 = hc + dx2 + const x3 = x2 + wd32 + const x8 = x9 - wd32 + const x5 = x2 + wd8 + const x6 = x9 - wd8 + const x4 = x5 - wd32 + const x7 = x6 + wd32 + const y1 = (h * a1) / 200000 + const y2 = (h * a1) / 100000 + const y4 = h - y2 + const y3 = y4 / 2 + const hR = (h * a1) / 400000 + const y5 = h - hR + const y6 = y2 - hR + + let cx: number + let cy: number + let arc + + // Path 1: body fill (stroke=false) + const p1: string[] = [] + cx = 0 + cy = 0 + p1.push(`M${0},${0}`) + p1.push(`L${x4},${0}`) + cx = x4 + cy = 0 + arc = ooArcTo(cx, cy, wd32, hR, 270, 180) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + p1.push(`L${x3},${y1}`) + cx = x3 + cy = y1 + arc = ooArcTo(cx, cy, wd32, hR, 270, -180) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + p1.push(`L${x8},${y2}`) + cx = x8 + cy = y2 + arc = ooArcTo(cx, cy, wd32, hR, 90, -180) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + p1.push(`L${x7},${y1}`) + cx = x7 + cy = y1 + arc = ooArcTo(cx, cy, wd32, hR, 90, 180) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + p1.push(`L${w},${0}`) + p1.push(`L${x10},${y3}`) + p1.push(`L${w},${y4}`) + p1.push(`L${x9},${y4}`) + p1.push(`L${x9},${y5}`) + cx = x9 + cy = y5 + arc = ooArcTo(cx, cy, wd32, hR, 0, 90) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + p1.push(`L${x3},${h}`) + cx = x3 + cy = h + arc = ooArcTo(cx, cy, wd32, hR, 90, 90) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + p1.push(`L${x2},${y4}`) + p1.push(`L${0},${y4}`) + p1.push(`L${wd8},${y3}`) + p1.push('Z') + + // Path 2: darkenLess folds (stroke=false) + const p2: string[] = [] + // Left fold + cx = x5 + cy = hR + p2.push(`M${cx},${cy}`) + arc = ooArcTo(cx, cy, wd32, hR, 0, 90) + p2.push(arc.svg) + cx = arc.x + cy = arc.y + p2.push(`L${x3},${y1}`) + cx = x3 + cy = y1 
+ arc = ooArcTo(cx, cy, wd32, hR, 270, -180) + p2.push(arc.svg) + cx = arc.x + cy = arc.y + p2.push(`L${x5},${y2}`) + p2.push('Z') + // Right fold + cx = x6 + cy = hR + p2.push(`M${cx},${cy}`) + arc = ooArcTo(cx, cy, wd32, hR, 180, -90) + p2.push(arc.svg) + cx = arc.x + cy = arc.y + p2.push(`L${x8},${y1}`) + cx = x8 + cy = y1 + arc = ooArcTo(cx, cy, wd32, hR, 270, 180) + p2.push(arc.svg) + cx = arc.x + cy = arc.y + p2.push(`L${x6},${y2}`) + p2.push('Z') + + // Path 3: outline (fill=none, includes fold lines) + const p3: string[] = [] + cx = 0 + cy = 0 + p3.push(`M${0},${0}`) + p3.push(`L${x4},${0}`) + cx = x4 + cy = 0 + arc = ooArcTo(cx, cy, wd32, hR, 270, 180) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${x3},${y1}`) + cx = x3 + cy = y1 + arc = ooArcTo(cx, cy, wd32, hR, 270, -180) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${x8},${y2}`) + cx = x8 + cy = y2 + arc = ooArcTo(cx, cy, wd32, hR, 90, -180) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${x7},${y1}`) + cx = x7 + cy = y1 + arc = ooArcTo(cx, cy, wd32, hR, 90, 180) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${w},${0}`) + p3.push(`L${x10},${y3}`) + p3.push(`L${w},${y4}`) + p3.push(`L${x9},${y4}`) + p3.push(`L${x9},${y5}`) + cx = x9 + cy = y5 + arc = ooArcTo(cx, cy, wd32, hR, 0, 90) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${x3},${h}`) + cx = x3 + cy = h + arc = ooArcTo(cx, cy, wd32, hR, 90, 90) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${x2},${y4}`) + p3.push(`L${0},${y4}`) + p3.push(`L${wd8},${y3}`) + p3.push('Z') + // Fold lines + p3.push(`M${x5},${hR} L${x5},${y2}`) + p3.push(`M${x6},${y2} L${x6},${hR}`) + p3.push(`M${x2},${y4} L${x2},${y6}`) + p3.push(`M${x9},${y6} L${x9},${y4}`) + + return [ + { d: p1.join(' '), fill: 'norm', stroke: false }, + { d: p2.join(' '), fill: 'darkenLess', stroke: false }, + { d: p3.join(' '), fill: 'none', stroke: true }, + ] +}) + +// --- ribbon2 (OOXML spec: 3 paths, inverted ribbon with tails 
at bottom) --- +multiPathPresets.set('ribbon2', (w, h, adjustments) => { + const adj1Raw = adjustments?.get('adj1') ?? 16667 + const adj2Raw = adjustments?.get('adj2') ?? 50000 + const a1 = Math.min(Math.max(adj1Raw, 0), 33333) + const a2 = Math.min(Math.max(adj2Raw, 25000), 75000) + + const hc = w / 2 + const wd8 = w / 8 + const wd32 = w / 32 + const x10 = w - wd8 + const dx2 = (w * a2) / 200000 + const x2 = hc - dx2 + const x9 = hc + dx2 + const x3 = x2 + wd32 + const x8 = x9 - wd32 + const x5 = x2 + wd8 + const x6 = x9 - wd8 + const x4 = x5 - wd32 + const x7 = x6 + wd32 + const dy1 = (h * a1) / 200000 + const y1 = h - dy1 + const dy2 = (h * a1) / 100000 + const y2 = h - dy2 + const y4 = dy2 + const y3 = (y4 + h) / 2 + const hR = (h * a1) / 400000 + const y6 = h - hR + const y7 = y1 - hR + + let cx: number + let cy: number + let arc + + // Path 1: body fill (stroke=false) + const p1: string[] = [] + p1.push(`M${0},${h}`) + p1.push(`L${x4},${h}`) + cx = x4 + cy = h + arc = ooArcTo(cx, cy, wd32, hR, 90, -180) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + p1.push(`L${x3},${y1}`) + cx = x3 + cy = y1 + arc = ooArcTo(cx, cy, wd32, hR, 90, 180) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + p1.push(`L${x8},${y2}`) + cx = x8 + cy = y2 + arc = ooArcTo(cx, cy, wd32, hR, 270, 180) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + p1.push(`L${x7},${y1}`) + cx = x7 + cy = y1 + arc = ooArcTo(cx, cy, wd32, hR, 270, -180) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + p1.push(`L${w},${h}`) + p1.push(`L${x10},${y3}`) + p1.push(`L${w},${y4}`) + p1.push(`L${x9},${y4}`) + p1.push(`L${x9},${hR}`) + cx = x9 + cy = hR + arc = ooArcTo(cx, cy, wd32, hR, 0, -90) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + p1.push(`L${x3},${0}`) + cx = x3 + cy = 0 + arc = ooArcTo(cx, cy, wd32, hR, 270, -90) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + p1.push(`L${x2},${y4}`) + p1.push(`L${0},${y4}`) + p1.push(`L${wd8},${y3}`) + p1.push('Z') + + // Path 2: darkenLess folds (stroke=false) + const p2: 
string[] = [] + // Left fold + cx = x5 + cy = y6 + p2.push(`M${cx},${cy}`) + arc = ooArcTo(cx, cy, wd32, hR, 0, -90) + p2.push(arc.svg) + cx = arc.x + cy = arc.y + p2.push(`L${x3},${y1}`) + cx = x3 + cy = y1 + arc = ooArcTo(cx, cy, wd32, hR, 90, 180) + p2.push(arc.svg) + cx = arc.x + cy = arc.y + p2.push(`L${x5},${y2}`) + p2.push('Z') + // Right fold + cx = x6 + cy = y6 + p2.push(`M${cx},${cy}`) + arc = ooArcTo(cx, cy, wd32, hR, 180, 90) + p2.push(arc.svg) + cx = arc.x + cy = arc.y + p2.push(`L${x8},${y1}`) + cx = x8 + cy = y1 + arc = ooArcTo(cx, cy, wd32, hR, 90, -180) + p2.push(arc.svg) + cx = arc.x + cy = arc.y + p2.push(`L${x6},${y2}`) + p2.push('Z') + + // Path 3: outline (fill=none) + const p3: string[] = [] + p3.push(`M${0},${h}`) + p3.push(`L${wd8},${y3}`) + p3.push(`L${0},${y4}`) + p3.push(`L${x2},${y4}`) + p3.push(`L${x2},${hR}`) + cx = x2 + cy = hR + arc = ooArcTo(cx, cy, wd32, hR, 180, 90) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${x8},${0}`) + cx = x8 + cy = 0 + arc = ooArcTo(cx, cy, wd32, hR, 270, 90) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${x9},${y4}`) + p3.push(`L${w},${y4}`) + p3.push(`L${x10},${y3}`) + p3.push(`L${w},${h}`) + p3.push(`L${x7},${h}`) + cx = x7 + cy = h + arc = ooArcTo(cx, cy, wd32, hR, 90, 180) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${x8},${y1}`) + cx = x8 + cy = y1 + arc = ooArcTo(cx, cy, wd32, hR, 90, -180) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${x3},${y2}`) + cx = x3 + cy = y2 + arc = ooArcTo(cx, cy, wd32, hR, 270, -180) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${x4},${y1}`) + cx = x4 + cy = y1 + arc = ooArcTo(cx, cy, wd32, hR, 270, 180) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push('Z') + // Fold lines + p3.push(`M${x5},${y2} L${x5},${y6}`) + p3.push(`M${x6},${y6} L${x6},${y2}`) + p3.push(`M${x2},${y7} L${x2},${y4}`) + p3.push(`M${x9},${y4} L${x9},${y7}`) + + return [ + { d: p1.join(' '), fill: 'norm', stroke: false }, + { d: 
p2.join(' '), fill: 'darkenLess', stroke: false }, + { d: p3.join(' '), fill: 'none', stroke: true }, + ] +}) + +// --- horizontalScroll (OOXML spec: 3 paths with arcTo) --- +multiPathPresets.set('horizontalscroll', (w, h, adjustments) => { + const adjVal = adjustments?.get('adj') ?? 12500 + const a = Math.min(Math.max(adjVal, 0), 25000) + const ss = Math.min(w, h) + const ch = (ss * a) / 100000 + const ch2 = ch / 2 + const ch4 = ch / 4 + + const y3 = ch + ch2 + const y4 = ch + ch + const y6 = h - ch + const y7 = h - ch2 + const y5 = y6 - ch2 + const x3 = w - ch + const x4 = w - ch2 + + // Path 1: main fill (stroke=false) + const p1: string[] = [] + let cx: number + let cy: number + // moveTo (r, ch2) = (w, ch2) + cx = w + cy = ch2 + p1.push(`M${cx},${cy}`) + // arcTo wR=ch2 hR=ch2 stAng=0 swAng=cd4(90°) + let arc = ooArcTo(cx, cy, ch2, ch2, 0, 90) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + // lnTo (x4, ch2) — but after the arc we should be at (x4, 0)… wait + // Actually: arcTo from (w, ch2) with stAng=0 swAng=90° → center=(w-ch2, ch2), end=(w-ch2, 0)=x4,0 + // Then lnTo (x4, ch2)... hmm, this goes from top-right curl area + // Let me re-read: lnTo pt x="x4" y="ch2"... that doesn't match. Wait, the lnTo goes DOWN. + // After arc: we're at (x4, 0). 
lnTo (x4, ch2): + p1.push(`L${x4},${ch2}`) + // arcTo wR=ch4 hR=ch4 stAng=0 swAng=cd2(180°) + arc = ooArcTo(x4, ch2, ch4, ch4, 0, 180) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + // lnTo (x3, ch) + p1.push(`L${x3},${ch}`) + // lnTo (ch2, ch) + p1.push(`L${ch2},${ch}`) + // arcTo wR=ch2 hR=ch2 stAng=3cd4(270°) swAng=-5400000(-90°) + cx = ch2 + cy = ch + arc = ooArcTo(cx, cy, ch2, ch2, 270, -90) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + // lnTo (0, y7) + p1.push(`L${0},${y7}`) + // arcTo wR=ch2 hR=ch2 stAng=cd2(180°) swAng=-10800000(-180°) + cx = 0 + cy = y7 + arc = ooArcTo(cx, cy, ch2, ch2, 180, -180) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + // lnTo (ch, y6) + p1.push(`L${ch},${y6}`) + // lnTo (x4, y6) + p1.push(`L${x4},${y6}`) + // arcTo wR=ch2 hR=ch2 stAng=cd4(90°) swAng=-5400000(-90°) + cx = x4 + cy = y6 + arc = ooArcTo(cx, cy, ch2, ch2, 90, -90) + p1.push(arc.svg) + p1.push('Z') + + // Sub-path 2 in Path 1: left bottom curl circle + cx = ch2 + cy = y4 + p1.push(`M${cx},${cy}`) + // arcTo wR=ch2 hR=ch2 stAng=cd4(90°) swAng=-5400000(-90°) + arc = ooArcTo(cx, cy, ch2, ch2, 90, -90) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + // arcTo wR=ch4 hR=ch4 stAng=0 swAng=-10800000(-180°) + arc = ooArcTo(cx, cy, ch4, ch4, 0, -180) + p1.push(arc.svg) + p1.push('Z') + + // Path 2: darkenLess fill (stroke=false) — shadow areas + const p2: string[] = [] + // Sub-path 1: same as path1 sub-path2 (left bottom curl) + cx = ch2 + cy = y4 + p2.push(`M${cx},${cy}`) + arc = ooArcTo(cx, cy, ch2, ch2, 90, -90) + p2.push(arc.svg) + cx = arc.x + cy = arc.y + arc = ooArcTo(cx, cy, ch4, ch4, 0, -180) + p2.push(arc.svg) + p2.push('Z') + // Sub-path 2: right top curl + cx = x4 + cy = ch + p2.push(`M${cx},${cy}`) + // arcTo wR=ch2 hR=ch2 stAng=cd4(90°) swAng=-16200000(-270°) + arc = ooArcTo(cx, cy, ch2, ch2, 90, -270) + p2.push(arc.svg) + cx = arc.x + cy = arc.y + // arcTo wR=ch4 hR=ch4 stAng=cd2(180°) swAng=-10800000(-180°) + arc = ooArcTo(cx, cy, ch4, ch4, 180, -180) + 
p2.push(arc.svg) + p2.push('Z') + + // Path 3: stroke-only detail lines (fill=none) + const p3: string[] = [] + // Sub-path 1: left side detail + cx = 0 + cy = y3 + p3.push(`M${cx},${cy}`) + arc = ooArcTo(cx, cy, ch2, ch2, 180, 90) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${x3},${ch}`) + p3.push(`L${x3},${ch2}`) + cx = x3 + cy = ch2 + arc = ooArcTo(cx, cy, ch2, ch2, 180, 180) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${w},${y5}`) + cx = w + cy = y5 + arc = ooArcTo(cx, cy, ch2, ch2, 0, 90) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${ch},${y6}`) + p3.push(`L${ch},${y7}`) + cx = ch + cy = y7 + arc = ooArcTo(cx, cy, ch2, ch2, 0, 180) + p3.push(arc.svg) + p3.push('Z') + + // Sub-path 2: top-right connector + p3.push(`M${x3},${ch}`) + p3.push(`L${x4},${ch}`) + cx = x4 + cy = ch + arc = ooArcTo(cx, cy, ch2, ch2, 90, -90) + p3.push(arc.svg) + + // Sub-path 3: right curl inner detail + p3.push(`M${x4},${ch}`) + p3.push(`L${x4},${ch2}`) + cx = x4 + cy = ch2 + arc = ooArcTo(cx, cy, ch4, ch4, 0, 180) + p3.push(arc.svg) + + // Sub-path 4: left curl inner detail + p3.push(`M${ch2},${y4}`) + p3.push(`L${ch2},${y3}`) + cx = ch2 + cy = y3 + arc = ooArcTo(cx, cy, ch4, ch4, 180, 180) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + arc = ooArcTo(cx, cy, ch2, ch2, 0, 180) + p3.push(arc.svg) + + // Sub-path 5: vertical divider + p3.push(`M${ch},${y3}`) + p3.push(`L${ch},${y6}`) + + return [ + { d: p1.join(' '), fill: 'norm', stroke: false }, + { d: p2.join(' '), fill: 'darkenLess', stroke: false }, + { d: p3.join(' '), fill: 'none', stroke: true }, + ] +}) + +// --- verticalScroll (OOXML spec: 3 paths with arcTo) --- +multiPathPresets.set('verticalscroll', (w, h, adjustments) => { + const adjVal = adjustments?.get('adj') ?? 
12500 + const a = Math.min(Math.max(adjVal, 0), 25000) + const ss = Math.min(w, h) + const ch = (ss * a) / 100000 + const ch2 = ch / 2 + const ch4 = ch / 4 + + const x3 = ch + ch2 + const x4 = ch + ch + const x6 = w - ch + const x7 = w - ch2 + const _x5 = x6 - ch2 + const y3 = h - ch + const y4 = h - ch2 + + // Path 1: main fill (stroke=false) + const p1: string[] = [] + let cx: number + let cy: number + cx = ch2 + cy = h + p1.push(`M${cx},${cy}`) + let arc = ooArcTo(cx, cy, ch2, ch2, 90, -90) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + p1.push(`L${ch2},${y4}`) + cx = ch2 + cy = y4 + arc = ooArcTo(cx, cy, ch4, ch4, 90, -180) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + p1.push(`L${ch},${y3}`) + p1.push(`L${ch},${ch2}`) + cx = ch + cy = ch2 + arc = ooArcTo(cx, cy, ch2, ch2, 180, 90) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + p1.push(`L${x7},${0}`) + cx = x7 + cy = 0 + arc = ooArcTo(cx, cy, ch2, ch2, 270, 180) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + p1.push(`L${x6},${ch}`) + p1.push(`L${x6},${y4}`) + cx = x6 + cy = y4 + arc = ooArcTo(cx, cy, ch2, ch2, 0, 90) + p1.push(arc.svg) + p1.push('Z') + + // Sub-path 2: top-right curl circle + cx = x4 + cy = ch2 + p1.push(`M${cx},${cy}`) + arc = ooArcTo(cx, cy, ch2, ch2, 0, 90) + p1.push(arc.svg) + cx = arc.x + cy = arc.y + arc = ooArcTo(cx, cy, ch4, ch4, 90, 180) + p1.push(arc.svg) + p1.push('Z') + + // Path 2: darkenLess fill (stroke=false) + const p2: string[] = [] + cx = x4 + cy = ch2 + p2.push(`M${cx},${cy}`) + arc = ooArcTo(cx, cy, ch2, ch2, 0, 90) + p2.push(arc.svg) + cx = arc.x + cy = arc.y + arc = ooArcTo(cx, cy, ch4, ch4, 90, 180) + p2.push(arc.svg) + p2.push('Z') + + cx = ch + cy = y4 + p2.push(`M${cx},${cy}`) + arc = ooArcTo(cx, cy, ch2, ch2, 0, 270) + p2.push(arc.svg) + cx = arc.x + cy = arc.y + arc = ooArcTo(cx, cy, ch4, ch4, 270, 180) + p2.push(arc.svg) + p2.push('Z') + + // Path 3: stroke-only detail lines (fill=none) + const p3: string[] = [] + cx = ch + cy = y3 + p3.push(`M${cx},${cy}`) + 
p3.push(`L${ch},${ch2}`) + cx = ch + cy = ch2 + arc = ooArcTo(cx, cy, ch2, ch2, 180, 90) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${x7},${0}`) + cx = x7 + cy = 0 + arc = ooArcTo(cx, cy, ch2, ch2, 270, 180) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${x6},${ch}`) + p3.push(`L${x6},${y4}`) + cx = x6 + cy = y4 + arc = ooArcTo(cx, cy, ch2, ch2, 0, 90) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${ch2},${h}`) + cx = ch2 + cy = h + arc = ooArcTo(cx, cy, ch2, ch2, 90, 180) + p3.push(arc.svg) + p3.push('Z') + + // top curl + p3.push(`M${x3},${0}`) + cx = x3 + cy = 0 + arc = ooArcTo(cx, cy, ch2, ch2, 270, 180) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + arc = ooArcTo(cx, cy, ch4, ch4, 90, 180) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${x4},${ch2}`) + + // horizontal divider + p3.push(`M${x6},${ch}`) + p3.push(`L${x3},${ch}`) + + // bottom-left curl detail + p3.push(`M${ch2},${y3}`) + cx = ch2 + cy = y3 + arc = ooArcTo(cx, cy, ch4, ch4, 270, 180) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${ch},${y4}`) + + // bottom curl + p3.push(`M${ch2},${h}`) + cx = ch2 + cy = h + arc = ooArcTo(cx, cy, ch2, ch2, 90, -90) + p3.push(arc.svg) + cx = arc.x + cy = arc.y + p3.push(`L${ch},${y3}`) + + return [ + { d: p1.join(' '), fill: 'norm', stroke: false }, + { d: p2.join(' '), fill: 'darkenLess', stroke: false }, + { d: p3.join(' '), fill: 'none', stroke: true }, + ] +}) + +/** + * Get multi-path preset sub-paths for a shape type. + * Returns null if the shape is not a multi-path preset (use getPresetShapePath instead). + */ +export function getMultiPathPreset( + shapeType: string, + w: number, + h: number, + adjustments?: Map +): PresetSubPath[] | null { + const key = shapeType.toLowerCase() + const gen = multiPathPresets.get(key) ?? multiPathPresets.get(shapeType) + return gen ? 
gen(w, h, adjustments) : null +} + +export function getPresetShapePath( + shapeType: string, + w: number, + h: number, + adjustments?: Map +): string { + // means text-only shape without geometry. + if (shapeType === 'textNoShape' || shapeType.toLowerCase() === 'textnoshape') return '' + // OOXML preset names are often camelCase; normalize to lowercase for lookup + const key = shapeType.toLowerCase() + const generator = presetShapes.get(key) ?? presetShapes.get(shapeType) + if (generator) { + return generator(w, h, adjustments) + } + // Fallback: simple rectangle + logger.warn('Unknown preset shape, falling back to rectangle', { shapeType }) + return `M0,0 L${w},0 L${w},${h} L0,${h} Z` +} diff --git a/apps/sim/lib/pptx-renderer/shapes/shape-arc.ts b/apps/sim/lib/pptx-renderer/shapes/shape-arc.ts new file mode 100644 index 00000000000..b7563464446 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/shapes/shape-arc.ts @@ -0,0 +1,44 @@ +/** + * Convert OOXML arc specification to SVG path arc command. + * Based on PPTXjs shapeArc() implementation. + * + * @param cx - Center X coordinate + * @param cy - Center Y coordinate + * @param rx - Horizontal radius + * @param ry - Vertical radius + * @param startAngle - Start angle in degrees + * @param endAngle - End angle in degrees + * @param isClose - Whether to close the path with Z + * @returns SVG path string for the arc + */ +export function shapeArc( + cx: number, + cy: number, + rx: number, + ry: number, + startAngle: number, + endAngle: number, + isClose: boolean +): string { + const startRad = (startAngle * Math.PI) / 180 + const endRad = (endAngle * Math.PI) / 180 + + const x1 = cx + rx * Math.cos(startRad) + const y1 = cy + ry * Math.sin(startRad) + const x2 = cx + rx * Math.cos(endRad) + const y2 = cy + ry * Math.sin(endRad) + + // OOXML convention: always sweep clockwise from startAngle to endAngle. + // Compute the clockwise sweep in degrees, handling angle wrapping. 
+ let sweepDeg = (((endAngle - startAngle) % 360) + 360) % 360 + if (sweepDeg === 0 && startAngle !== endAngle) sweepDeg = 360 + + const largeArc = sweepDeg > 180 ? 1 : 0 + const sweep = 1 // always clockwise + + let d = `M${x1},${y1} A${rx},${ry} 0 ${largeArc},${sweep} ${x2},${y2}` + if (isClose) { + d += ' Z' + } + return d +} diff --git a/apps/sim/lib/pptx-renderer/sim-pptx-viewer.test.ts b/apps/sim/lib/pptx-renderer/sim-pptx-viewer.test.ts new file mode 100644 index 00000000000..cbcafecc955 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/sim-pptx-viewer.test.ts @@ -0,0 +1,65 @@ +/** + * @vitest-environment jsdom + */ +import JSZip from 'jszip' +import { describe, expect, it, vi } from 'vitest' +import { openSimPptxViewer, SIM_PPTX_LIST_OPTIONS } from '@/lib/pptx-renderer/sim-pptx-viewer' + +async function createMinimalPptx(): Promise { + const zip = new JSZip() + zip.file('[Content_Types].xml', '') + zip.file( + 'ppt/presentation.xml', + ` + + + ` + ) + zip.file( + 'ppt/_rels/presentation.xml.rels', + ` + + ` + ) + zip.file( + 'ppt/slides/slide1.xml', + ` + + + + + + + ` + ) + zip.file('ppt/slides/_rels/slide1.xml.rels', '') + return zip.generateAsync({ type: 'arraybuffer' }) +} + +describe('openSimPptxViewer', () => { + it('renders a minimal PPTX and cleans up the container on destroy', async () => { + const container = document.createElement('div') + Object.defineProperty(container, 'clientWidth', { configurable: true, value: 960 }) + const onRenderComplete = vi.fn() + + const handle = await openSimPptxViewer({ + buffer: await createMinimalPptx(), + container, + onRenderComplete, + }) + + expect(onRenderComplete).toHaveBeenCalled() + expect(container.querySelector('[data-slide-index="0"]')).not.toBeNull() + + handle.destroy() + expect(container.innerHTML).toBe('') + }) + + it('uses windowed list rendering defaults for large decks', () => { + expect(SIM_PPTX_LIST_OPTIONS).toMatchObject({ + windowed: true, + batchSize: 8, + initialSlides: 4, + }) + }) +}) diff 
--git a/apps/sim/lib/pptx-renderer/sim-pptx-viewer.ts b/apps/sim/lib/pptx-renderer/sim-pptx-viewer.ts new file mode 100644 index 00000000000..c43094ec665 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/sim-pptx-viewer.ts @@ -0,0 +1,95 @@ +import { createLogger } from '@sim/logger' +import { toError } from '@sim/utils/errors' +import type { ListRenderOptions } from '@/lib/pptx-renderer/core/viewer' +import { PptxViewer } from '@/lib/pptx-renderer/core/viewer' +import type { ZipParseLimits } from '@/lib/pptx-renderer/parser/zip-parser' + +const logger = createLogger('SimPptxViewer') + +export const SIM_PPTX_ZIP_LIMITS = { + maxEntries: 2500, + maxEntryUncompressedBytes: 50 * 1024 * 1024, + maxTotalUncompressedBytes: 200 * 1024 * 1024, + maxMediaBytes: 150 * 1024 * 1024, + maxConcurrency: 8, +} as const satisfies ZipParseLimits + +export const SIM_PPTX_LIST_OPTIONS = { + windowed: true, + batchSize: 8, + initialSlides: 4, + overscanViewport: 1.5, +} as const satisfies ListRenderOptions + +export interface OpenSimPptxViewerOptions { + buffer: ArrayBuffer | Uint8Array + container: HTMLElement + scrollContainer?: HTMLElement + signal?: AbortSignal + zipLimits?: ZipParseLimits + listOptions?: ListRenderOptions + onRenderStart?: () => void + onRenderComplete?: () => void + onSlideChange?: (index: number) => void + onSlideError?: (index: number, error: unknown) => void + onNodeError?: (nodeId: string, error: unknown) => void +} + +export interface SimPptxViewerHandle { + readonly viewer: PptxViewer + destroy(): void +} + +export async function openSimPptxViewer({ + buffer, + container, + scrollContainer, + signal, + zipLimits = SIM_PPTX_ZIP_LIMITS, + listOptions = SIM_PPTX_LIST_OPTIONS, + onRenderStart, + onRenderComplete, + onSlideChange, + onSlideError, + onNodeError, +}: OpenSimPptxViewerOptions): Promise { + const viewer = new PptxViewer(container, { + fitMode: 'contain', + scrollContainer, + zipLimits, + onRenderStart, + onRenderComplete, + onSlideChange, + 
onSlideError, + onNodeError, + }) + + let destroyed = false + const destroy = () => { + if (destroyed) return + destroyed = true + viewer.destroy() + } + + const abortDestroy = () => destroy() + signal?.addEventListener('abort', abortDestroy, { once: true }) + + try { + await viewer.open(buffer, { + renderMode: 'list', + listOptions, + signal, + }) + } catch (error) { + destroy() + const normalized = toError(error) + if (normalized.name !== 'AbortError') { + logger.warn('Failed to render PPTX preview', { error: normalized.message }) + } + throw normalized + } finally { + signal?.removeEventListener('abort', abortDestroy) + } + + return { viewer, destroy } +} diff --git a/apps/sim/lib/pptx-renderer/utils/color.ts b/apps/sim/lib/pptx-renderer/utils/color.ts new file mode 100644 index 00000000000..592d1459ac7 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/utils/color.ts @@ -0,0 +1,488 @@ +// ============================================================================ +// OOXML Color Utilities +// Full color manipulation for PowerPoint XML color processing +// ============================================================================ + +// --------------------------------------------------------------------------- +// Basic Color Conversions +// --------------------------------------------------------------------------- + +/** + * Parse a hex color string (with or without '#') into RGB components. + */ +export function hexToRgb(hex: string): { r: number; g: number; b: number } { + const cleaned = hex.replace(/^#/, '') + if (cleaned.length !== 6 && cleaned.length !== 3) { + return { r: 0, g: 0, b: 0 } + } + const full = + cleaned.length === 3 + ? cleaned[0] + cleaned[0] + cleaned[1] + cleaned[1] + cleaned[2] + cleaned[2] + : cleaned + const num = Number.parseInt(full, 16) + return { + r: (num >> 16) & 0xff, + g: (num >> 8) & 0xff, + b: num & 0xff, + } +} + +/** + * Convert RGB components (0-255 each) to a 6-digit hex string with '#' prefix. 
+ */ +export function rgbToHex(r: number, g: number, b: number): string { + const clamp = (v: number): number => Math.max(0, Math.min(255, Math.round(v))) + return `#${[clamp(r), clamp(g), clamp(b)].map((c) => c.toString(16).padStart(2, '0')).join('')}` +} + +/** + * Convert RGB (0-255) to HSL (h: 0-360, s: 0-1, l: 0-1). + */ +export function rgbToHsl(r: number, g: number, b: number): { h: number; s: number; l: number } { + const rn = r / 255 + const gn = g / 255 + const bn = b / 255 + const max = Math.max(rn, gn, bn) + const min = Math.min(rn, gn, bn) + const l = (max + min) / 2 + let h = 0 + let s = 0 + + if (max !== min) { + const d = max - min + s = l > 0.5 ? d / (2 - max - min) : d / (max + min) + switch (max) { + case rn: + h = ((gn - bn) / d + (gn < bn ? 6 : 0)) * 60 + break + case gn: + h = ((bn - rn) / d + 2) * 60 + break + case bn: + h = ((rn - gn) / d + 4) * 60 + break + } + } + + return { h, s, l } +} + +/** + * Convert HSL (h: 0-360, s: 0-1, l: 0-1) to RGB (0-255). + */ +export function hslToRgb(h: number, s: number, l: number): { r: number; g: number; b: number } { + h = ((h % 360) + 360) % 360 // normalize hue + s = Math.max(0, Math.min(1, s)) + l = Math.max(0, Math.min(1, l)) + + if (s === 0) { + const v = Math.round(l * 255) + return { r: v, g: v, b: v } + } + + const hueToRgb = (p: number, q: number, t: number): number => { + if (t < 0) t += 1 + if (t > 1) t -= 1 + if (t < 1 / 6) return p + (q - p) * 6 * t + if (t < 1 / 2) return q + if (t < 2 / 3) return p + (q - p) * (2 / 3 - t) * 6 + return p + } + + const q = l < 0.5 ? 
l * (1 + s) : l + s - l * s + const p = 2 * l - q + const hNorm = h / 360 + + return { + r: Math.round(hueToRgb(p, q, hNorm + 1 / 3) * 255), + g: Math.round(hueToRgb(p, q, hNorm) * 255), + b: Math.round(hueToRgb(p, q, hNorm - 1 / 3) * 255), + } +} + +// --------------------------------------------------------------------------- +// sRGB ↔ Linear RGB conversion (IEC 61966-2-1) +// PowerPoint applies tint/shade in linear (scene-referred) space. +// --------------------------------------------------------------------------- + +function srgbToLinear(c: number): number { + const s = c / 255 + return s <= 0.04045 ? s / 12.92 : ((s + 0.055) / 1.055) ** 2.4 +} + +function linearToSrgb(c: number): number { + const s = c <= 0.0031308 ? c * 12.92 : 1.055 * c ** (1 / 2.4) - 0.055 + return Math.max(0, Math.min(255, Math.round(s * 255))) +} + +// --------------------------------------------------------------------------- +// OOXML Color Modifiers +// --------------------------------------------------------------------------- + +/** + * Apply tint modifier (mix toward white in linear RGB space). + * OOXML spec: tint val is 0-100000 where 100000 = original color, 0 = fully white. + * PowerPoint performs the blend in linear RGB space for perceptual correctness. + */ +export function applyTint(hex: string, tint: number): string { + const { r, g, b } = hexToRgb(hex) + const t = tint / 100000 + const rl = srgbToLinear(r) + const gl = srgbToLinear(g) + const bl = srgbToLinear(b) + return rgbToHex( + linearToSrgb(rl * t + 1.0 * (1 - t)), + linearToSrgb(gl * t + 1.0 * (1 - t)), + linearToSrgb(bl * t + 1.0 * (1 - t)) + ) +} + +/** + * Apply shade modifier (mix toward black in linear RGB space). + * shade: 0-100000 where 100000 = original color, 0 = fully black. 
+ */ +export function applyShade(hex: string, shade: number): string { + const { r, g, b } = hexToRgb(hex) + const s = shade / 100000 + return rgbToHex( + linearToSrgb(srgbToLinear(r) * s), + linearToSrgb(srgbToLinear(g) * s), + linearToSrgb(srgbToLinear(b) * s) + ) +} + +/** + * Apply luminance modulation. + * lumMod: percentage in OOXML units (e.g., 75000 = 75%). + * Multiplies the L channel of HSL. + */ +export function applyLumMod(hex: string, lumMod: number): string { + const { r, g, b } = hexToRgb(hex) + const { h, s, l } = rgbToHsl(r, g, b) + const newL = Math.max(0, Math.min(1, l * (lumMod / 100000))) + const rgb = hslToRgb(h, s, newL) + return rgbToHex(rgb.r, rgb.g, rgb.b) +} + +/** + * Apply luminance offset. + * lumOff: percentage offset in OOXML units (e.g., 25000 = +25%). + * Adds to the L channel of HSL. + */ +export function applyLumOff(hex: string, lumOff: number): string { + const { r, g, b } = hexToRgb(hex) + const { h, s, l } = rgbToHsl(r, g, b) + const newL = Math.max(0, Math.min(1, l + lumOff / 100000)) + const rgb = hslToRgb(h, s, newL) + return rgbToHex(rgb.r, rgb.g, rgb.b) +} + +/** + * Apply saturation modulation. + * satMod: percentage in OOXML units (e.g., 120000 = 120%). + * Multiplies the S channel of HSL. + */ +export function applySatMod(hex: string, satMod: number): string { + const { r, g, b } = hexToRgb(hex) + const { h, s, l } = rgbToHsl(r, g, b) + const newS = Math.max(0, Math.min(1, s * (satMod / 100000))) + const rgb = hslToRgb(h, newS, l) + return rgbToHex(rgb.r, rgb.g, rgb.b) +} + +/** + * Apply hue modulation. + * hueMod: percentage in OOXML units (e.g., 60000 = shift hue by ratio). + * In OOXML, hueMod multiplies the hue value. Hue wraps around at 360. 
+ */ +export function applyHueMod(hex: string, hueMod: number): string { + const { r, g, b } = hexToRgb(hex) + const { h, s, l } = rgbToHsl(r, g, b) + const newH = (h * (hueMod / 100000)) % 360 + const rgb = hslToRgb(newH, s, l) + return rgbToHex(rgb.r, rgb.g, rgb.b) +} + +/** + * Apply hue offset (additive). + * hueOff: in 60000ths of a degree (OOXML ST_FixedAngle). + * Adds to the hue channel of HSL, wrapping at 360. + */ +export function applyHueOff(hex: string, hueOff: number): string { + const { r, g, b } = hexToRgb(hex) + const { h, s, l } = rgbToHsl(r, g, b) + const offsetDeg = hueOff / 60000 + const newH = (((h + offsetDeg) % 360) + 360) % 360 + const rgb = hslToRgb(newH, s, l) + return rgbToHex(rgb.r, rgb.g, rgb.b) +} + +/** + * Apply saturation offset (additive). + * satOff: in OOXML percentage units (100000 = 100%). + * Adds to the S channel of HSL. + */ +export function applySatOff(hex: string, satOff: number): string { + const { r, g, b } = hexToRgb(hex) + const { h, s, l } = rgbToHsl(r, g, b) + const newS = Math.max(0, Math.min(1, s + satOff / 100000)) + const rgb = hslToRgb(h, newS, l) + return rgbToHex(rgb.r, rgb.g, rgb.b) +} + +/** + * Convert OOXML alpha value (0-100000) to CSS opacity (0-1). + * 100000 = fully opaque, 0 = fully transparent. + */ +export function applyAlpha(alpha: number): number { + return Math.max(0, Math.min(1, alpha / 100000)) +} + +// --------------------------------------------------------------------------- +// Composite Modifier Application +// --------------------------------------------------------------------------- + +export interface ColorModifier { + name: string + val: number +} + +/** + * Apply all OOXML color modifiers from an array of {name, val} objects. + * Modifiers are applied in the order they appear (matching XML document order). + * Returns the final hex color and alpha value. 
+ */ +export function applyColorModifiers( + hex: string, + modifiers: ColorModifier[] +): { color: string; alpha: number } { + let color = hex + let alpha = 1 + + for (const mod of modifiers) { + switch (mod.name) { + case 'tint': + case 'a:tint': + color = applyTint(color, mod.val) + break + case 'shade': + case 'a:shade': + color = applyShade(color, mod.val) + break + case 'lumMod': + case 'a:lumMod': + color = applyLumMod(color, mod.val) + break + case 'lumOff': + case 'a:lumOff': + color = applyLumOff(color, mod.val) + break + case 'satMod': + case 'a:satMod': + color = applySatMod(color, mod.val) + break + case 'hueMod': + case 'a:hueMod': + color = applyHueMod(color, mod.val) + break + case 'hueOff': + case 'a:hueOff': + color = applyHueOff(color, mod.val) + break + case 'satOff': + case 'a:satOff': + color = applySatOff(color, mod.val) + break + case 'alpha': + case 'a:alpha': + alpha = applyAlpha(mod.val) + break + case 'alphaOff': + case 'a:alphaOff': + alpha = Math.max(0, Math.min(1, alpha + mod.val / 100000)) + break + default: + // Unknown modifier - skip silently + break + } + } + + return { color, alpha } +} + +// --------------------------------------------------------------------------- +// OOXML Preset Color Table +// --------------------------------------------------------------------------- + +const PRESET_COLORS: Record = { + // Basic colors + black: '#000000', + white: '#FFFFFF', + red: '#FF0000', + green: '#008000', + blue: '#0000FF', + yellow: '#FFFF00', + cyan: '#00FFFF', + magenta: '#FF00FF', + + // Extended standard colors + orange: '#FFA500', + purple: '#800080', + brown: '#A52A2A', + pink: '#FFC0CB', + gray: '#808080', + grey: '#808080', + lime: '#00FF00', + navy: '#000080', + teal: '#008080', + maroon: '#800000', + olive: '#808000', + silver: '#C0C0C0', + aqua: '#00FFFF', + fuchsia: '#FF00FF', + + // OOXML-specific preset colors + aliceBlue: '#F0F8FF', + antiqueWhite: '#FAEBD7', + aquamarine: '#7FFFD4', + azure: '#F0FFFF', + beige: 
'#F5F5DC', + bisque: '#FFE4C4', + blanchedAlmond: '#FFEBCD', + blueViolet: '#8A2BE2', + burlyWood: '#DEB887', + cadetBlue: '#5F9EA0', + chartreuse: '#7FFF00', + chocolate: '#D2691E', + coral: '#FF7F50', + cornflowerBlue: '#6495ED', + cornsilk: '#FFF8DC', + crimson: '#DC143C', + darkBlue: '#00008B', + darkCyan: '#008B8B', + darkGoldenrod: '#B8860B', + darkGray: '#A9A9A9', + darkGrey: '#A9A9A9', + darkGreen: '#006400', + darkKhaki: '#BDB76B', + darkMagenta: '#8B008B', + darkOliveGreen: '#556B2F', + darkOrange: '#FF8C00', + darkOrchid: '#9932CC', + darkRed: '#8B0000', + darkSalmon: '#E9967A', + darkSeaGreen: '#8FBC8F', + darkSlateBlue: '#483D8B', + darkSlateGray: '#2F4F4F', + darkSlateGrey: '#2F4F4F', + darkTurquoise: '#00CED1', + darkViolet: '#9400D3', + deepPink: '#FF1493', + deepSkyBlue: '#00BFFF', + dimGray: '#696969', + dimGrey: '#696969', + dodgerBlue: '#1E90FF', + firebrick: '#B22222', + floralWhite: '#FFFAF0', + forestGreen: '#228B22', + gainsboro: '#DCDCDC', + ghostWhite: '#F8F8FF', + gold: '#FFD700', + goldenrod: '#DAA520', + greenYellow: '#ADFF2F', + honeydew: '#F0FFF0', + hotPink: '#FF69B4', + indianRed: '#CD5C5C', + indigo: '#4B0082', + ivory: '#FFFFF0', + khaki: '#F0E68C', + lavender: '#E6E6FA', + lavenderBlush: '#FFF0F5', + lawnGreen: '#7CFC00', + lemonChiffon: '#FFFACD', + lightBlue: '#ADD8E6', + lightCoral: '#F08080', + lightCyan: '#E0FFFF', + lightGoldenrodYellow: '#FAFAD2', + lightGray: '#D3D3D3', + lightGrey: '#D3D3D3', + lightGreen: '#90EE90', + lightPink: '#FFB6C1', + lightSalmon: '#FFA07A', + lightSeaGreen: '#20B2AA', + lightSkyBlue: '#87CEFA', + lightSlateGray: '#778899', + lightSlateGrey: '#778899', + lightSteelBlue: '#B0C4DE', + lightYellow: '#FFFFE0', + limeGreen: '#32CD32', + linen: '#FAF0E6', + mediumAquamarine: '#66CDAA', + mediumBlue: '#0000CD', + mediumOrchid: '#BA55D3', + mediumPurple: '#9370DB', + mediumSeaGreen: '#3CB371', + mediumSlateBlue: '#7B68EE', + mediumSpringGreen: '#00FA9A', + mediumTurquoise: '#48D1CC', + mediumVioletRed: 
'#C71585', + midnightBlue: '#191970', + mintCream: '#F5FFFA', + mistyRose: '#FFE4E1', + moccasin: '#FFE4B5', + navajoWhite: '#FFDEAD', + oldLace: '#FDF5E6', + oliveDrab: '#6B8E23', + orangeRed: '#FF4500', + orchid: '#DA70D6', + paleGoldenrod: '#EEE8AA', + paleGreen: '#98FB98', + paleTurquoise: '#AFEEEE', + paleVioletRed: '#DB7093', + papayaWhip: '#FFEFD5', + peachPuff: '#FFDAB9', + peru: '#CD853F', + plum: '#DDA0DD', + powderBlue: '#B0E0E6', + rosyBrown: '#BC8F8F', + royalBlue: '#4169E1', + saddleBrown: '#8B4513', + salmon: '#FA8072', + sandyBrown: '#F4A460', + seaGreen: '#2E8B57', + seaShell: '#FFF5EE', + sienna: '#A0522D', + skyBlue: '#87CEEB', + slateBlue: '#6A5ACD', + slateGray: '#708090', + slateGrey: '#708090', + snow: '#FFFAFA', + springGreen: '#00FF7F', + steelBlue: '#4682B4', + tan: '#D2B48C', + thistle: '#D8BFD8', + tomato: '#FF6347', + turquoise: '#40E0D0', + violet: '#EE82EE', + wheat: '#F5DEB3', + whiteSmoke: '#F5F5F5', + yellowGreen: '#9ACD32', +} + +/** + * Look up a preset OOXML color name and return its hex value. + * Returns undefined if the name is not recognized. + */ +export function presetColorToHex(name: string): string | undefined { + // Try exact match first, then case-insensitive + if (PRESET_COLORS[name] !== undefined) { + return PRESET_COLORS[name] + } + const lower = name.toLowerCase() + for (const [key, value] of Object.entries(PRESET_COLORS)) { + if (key.toLowerCase() === lower) { + return value + } + } + return undefined +} diff --git a/apps/sim/lib/pptx-renderer/utils/emf-parser.ts b/apps/sim/lib/pptx-renderer/utils/emf-parser.ts new file mode 100644 index 00000000000..a9f8005cefa --- /dev/null +++ b/apps/sim/lib/pptx-renderer/utils/emf-parser.ts @@ -0,0 +1,289 @@ +/** + * EMF (Enhanced Metafile) binary parser — extracts embedded content from EMF files. + * + * PPTX files frequently embed EMF images as OLE object previews. + * Most contain embedded PDF data inside GDI comment records, or DIB bitmaps + * via STRETCHDIBITS records. 
This parser extracts those embedded resources + * without implementing full EMF record interpretation. + * + * EMF record format: each record is { type: u32, size: u32, ...data } + * Records are walked sequentially until EOF record (type 14). + */ + +export type EmfContent = + | { type: 'pdf'; data: Uint8Array } + | { type: 'bitmap'; imageData: ImageData } + | { type: 'empty' } + | { type: 'unsupported' } + +// EMF record types +const EMR_EOF = 14 +const EMR_COMMENT = 70 +const EMR_STRETCHDIBITS = 81 + +// GDI comment identifiers (MS-EMF spec) +const GDIC_COMMENT_ID = 0x43494447 // "GDIC" +const GDIC_BEGINGROUP = 0x00000002 +const GDIC_MULTIFORMATS = 0x40000004 + +// EMF header signature at offset 40 +const EMF_SIGNATURE = 0x464d4520 // " EMF" + +// PDF markers +const PDF_HEADER = [0x25, 0x50, 0x44, 0x46] // "%PDF" +const PDF_EOF = [0x25, 0x25, 0x45, 0x4f, 0x46] // "%%EOF" + +// DIB compression +const BI_RGB = 0 + +/** + * Parse an EMF file and extract its embedded content. + */ +export function parseEmfContent(data: Uint8Array): EmfContent { + if (data.length < 44) return { type: 'unsupported' } + + const view = new DataView(data.buffer, data.byteOffset, data.byteLength) + + // Validate EMF signature at offset 40 + if (view.getUint32(40, true) !== EMF_SIGNATURE) { + return { type: 'unsupported' } + } + + let offset = 0 + let recordCount = 0 + + while (offset + 8 <= data.length) { + const recordType = view.getUint32(offset, true) + const recordSize = view.getUint32(offset + 4, true) + + // Sanity check record size + if (recordSize < 8 || offset + recordSize > data.length) break + + recordCount++ + + if (recordType === EMR_EOF) break + + // Check GDI Comment records for embedded PDF + if (recordType === EMR_COMMENT && recordSize > 16) { + const result = parseGdiComment(data, view, offset, recordSize) + if (result) return result + } + + // Check STRETCHDIBITS for embedded bitmaps + if (recordType === EMR_STRETCHDIBITS && recordSize > 80) { + const result = 
parseStretchDibits(data, view, offset, recordSize) + if (result) return result + } + + offset += recordSize + } + + // Only HEADER + EOF → empty + if (recordCount <= 2) { + return { type: 'empty' } + } + + return { type: 'unsupported' } +} + +/** + * Parse a GDI Comment record looking for embedded PDF data. + */ +function parseGdiComment( + data: Uint8Array, + view: DataView, + offset: number, + recordSize: number +): EmfContent | null { + // Record layout: type(4) + size(4) + cbData(4) + commentId(4) + ... + if (offset + 16 > data.length) return null + + const commentId = view.getUint32(offset + 12, true) + + if (commentId === GDIC_COMMENT_ID && offset + 20 <= data.length) { + const publicType = view.getUint32(offset + 16, true) + + if (publicType === GDIC_BEGINGROUP) { + // Search for %PDF signature in the record data + const recordData = data.subarray(offset + 8, offset + recordSize) + const pdf = extractPdfFromBuffer(recordData) + if (pdf) return { type: 'pdf', data: pdf } + } + + if (publicType === GDIC_MULTIFORMATS && offset + 24 <= data.length) { + // MULTIFORMATS: parse format descriptors and extract first usable one + const result = parseMultiformats(data, view, offset, recordSize) + if (result) return result + } + } + + // Also search non-GDIC comments for raw PDF data + if (recordSize > 100) { + const recordData = data.subarray(offset + 8, offset + recordSize) + const pdf = extractPdfFromBuffer(recordData) + if (pdf) return { type: 'pdf', data: pdf } + } + + return null +} + +/** + * Parse MULTIFORMATS GDI comment — contains format descriptors pointing to embedded data. 
+ */ +function parseMultiformats( + data: Uint8Array, + view: DataView, + offset: number, + _recordSize: number +): EmfContent | null { + // Layout from record start: + // +12: commentIdentifier(4), +16: publicCommentIdentifier(4) + // +20: outputRect(16 = RECTL) + // +36: countFormats(4) + // +40: format descriptors array, each: { signature(4), version(4), cbData(4), offData(4) } + if (offset + 40 > data.length) return null + + const countFormats = view.getUint32(offset + 36, true) + const descriptorStart = offset + 40 + + for (let i = 0; i < countFormats && i < 10; i++) { + const descOff = descriptorStart + i * 16 + if (descOff + 16 > data.length) break + + const cbData = view.getUint32(descOff + 8, true) + const offData = view.getUint32(descOff + 12, true) + + // offData is relative to the start of the record + const dataStart = offset + offData + if (dataStart + cbData > data.length || cbData === 0) continue + + const formatData = data.subarray(dataStart, dataStart + cbData) + const pdf = extractPdfFromBuffer(formatData) + if (pdf) return { type: 'pdf', data: pdf } + } + + return null +} + +/** + * Search for %PDF...%%EOF in a buffer and extract the PDF bytes. + */ +function extractPdfFromBuffer(buf: Uint8Array): Uint8Array | null { + const pdfStart = findSequence(buf, PDF_HEADER) + if (pdfStart === -1) return null + + // Search for %%EOF from the end (PDF may have multiple %%EOF; take the last one) + let pdfEnd = -1 + for (let i = buf.length - PDF_EOF.length; i >= pdfStart; i--) { + if (matchesAt(buf, i, PDF_EOF)) { + pdfEnd = i + PDF_EOF.length + break + } + } + + if (pdfEnd === -1) { + // No %%EOF found — take everything from %PDF to end of buffer + pdfEnd = buf.length + } + + return buf.slice(pdfStart, pdfEnd) +} + +/** + * Parse a STRETCHDIBITS record and extract the bitmap as ImageData. 
+ */ +function parseStretchDibits( + data: Uint8Array, + view: DataView, + offset: number, + _recordSize: number +): EmfContent | null { + // STRETCHDIBITS record layout (offsets from record start): + // 0: type(4), 4: size(4) + // 8: rclBounds (16 bytes) + // 24: xDest(4), 28: yDest(4) + // 32: xSrc(4), 36: ySrc(4) + // 40: cxSrc(4), 44: cySrc(4) + // 48: offBmiSrc(4), 52: cbBmiSrc(4) + // 56: offBitsSrc(4), 60: cbBitsSrc(4) + // 64: iUsageSrc(4), 68: dwRop(4) + // 72: cxDest(4), 76: cyDest(4) + if (offset + 80 > data.length) return null + + const offBmiSrc = view.getUint32(offset + 48, true) + const cbBmiSrc = view.getUint32(offset + 52, true) + const offBitsSrc = view.getUint32(offset + 56, true) + const cbBitsSrc = view.getUint32(offset + 60, true) + + if (cbBmiSrc === 0 || cbBitsSrc === 0) return null + + const bmiStart = offset + offBmiSrc + if (bmiStart + 40 > data.length) return null + + // Parse BITMAPINFOHEADER + const biWidth = view.getInt32(bmiStart + 4, true) + const biHeight = view.getInt32(bmiStart + 8, true) + const biBitCount = view.getUint16(bmiStart + 14, true) + const biCompression = view.getUint32(bmiStart + 16, true) + + // Only support uncompressed RGB bitmaps + if (biCompression !== BI_RGB) return null + if (biBitCount !== 24 && biBitCount !== 32) return null + + const width = Math.abs(biWidth) + const height = Math.abs(biHeight) + if (width === 0 || height === 0 || width > 8192 || height > 8192) return null + + const bitsStart = offset + offBitsSrc + if (bitsStart + cbBitsSrc > data.length) return null + + const bitsData = data.subarray(bitsStart, bitsStart + cbBitsSrc) + + // Negative height means top-down row order; positive means bottom-up + const topDown = biHeight < 0 + + const imageData = new ImageData(width, height) + const bytesPerPixel = biBitCount / 8 + // DIB rows are padded to 4-byte boundaries + const rowStride = Math.ceil((width * bytesPerPixel) / 4) * 4 + + for (let y = 0; y < height; y++) { + const srcRow = topDown ? 
y : height - 1 - y + const srcOffset = srcRow * rowStride + const dstOffset = y * width * 4 + + for (let x = 0; x < width; x++) { + const srcIdx = srcOffset + x * bytesPerPixel + if (srcIdx + bytesPerPixel > bitsData.length) break + + // DIB stores BGR(A) + imageData.data[dstOffset + x * 4 + 0] = bitsData[srcIdx + 2] // R + imageData.data[dstOffset + x * 4 + 1] = bitsData[srcIdx + 1] // G + imageData.data[dstOffset + x * 4 + 2] = bitsData[srcIdx + 0] // B + imageData.data[dstOffset + x * 4 + 3] = biBitCount === 32 ? bitsData[srcIdx + 3] : 255 + } + } + + return { type: 'bitmap', imageData } +} + +/** + * Find the first occurrence of a byte sequence in a buffer. + */ +function findSequence(buf: Uint8Array, seq: number[]): number { + const end = buf.length - seq.length + for (let i = 0; i <= end; i++) { + if (matchesAt(buf, i, seq)) return i + } + return -1 +} + +/** + * Check if buffer matches a byte sequence at a given offset. + */ +function matchesAt(buf: Uint8Array, offset: number, seq: number[]): boolean { + for (let j = 0; j < seq.length; j++) { + if (buf[offset + j] !== seq[j]) return false + } + return true +} diff --git a/apps/sim/lib/pptx-renderer/utils/media.ts b/apps/sim/lib/pptx-renderer/utils/media.ts new file mode 100644 index 00000000000..1f810fa7d33 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/utils/media.ts @@ -0,0 +1,73 @@ +/** + * Media utilities — MIME type detection, path resolution, and blob URL management. + */ + +/** + * Determine MIME type from file extension. + * Covers images, video, and audio formats used in PPTX files. 
+ */ +export function getMimeType(path: string): string { + const ext = path.split('.').pop()?.toLowerCase() || '' + const mimeMap: Record = { + png: 'image/png', + jpg: 'image/jpeg', + jpeg: 'image/jpeg', + gif: 'image/gif', + svg: 'image/svg+xml', + bmp: 'image/bmp', + tiff: 'image/tiff', + tif: 'image/tiff', + emf: 'image/x-emf', + wmf: 'image/x-wmf', + webp: 'image/webp', + mp4: 'video/mp4', + m4v: 'video/mp4', + webm: 'video/webm', + avi: 'video/x-msvideo', + mp3: 'audio/mpeg', + wav: 'audio/wav', + m4a: 'audio/mp4', + ogg: 'audio/ogg', + } + return mimeMap[ext] || 'application/octet-stream' +} + +/** + * Resolve a relative media path (from rels) to its canonical path in PptxFiles.media. + * Rels targets are relative like "../media/image1.png". + * Media paths in PptxFiles are like "ppt/media/image1.png". + */ +export function resolveMediaPath(target: string): string { + const fileName = target.split('/').pop() || '' + return `ppt/media/${fileName}` +} + +/** + * Get or create a blob URL for a media file, using a cache to avoid duplicates. + * + * @param mediaPath - Canonical path (e.g. "ppt/media/image1.png") + * @param data - Raw media data (Uint8Array or ArrayBuffer) + * @param cache - Map to store/retrieve cached blob URLs + * @returns The blob URL string + */ +export function getOrCreateBlobUrl( + mediaPath: string, + data: Uint8Array | ArrayBuffer, + cache: Map +): string { + let url = cache.get(mediaPath) + if (!url) { + const mime = getMimeType(mediaPath) + const blobPart = data instanceof ArrayBuffer ? 
data : copyToArrayBuffer(data) + const blob = new Blob([blobPart], { type: mime }) + url = URL.createObjectURL(blob) + cache.set(mediaPath, url) + } + return url +} + +function copyToArrayBuffer(data: Uint8Array): ArrayBuffer { + const copy = new Uint8Array(data.byteLength) + copy.set(data) + return copy.buffer +} diff --git a/apps/sim/lib/pptx-renderer/utils/pdf-renderer.ts b/apps/sim/lib/pptx-renderer/utils/pdf-renderer.ts new file mode 100644 index 00000000000..7c3fee93463 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/utils/pdf-renderer.ts @@ -0,0 +1,198 @@ +/** + * PDF-to-image renderer for embedded EMF PDFs. + * + * pdfjs-dist v5 has process-level shared state (PagesMapper.#pagesNumber, + * GlobalWorkerOptions.workerSrc, PDFWorker.#isWorkerDisabled) that a library + * must never touch on the main thread — doing so clobbers the host app's pdfjs + * configuration. + * + * Solution: render EMF PDFs exclusively inside a dedicated Web Worker. The + * worker loads its OWN pdfjs instance via dynamic import, so all static state + * is fully isolated from the main thread. + * + * If Worker + OffscreenCanvas are unavailable (extremely rare in 2025+ + * browsers), rendering is skipped and the caller gets null — no main-thread + * fallback, no global state pollution. 
+ */ + +// --------------------------------------------------------------------------- +// Resolved pdfjs URL — computed once from main thread's module resolution +// --------------------------------------------------------------------------- + +let _pdfjsUrl: string | null = null + +function getPdfjsUrl(): string | null { + if (_pdfjsUrl !== null) return _pdfjsUrl + try { + // Resolve via the bundler/dev server so the URL is usable from a Worker + _pdfjsUrl = new URL('pdfjs-dist/build/pdf.min.mjs', import.meta.url).toString() + } catch { + _pdfjsUrl = '' + } + return _pdfjsUrl || null +} + +// --------------------------------------------------------------------------- +// Worker-based renderer (fully isolated from main thread pdfjs) +// --------------------------------------------------------------------------- + +/** + * Inline source for the PDF render worker. + * Receives: { id, pdfData, width, height, pdfjsUrl } + * Posts back: { id, blob } or { id, error } + * + * The worker loads its OWN pdfjs instance via dynamic import, so its static + * PagesMapper state is completely independent of the main thread. + * pdfjs's own internal worker is disabled (workerPort = null, workerSrc = '') + * so pdfjs runs single-threaded inside this worker — acceptable for tiny + * 1-page EMF PDFs. 
/**
 * Inline source for the PDF render worker.
 * Receives: { id, pdfData, width, height, pdfjsUrl }
 * Posts back: { id, blob } or { id, error }
 *
 * The worker loads its OWN pdfjs instance via dynamic import, so its static
 * PagesMapper state is completely independent of the main thread. pdfjs's
 * own internal worker is disabled (workerSrc = '') so pdfjs runs
 * single-threaded inside this worker — acceptable for tiny 1-page EMF PDFs.
 */
const WORKER_SRC = /* js */ `
let pdfjsLib = null;

self.onmessage = async (e) => {
  const { id, pdfData, width, height, pdfjsUrl } = e.data;
  try {
    if (!pdfjsLib) {
      pdfjsLib = await import(pdfjsUrl);
      pdfjsLib.GlobalWorkerOptions.workerSrc = '';
    }

    const doc = await pdfjsLib.getDocument({ data: pdfData }).promise;
    try {
      if (doc.numPages < 1) {
        self.postMessage({ id, error: 'no pages' });
        return;
      }
      const page = await doc.getPage(1);
      const vp = page.getViewport({ scale: 1 });
      const scale = Math.max(width / vp.width, height / vp.height);
      const svp = page.getViewport({ scale });

      const canvas = new OffscreenCanvas(Math.ceil(svp.width), Math.ceil(svp.height));
      const ctx = canvas.getContext('2d', { alpha: true });
      await page.render({ canvasContext: ctx, viewport: svp, background: 'rgba(0,0,0,0)' }).promise;

      const blob = await canvas.convertToBlob({ type: 'image/png' });
      self.postMessage({ id, blob });
    } finally {
      doc.destroy();
    }
  } catch (err) {
    self.postMessage({ id, error: String(err) });
  }
};
`

// Lazily created singleton worker; a failed creation permanently disables it.
let _worker: Worker | null = null
let _workerFailed = false
// Monotonic message id correlating requests with worker replies.
let _msgId = 0
// In-flight requests keyed by message id. `reject` is stored for interface
// symmetry but every failure path resolves(null) instead of rejecting.
const _pending = new Map<
  number,
  { resolve: (b: Blob | null) => void; reject: (e: Error) => void }
>()

/**
 * Get (or lazily create) the shared render worker.
 * The worker is built from WORKER_SRC via a blob URL as a module worker.
 * Returns null once creation has failed (e.g. blocked by CSP) — the failure
 * is sticky so we never retry on every render call.
 */
function getWorker(_pdfjsUrl: string): Worker | null {
  if (_workerFailed) return null
  if (_worker) return _worker

  try {
    const blob = new Blob([WORKER_SRC], { type: 'text/javascript' })
    const url = URL.createObjectURL(blob)
    _worker = new Worker(url, { type: 'module' })

    // Route each reply to its pending request by id.
    _worker.onmessage = (e: MessageEvent) => {
      const { id, blob, error } = e.data
      const entry = _pending.get(id)
      if (!entry) return
      _pending.delete(id)
      if (error) {
        entry.resolve(null) // Treat worker-side errors as "no result"
      } else {
        entry.resolve(blob ?? null)
      }
    }

    _worker.onerror = () => {
      // Worker failed to initialize (e.g. module import blocked by CSP).
      // Disable permanently and flush every pending request as "no result".
      _workerFailed = true
      _worker = null
      for (const [, entry] of _pending) {
        entry.resolve(null)
      }
      _pending.clear()
    }

    return _worker
  } catch {
    _workerFailed = true
    return null
  }
}

/**
 * Render page 1 of a PDF inside the shared worker.
 * Resolves with the rendered PNG blob, or null on any failure (no worker,
 * worker-side error, or a 15s timeout). Never rejects.
 *
 * NOTE(review): generic type arguments on Promise/Map in this section were
 * lost in extraction and have been reconstructed — confirm against the
 * original file.
 */
function renderInWorker(
  pdfData: Uint8Array,
  width: number,
  height: number,
  pdfjsUrl: string
): Promise<Blob | null> {
  return new Promise((resolve) => {
    const worker = getWorker(pdfjsUrl)
    if (!worker) {
      resolve(null)
      return
    }

    const id = ++_msgId
    _pending.set(id, {
      resolve,
      reject: () => resolve(null),
    })

    // Copy first so the caller's buffer stays usable, then transfer the
    // copy's buffer to the worker to avoid a second structured-clone copy.
    const copy = pdfData.slice() // copy so caller retains original
    worker.postMessage({ id, pdfData: copy, width, height, pdfjsUrl }, [copy.buffer])

    // Timeout: if worker doesn't respond in 15s, give up
    setTimeout(() => {
      if (_pending.has(id)) {
        _pending.delete(id)
        resolve(null)
      }
    }, 15000)
  })
}
+ * + * @returns blob URL string, or null if rendering fails or Worker is unavailable + */ +export async function renderPdfToImage( + pdfData: Uint8Array, + width: number, + height: number +): Promise { + const pdfjsUrl = getPdfjsUrl() + + if (!pdfjsUrl || typeof OffscreenCanvas === 'undefined' || typeof Worker === 'undefined') { + return null + } + + try { + const blob = await renderInWorker(pdfData, width, height, pdfjsUrl) + if (blob) return URL.createObjectURL(blob) + } catch { + // Worker failed — no fallback, return null + } + + return null +} diff --git a/apps/sim/lib/pptx-renderer/utils/preview-scale.ts b/apps/sim/lib/pptx-renderer/utils/preview-scale.ts new file mode 100644 index 00000000000..9b3f67878f7 --- /dev/null +++ b/apps/sim/lib/pptx-renderer/utils/preview-scale.ts @@ -0,0 +1,40 @@ +export interface ComputePanelScaleInput { + panelWidth: number + elementWidth?: number | null + elementHeight?: number | null + fallbackWidth: number + fallbackHeight: number +} + +export interface ComputePanelScaleResult { + scale: number + scaledHeight: number +} + +export function computePanelScale(input: ComputePanelScaleInput): ComputePanelScaleResult | null { + const panelWidth = Number.isFinite(input.panelWidth) ? input.panelWidth : 0 + if (panelWidth <= 0) return null + + const baseWidth = + Number.isFinite(input.elementWidth) && input.elementWidth! > 0 + ? input.elementWidth! + : input.fallbackWidth + const baseHeight = + Number.isFinite(input.elementHeight) && input.elementHeight! > 0 + ? input.elementHeight! 
+ : input.fallbackHeight + if ( + !Number.isFinite(baseWidth) || + !Number.isFinite(baseHeight) || + baseWidth <= 0 || + baseHeight <= 0 + ) { + return null + } + + const scale = panelWidth / baseWidth + return { + scale, + scaledHeight: baseHeight * scale, + } +} diff --git a/apps/sim/lib/pptx-renderer/utils/url-safety.test.ts b/apps/sim/lib/pptx-renderer/utils/url-safety.test.ts new file mode 100644 index 00000000000..793ae18ea5b --- /dev/null +++ b/apps/sim/lib/pptx-renderer/utils/url-safety.test.ts @@ -0,0 +1,16 @@ +import { describe, expect, it } from 'vitest' +import { isAllowedExternalUrl } from '@/lib/pptx-renderer/utils/url-safety' + +describe('isAllowedExternalUrl', () => { + it('allows http, https, and mailto URLs', () => { + expect(isAllowedExternalUrl('https://example.com/deck')).toBe(true) + expect(isAllowedExternalUrl('http://example.com/deck')).toBe(true) + expect(isAllowedExternalUrl('mailto:support@example.com')).toBe(true) + }) + + it('rejects scriptable, data, and relative URLs', () => { + expect(isAllowedExternalUrl('javascript:alert(1)')).toBe(false) + expect(isAllowedExternalUrl('data:text/html,')).toBe(false) + expect(isAllowedExternalUrl('/workspace/files')).toBe(false) + }) +}) diff --git a/apps/sim/lib/pptx-renderer/utils/url-safety.ts b/apps/sim/lib/pptx-renderer/utils/url-safety.ts new file mode 100644 index 00000000000..c3e8f42b2de --- /dev/null +++ b/apps/sim/lib/pptx-renderer/utils/url-safety.ts @@ -0,0 +1,17 @@ +/** + * URL safety utilities for external hyperlinks/media in untrusted PPTX content. + */ + +const ALLOWED_PROTOCOLS = new Set(['http:', 'https:', 'mailto:']) + +/** + * Returns true only for absolute URLs with an allowed protocol. 
+ */ +export function isAllowedExternalUrl(url: string): boolean { + try { + const parsed = new URL(url) + return ALLOWED_PROTOCOLS.has(parsed.protocol.toLowerCase()) + } catch { + return false + } +} diff --git a/apps/sim/package.json b/apps/sim/package.json index 766b04cb567..77619302061 100644 --- a/apps/sim/package.json +++ b/apps/sim/package.json @@ -108,7 +108,7 @@ "ajv": "8.18.0", "better-auth": "1.3.12", "better-auth-harmony": "1.3.1", - "binary-extensions": "^2.0.0", + "binary-extensions": "3.1.0", "browser-image-compression": "^2.0.2", "cheerio": "1.1.2", "class-variance-authority": "^0.7.1", @@ -122,6 +122,7 @@ "docx": "^9.6.1", "docx-preview": "^0.3.7", "drizzle-orm": "^0.45.2", + "echarts": "6.0.0", "es-toolkit": "1.45.1", "ffmpeg-static": "5.3.0", "fluent-ffmpeg": "2.1.3", @@ -130,7 +131,6 @@ "google-auth-library": "10.5.0", "gray-matter": "^4.0.3", "groq-sdk": "^0.15.0", - "hast-util-to-html": "9.0.5", "html-to-image": "1.11.13", "html-to-text": "^9.0.5", "idb-keyval": "6.2.2", @@ -167,7 +167,6 @@ "posthog-js": "1.364.4", "posthog-node": "5.28.9", "pptxgenjs": "4.0.1", - "pptxviewjs": "1.1.8", "prismjs": "^1.30.0", "react": "19.2.4", "react-dom": "19.2.4", diff --git a/bun.lock b/bun.lock index d682cb37cdf..4a48daf8ee8 100644 --- a/bun.lock +++ b/bun.lock @@ -1,6 +1,5 @@ { "lockfileVersion": 1, - "configVersion": 1, "workspaces": { "": { "name": "simstudio", @@ -163,7 +162,7 @@ "ajv": "8.18.0", "better-auth": "1.3.12", "better-auth-harmony": "1.3.1", - "binary-extensions": "^2.0.0", + "binary-extensions": "3.1.0", "browser-image-compression": "^2.0.2", "cheerio": "1.1.2", "class-variance-authority": "^0.7.1", @@ -177,6 +176,7 @@ "docx": "^9.6.1", "docx-preview": "^0.3.7", "drizzle-orm": "^0.45.2", + "echarts": "6.0.0", "es-toolkit": "1.45.1", "ffmpeg-static": "5.3.0", "fluent-ffmpeg": "2.1.3", @@ -185,7 +185,6 @@ "google-auth-library": "10.5.0", "gray-matter": "^4.0.3", "groq-sdk": "^0.15.0", - "hast-util-to-html": "9.0.5", "html-to-image": 
"1.11.13", "html-to-text": "^9.0.5", "idb-keyval": "6.2.2", @@ -222,7 +221,6 @@ "posthog-js": "1.364.4", "posthog-node": "5.28.9", "pptxgenjs": "4.0.1", - "pptxviewjs": "1.1.8", "prismjs": "^1.30.0", "react": "19.2.4", "react-dom": "19.2.4", @@ -501,9 +499,9 @@ "@ai-sdk/cerebras": ["@ai-sdk/cerebras@1.0.44", "", { "dependencies": { "@ai-sdk/openai-compatible": "1.0.39", "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-2w7+jq0bWEF6McgWPb2gjaEx1TpqdUq4eyX/gPLTp7HzfDZKEVmmVXRvnKvjzBP/VH7xW4OT5jhTpTPTfYNYYQ=="], - "@ai-sdk/deepseek": ["@ai-sdk/deepseek@1.0.39", "", { "dependencies": { "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-5TXw7Pm0+/YL2WdnZpXBgruPayhqBgBMNDL95V14Sf4MQz+RmNMhansvK8Fv9Dcgp3Y0p7EasNsPWYJOfj0zoA=="], + "@ai-sdk/deepseek": ["@ai-sdk/deepseek@1.0.40", "", { "dependencies": { "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-T5zW4s4/aKZLTjUulB9hUOFB6kxNCiVGPUP3xuZyVAlOWBye51KELmI3pCHSfMCrJcuA5Xhlg7ykO2JRW9Qq3Q=="], - "@ai-sdk/gateway": ["@ai-sdk/gateway@2.0.87", "", { "dependencies": { "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-9aPUt/pJb2NY1HPeJIGHBPUxZiZu+EX1aNyBCGDynHtLzCBaZCANMWUxrluxmGLpoYTRik+WxLzUMSZS/FEGew=="], + "@ai-sdk/gateway": ["@ai-sdk/gateway@2.0.88", "", { "dependencies": { "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-H62l0gxr4K0rdR2WHbvck2wOKMsocAjdZg41Exsj9Qf5/TyAuHzcNt9jKNv5t2vRFXFZaCpbC5uCCxgUC/GiaA=="], "@ai-sdk/google": ["@ai-sdk/google@2.0.72", "", { "dependencies": { "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25" }, "peerDependencies": { "zod": "^3.25.76 
|| ^4.1.8" } }, "sha512-BjDY6l+rV4CmHKjZe4H0uRXW3M2o+g7PaYM8oFpW+9PP1qKNEybnJ6//Si7BSf6DT+86dKARrtEl09lxSSaMaA=="], @@ -905,7 +903,7 @@ "@grpc/grpc-js": ["@grpc/grpc-js@1.14.3", "", { "dependencies": { "@grpc/proto-loader": "^0.8.0", "@js-sdsl/ordered-map": "^4.4.2" } }, "sha512-Iq8QQQ/7X3Sac15oB6p0FmUg/klxQvXLeileoqrTRGJYLV+/9tubbr9ipz0GKHjmXVsgFPo/+W+2cA8eNcR+XA=="], - "@grpc/proto-loader": ["@grpc/proto-loader@0.8.0", "", { "dependencies": { "lodash.camelcase": "^4.3.0", "long": "^5.0.0", "protobufjs": "^7.5.3", "yargs": "^17.7.2" }, "bin": { "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" } }, "sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ=="], + "@grpc/proto-loader": ["@grpc/proto-loader@0.8.1", "", { "dependencies": { "lodash.camelcase": "^4.3.0", "long": "^5.0.0", "protobufjs": "^7.5.5", "yargs": "^17.7.2" }, "bin": { "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" } }, "sha512-wtF6h+DY6M3YaDBPAmvuuA6jV8Sif9MjtOI5euKFWRgCDl5PeDpPsHR9u2l6St5ceY8AZgoNDww5+HvEsXFsGg=="], "@hexagon/base64": ["@hexagon/base64@1.1.28", "", {}, "sha512-lhqDEAvWixy3bZ+UOYbPwUbBkwBq5C1LAJ/xPC8Oi+lL54oyakv/npbA0aU2hgCsx/1NUd4IBvV03+aUBWxerw=="], @@ -915,7 +913,7 @@ "@iconify/types": ["@iconify/types@2.0.0", "", {}, "sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg=="], - "@iconify/utils": ["@iconify/utils@3.1.2", "", { "dependencies": { "@antfu/install-pkg": "^1.1.0", "@iconify/types": "^2.0.0", "import-meta-resolve": "^4.2.0" } }, "sha512-jVf75icVVgSVGf9+QWBeCHdFL35yZ06HMHl9sCa059pITTP781lOacvRazfwAmXDKiBiUdQQMWVnuiw/RaQNhQ=="], + "@iconify/utils": ["@iconify/utils@3.1.3", "", { "dependencies": { "@antfu/install-pkg": "^1.1.0", "@iconify/types": "^2.0.0", "import-meta-resolve": "^4.2.0" } }, "sha512-LPKOXPn/zV+zis1oOfGWogaXVpqUybF3ZS6SCZIsz8vg0ivVp9+fVqyYB7xq0aiST/VhUQYGO1qo6uoYSiEJqw=="], "@img/colour": ["@img/colour@1.1.0", "", {}, 
"sha512-Td76q7j57o/tLVdgS746cYARfSyxk8iEfRxewL9h4OMzYhbW4TAcppl0mT4eyqXddh6L/jwoM75mo7ixa/pCeQ=="], @@ -1607,39 +1605,39 @@ "@t3-oss/env-nextjs": ["@t3-oss/env-nextjs@0.13.4", "", { "dependencies": { "@t3-oss/env-core": "0.13.4" }, "peerDependencies": { "typescript": ">=5.0.0", "valibot": "^1.0.0-beta.7 || ^1.0.0", "zod": "^3.24.0 || ^4.0.0-beta.0" }, "optionalPeers": ["typescript", "valibot", "zod"] }, "sha512-6ecXR7SH7zJKVcBODIkB7wV9QLMU23uV8D9ec6P+ULHJ5Ea/YXEHo+Z/2hSYip5i9ptD/qZh8VuOXyldspvTTg=="], - "@tabler/icons": ["@tabler/icons@3.42.0", "", {}, "sha512-h0nFIRgwrE/9iVgN+GuLijbiLIBWJ3chNvIWhqUZhy4D9fv3tkoQ3EYFAvxvfdvQUNNVAhJhj+ar54y6t016Vg=="], + "@tabler/icons": ["@tabler/icons@3.44.0", "", {}, "sha512-Wn0AOZG9sg0L+bjfMqq4eNhC6pQjIrk94LvvWYNYkY8KH8wC3YILRzQlrnVJc4FUeMxH/AK97QsYCX35H3LndA=="], - "@tabler/icons-react": ["@tabler/icons-react@3.42.0", "", { "dependencies": { "@tabler/icons": "3.42.0" }, "peerDependencies": { "react": ">= 16" } }, "sha512-WvKhHYLdJaZbiY4Jm31fmTbzIwxokXcE1HM/m9rmXvh7UoHG4mM8n+9NOB6xEwB5SZQ+G/Z102eMj1F3NqDMVg=="], + "@tabler/icons-react": ["@tabler/icons-react@3.44.0", "", { "dependencies": { "@tabler/icons": "3.44.0" }, "peerDependencies": { "react": ">= 16" } }, "sha512-8+rvzBbVm/1Z3sG3x7GUNAaxIKxwgz8xaMhRs23nrCnMTKRFAhEC+82zAIFeAA0seXdrAGX5HFCkaLpGK2rVHg=="], - "@tailwindcss/node": ["@tailwindcss/node@4.2.4", "", { "dependencies": { "@jridgewell/remapping": "^2.3.5", "enhanced-resolve": "^5.19.0", "jiti": "^2.6.1", "lightningcss": "1.32.0", "magic-string": "^0.30.21", "source-map-js": "^1.2.1", "tailwindcss": "4.2.4" } }, "sha512-Ai7+yQPxz3ddrDQzFfBKdHEVBg0w3Zl83jnjuwxnZOsnH9pGn93QHQtpU0p/8rYWxvbFZHneni6p1BSLK4DkGA=="], + "@tailwindcss/node": ["@tailwindcss/node@4.3.0", "", { "dependencies": { "@jridgewell/remapping": "^2.3.5", "enhanced-resolve": "^5.21.0", "jiti": "^2.6.1", "lightningcss": "1.32.0", "magic-string": "^0.30.21", "source-map-js": "^1.2.1", "tailwindcss": "4.3.0" } }, 
"sha512-aFb4gUhFOgdh9AXo4IzBEOzBkkAxm9VigwDJnMIYv3lcfXCJVesNfbEaBl4BNgVRyid92AmdviqwBUBRKSeY3g=="], - "@tailwindcss/oxide": ["@tailwindcss/oxide@4.2.4", "", { "optionalDependencies": { "@tailwindcss/oxide-android-arm64": "4.2.4", "@tailwindcss/oxide-darwin-arm64": "4.2.4", "@tailwindcss/oxide-darwin-x64": "4.2.4", "@tailwindcss/oxide-freebsd-x64": "4.2.4", "@tailwindcss/oxide-linux-arm-gnueabihf": "4.2.4", "@tailwindcss/oxide-linux-arm64-gnu": "4.2.4", "@tailwindcss/oxide-linux-arm64-musl": "4.2.4", "@tailwindcss/oxide-linux-x64-gnu": "4.2.4", "@tailwindcss/oxide-linux-x64-musl": "4.2.4", "@tailwindcss/oxide-wasm32-wasi": "4.2.4", "@tailwindcss/oxide-win32-arm64-msvc": "4.2.4", "@tailwindcss/oxide-win32-x64-msvc": "4.2.4" } }, "sha512-9El/iI069DKDSXwTvB9J4BwdO5JhRrOweGaK25taBAvBXyXqJAX+Jqdvs8r8gKpsI/1m0LeJLyQYTf/WLrBT1Q=="], + "@tailwindcss/oxide": ["@tailwindcss/oxide@4.3.0", "", { "optionalDependencies": { "@tailwindcss/oxide-android-arm64": "4.3.0", "@tailwindcss/oxide-darwin-arm64": "4.3.0", "@tailwindcss/oxide-darwin-x64": "4.3.0", "@tailwindcss/oxide-freebsd-x64": "4.3.0", "@tailwindcss/oxide-linux-arm-gnueabihf": "4.3.0", "@tailwindcss/oxide-linux-arm64-gnu": "4.3.0", "@tailwindcss/oxide-linux-arm64-musl": "4.3.0", "@tailwindcss/oxide-linux-x64-gnu": "4.3.0", "@tailwindcss/oxide-linux-x64-musl": "4.3.0", "@tailwindcss/oxide-wasm32-wasi": "4.3.0", "@tailwindcss/oxide-win32-arm64-msvc": "4.3.0", "@tailwindcss/oxide-win32-x64-msvc": "4.3.0" } }, "sha512-F7HZGBeN9I0/AuuJS5PwcD8xayx5ri5GhjYUDBEVYUkexyA/giwbDNjRVrxSezE3T250OU2K/wp/ltWx3UOefg=="], - "@tailwindcss/oxide-android-arm64": ["@tailwindcss/oxide-android-arm64@4.2.4", "", { "os": "android", "cpu": "arm64" }, "sha512-e7MOr1SAn9U8KlZzPi1ZXGZHeC5anY36qjNwmZv9pOJ8E4Q6jmD1vyEHkQFmNOIN7twGPEMXRHmitN4zCMN03g=="], + "@tailwindcss/oxide-android-arm64": ["@tailwindcss/oxide-android-arm64@4.3.0", "", { "os": "android", "cpu": "arm64" }, 
"sha512-TJPiq67tKlLuObP6RkwvVGDoxCMBVtDgKkLfa/uyj7/FyxvQwHS+UOnVrXXgbEsfUaMgiVvC4KbJnRr26ho4Ng=="], - "@tailwindcss/oxide-darwin-arm64": ["@tailwindcss/oxide-darwin-arm64@4.2.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-tSC/Kbqpz/5/o/C2sG7QvOxAKqyd10bq+ypZNf+9Fi2TvbVbv1zNpcEptcsU7DPROaSbVgUXmrzKhurFvo5eDg=="], + "@tailwindcss/oxide-darwin-arm64": ["@tailwindcss/oxide-darwin-arm64@4.3.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-oMN/WZRb+SO37BmUElEgeEWuU8E/HXRkiODxJxLe1UTHVXLrdVSgfaJV7pSlhRGMSOiXLuxTIjfsF3wYvz8cgQ=="], - "@tailwindcss/oxide-darwin-x64": ["@tailwindcss/oxide-darwin-x64@4.2.4", "", { "os": "darwin", "cpu": "x64" }, "sha512-yPyUXn3yO/ufR6+Kzv0t4fCg2qNr90jxXc5QqBpjlPNd0NqyDXcmQb/6weunH/MEDXW5dhyEi+agTDiqa3WsGg=="], + "@tailwindcss/oxide-darwin-x64": ["@tailwindcss/oxide-darwin-x64@4.3.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-N6CUmu4a6bKVADfw77p+iw6Yd9Q3OBhe0veaDX+QazfuVYlQsHfDgxBrsjQ/IW+zywL8mTrNd0SdJT/zgtvMdA=="], - "@tailwindcss/oxide-freebsd-x64": ["@tailwindcss/oxide-freebsd-x64@4.2.4", "", { "os": "freebsd", "cpu": "x64" }, "sha512-BoMIB4vMQtZsXdGLVc2z+P9DbETkiopogfWZKbWwM8b/1Vinbs4YcUwo+kM/KeLkX3Ygrf4/PsRndKaYhS8Eiw=="], + "@tailwindcss/oxide-freebsd-x64": ["@tailwindcss/oxide-freebsd-x64@4.3.0", "", { "os": "freebsd", "cpu": "x64" }, "sha512-zDL5hBkQdH5C6MpqbK3gQAgP80tsMwSI26vjOzjJtNCMUo0lFgOItzHKBIupOZNQxt3ouPH7RPhvNhiTfCe5CQ=="], - "@tailwindcss/oxide-linux-arm-gnueabihf": ["@tailwindcss/oxide-linux-arm-gnueabihf@4.2.4", "", { "os": "linux", "cpu": "arm" }, "sha512-7pIHBLTHYRAlS7V22JNuTh33yLH4VElwKtB3bwchK/UaKUPpQ0lPQiOWcbm4V3WP2I6fNIJ23vABIvoy2izdwA=="], + "@tailwindcss/oxide-linux-arm-gnueabihf": ["@tailwindcss/oxide-linux-arm-gnueabihf@4.3.0", "", { "os": "linux", "cpu": "arm" }, "sha512-R06HdNi7A7OEoMsf6d4tjZ71RCWnZQPHj2mnotSFURjNLdBC+cIgXQ7l81CqeoiQftjf6OOblxXMInMgN2VzMA=="], - "@tailwindcss/oxide-linux-arm64-gnu": ["@tailwindcss/oxide-linux-arm64-gnu@4.2.4", "", { "os": "linux", "cpu": "arm64" }, 
"sha512-+E4wxJ0ZGOzSH325reXTWB48l42i93kQqMvDyz5gqfRzRZ7faNhnmvlV4EPGJU3QJM/3Ab5jhJ5pCRUsKn6OQw=="], + "@tailwindcss/oxide-linux-arm64-gnu": ["@tailwindcss/oxide-linux-arm64-gnu@4.3.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-qTJHELX8jetjhRQHCLilkVLmybpzNQAtaI/gaoVoidn/ufbNDbAo8KlK2J+yPoc8wQxvDxCmh/5lr8nC1+lTbg=="], - "@tailwindcss/oxide-linux-arm64-musl": ["@tailwindcss/oxide-linux-arm64-musl@4.2.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-bBADEGAbo4ASnppIziaQJelekCxdMaxisrk+fB7Thit72IBnALp9K6ffA2G4ruj90G9XRS2VQ6q2bCKbfFV82g=="], + "@tailwindcss/oxide-linux-arm64-musl": ["@tailwindcss/oxide-linux-arm64-musl@4.3.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-Z6sukiQsngnWO+l39X4pPbiWT81IC+PLKF+PHxIlyZbGNb9MODfYlXEVlFvej5BOZInWX01kVyzeLvHsXhfczQ=="], - "@tailwindcss/oxide-linux-x64-gnu": ["@tailwindcss/oxide-linux-x64-gnu@4.2.4", "", { "os": "linux", "cpu": "x64" }, "sha512-7Mx25E4WTfnht0TVRTyC00j3i0M+EeFe7wguMDTlX4mRxafznw0CA8WJkFjWYH5BlgELd1kSjuU2JiPnNZbJDA=="], + "@tailwindcss/oxide-linux-x64-gnu": ["@tailwindcss/oxide-linux-x64-gnu@4.3.0", "", { "os": "linux", "cpu": "x64" }, "sha512-DRNdQRpSGzRGfARVuVkxvM8Q12nh19l4BF/G7zGA1oe+9wcC6saFBHTISrpIcKzhiXtSrlSrluCfvMuledoCTQ=="], - "@tailwindcss/oxide-linux-x64-musl": ["@tailwindcss/oxide-linux-x64-musl@4.2.4", "", { "os": "linux", "cpu": "x64" }, "sha512-2wwJRF7nyhOR0hhHoChc04xngV3iS+akccHTGtz965FwF0up4b2lOdo6kI1EbDaEXKgvcrFBYcYQQ/rrnWFVfA=="], + "@tailwindcss/oxide-linux-x64-musl": ["@tailwindcss/oxide-linux-x64-musl@4.3.0", "", { "os": "linux", "cpu": "x64" }, "sha512-Z0IADbDo8bh6I7h2IQMx601AdXBLfFpEdUotft86evd/8ZPflZe9COPO8Q1vw+pfLWIUo9zN/JGZvwuAJqduqg=="], - "@tailwindcss/oxide-wasm32-wasi": ["@tailwindcss/oxide-wasm32-wasi@4.2.4", "", { "dependencies": { "@emnapi/core": "^1.8.1", "@emnapi/runtime": "^1.8.1", "@emnapi/wasi-threads": "^1.1.0", "@napi-rs/wasm-runtime": "^1.1.1", "@tybys/wasm-util": "^0.10.1", "tslib": "^2.8.1" }, "cpu": "none" }, 
"sha512-FQsqApeor8Fo6gUEklzmaa9994orJZZDBAlQpK2Mq+DslRKFJeD6AjHpBQ0kZFQohVr8o85PPh8eOy86VlSCmw=="], + "@tailwindcss/oxide-wasm32-wasi": ["@tailwindcss/oxide-wasm32-wasi@4.3.0", "", { "dependencies": { "@emnapi/core": "^1.10.0", "@emnapi/runtime": "^1.10.0", "@emnapi/wasi-threads": "^1.2.1", "@napi-rs/wasm-runtime": "^1.1.4", "@tybys/wasm-util": "^0.10.1", "tslib": "^2.8.1" }, "cpu": "none" }, "sha512-HNZGOUxEmElksYR7S6sC5jTeNGpobAsy9u7Gu0AskJ8/20FR9GqebUyB+HBcU/ax6BHuiuJi+Oda4B+YX6H1yA=="], - "@tailwindcss/oxide-win32-arm64-msvc": ["@tailwindcss/oxide-win32-arm64-msvc@4.2.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-L9BXqxC4ToVgwMFqj3pmZRqyHEztulpUJzCxUtLjobMCzTPsGt1Fa9enKbOpY2iIyVtaHNeNvAK8ERP/64sqGQ=="], + "@tailwindcss/oxide-win32-arm64-msvc": ["@tailwindcss/oxide-win32-arm64-msvc@4.3.0", "", { "os": "win32", "cpu": "arm64" }, "sha512-Pe+RPVTi1T+qymuuRpcdvwSVZjnll/f7n8gBxMMh3xLTctMDKqpdfGimbMyioqtLhUYZxdJ9wGNhV7MKHvgZsQ=="], - "@tailwindcss/oxide-win32-x64-msvc": ["@tailwindcss/oxide-win32-x64-msvc@4.2.4", "", { "os": "win32", "cpu": "x64" }, "sha512-ESlKG0EpVJQwRjXDDa9rLvhEAh0mhP1sF7sap9dNZT0yyl9SAG6T7gdP09EH0vIv0UNTlo6jPWyujD6559fZvw=="], + "@tailwindcss/oxide-win32-x64-msvc": ["@tailwindcss/oxide-win32-x64-msvc@4.3.0", "", { "os": "win32", "cpu": "x64" }, "sha512-Mvrf2kXW/yeW/OTezZlCGOirXRcUuLIBx/5Y12BaPM7wJoryG6dfS/NJL8aBPqtTEx/Vm4T4vKzFUcKDT+TKUA=="], - "@tailwindcss/postcss": ["@tailwindcss/postcss@4.2.4", "", { "dependencies": { "@alloc/quick-lru": "^5.2.0", "@tailwindcss/node": "4.2.4", "@tailwindcss/oxide": "4.2.4", "postcss": "^8.5.6", "tailwindcss": "4.2.4" } }, "sha512-wgAVj6nUWAolAu8YFvzT2cTBIElWHkjZwFYovF+xsqKsW2ADxM/X2opxj5NsF/qVccAOjRNe8X2IdPzMsWyHTg=="], + "@tailwindcss/postcss": ["@tailwindcss/postcss@4.3.0", "", { "dependencies": { "@alloc/quick-lru": "^5.2.0", "@tailwindcss/node": "4.3.0", "@tailwindcss/oxide": "4.3.0", "postcss": "^8.5.10", "tailwindcss": "4.3.0" } }, 
"sha512-Jm05Tjx+9yCLGv5qw1c+84Psds8MnyrEQYCB+FFk2lgGiUjlRqdxke4mVTuYrj2xnVZqKim2Apr5ySuQRYAw/w=="], "@tailwindcss/typography": ["@tailwindcss/typography@0.5.19", "", { "dependencies": { "postcss-selector-parser": "6.0.10" }, "peerDependencies": { "tailwindcss": ">=3.0.0 || insiders || >=4.0.0-alpha.20 || >=4.0.0-beta.1" } }, "sha512-w31dd8HOx3k9vPtcQh5QHP9GwKcgbMp87j58qi6xgiBnFFtKEAgCWnDw4qUT8aHwkCp8bKvb/KGKWWHedP0AAg=="], @@ -1759,7 +1757,7 @@ "@types/deep-eql": ["@types/deep-eql@4.0.2", "", {}, "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw=="], - "@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="], + "@types/estree": ["@types/estree@1.0.9", "", {}, "sha512-GhdPgy1el4/ImP05X05Uw4cw2/M93BCUmnEvWZNStlCzEKME4Fkk+YpoA5OiHNQmoS7Cafb8Xa3Pya8m1Qrzeg=="], "@types/estree-jsx": ["@types/estree-jsx@1.0.5", "", { "dependencies": { "@types/estree": "*" } }, "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg=="], @@ -1789,7 +1787,7 @@ "@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="], - "@types/node": ["@types/node@22.19.17", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q=="], + "@types/node": ["@types/node@22.19.18", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-9v00a+dn2yWVsYDEunWC4g/TcRKVq3r8N5FuZp7u0SGrPvdN9c2yXI9bBuf5Fl0hNCb+QTIePTn5pJs2pwBOQQ=="], "@types/node-fetch": ["@types/node-fetch@2.6.13", "", { "dependencies": { "@types/node": "*", "form-data": "^4.0.4" } }, "sha512-QGpRVpzSaUs30JBSGPjOg4Uveu384erbHBoT1zeONvyCfwQxIkUshLAOqN/k9EjGviPRmWTTe6aH2qySWKTVSw=="], @@ -1885,7 +1883,7 @@ "agentkeepalive": ["agentkeepalive@4.6.0", "", { "dependencies": { "humanize-ms": "^1.2.1" } }, 
"sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ=="], - "ai": ["ai@5.0.185", "", { "dependencies": { "@ai-sdk/gateway": "2.0.87", "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-TQrpK5+R1xsQQH1YwY2Qnt1usZTVSDLiDg0Lda6vspC/G4a40aBs4b741Lr1ZNl8g1fu6gANyeK9C8Hz9p3O5A=="], + "ai": ["ai@5.0.186", "", { "dependencies": { "@ai-sdk/gateway": "2.0.88", "@ai-sdk/provider": "2.0.3", "@ai-sdk/provider-utils": "3.0.25", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-0HVwYO9k/x5eSNggqya/75uirBLjkZoL5QdNp9ftjOCl/IXWSzqys/SzsL3ifWBz603a0KbW+EZyYVtmbFJrTQ=="], "ajv": ["ajv@8.18.0", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A=="], @@ -1963,7 +1961,7 @@ "base64id": ["base64id@2.0.0", "", {}, "sha512-lGe34o6EHj9y3Kts9R4ZYs/Gr+6N7MCaMlIFA3F1R2O5/m7K06AxfSeO5530PEERE6/WyEg3lsuyw4GHlPZHog=="], - "baseline-browser-mapping": ["baseline-browser-mapping@2.10.27", "", { "bin": { "baseline-browser-mapping": "dist/cli.cjs" } }, "sha512-zEs/ufmZoUd7WftKpKyXaT6RFxpQ5Qm9xytKRHvJfxFV9DFJkZph9RvJ1LcOUi0Z1ZVijMte65JbILeV+8QQEA=="], + "baseline-browser-mapping": ["baseline-browser-mapping@2.10.29", "", { "bin": { "baseline-browser-mapping": "dist/cli.cjs" } }, "sha512-Asa2krT+XTPZINCS+2QcyS8WTkObE77RwkydwF7h6DmnKqbvlalz93m/dnphUyCa6SWSP51VgtEUf2FN+gelFQ=="], "basic-auth": ["basic-auth@2.0.1", "", { "dependencies": { "safe-buffer": "5.1.2" } }, "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg=="], @@ -1981,7 +1979,7 @@ "bignumber.js": ["bignumber.js@9.3.1", "", {}, 
"sha512-Ko0uX15oIUS7wJ3Rb30Fs6SkVbLmPBAKdlm7q9+ak9bbIeFf0MwuBsQV6z7+X768/cHsfg+WlysDWJcmthjsjQ=="], - "binary-extensions": ["binary-extensions@2.3.0", "", {}, "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw=="], + "binary-extensions": ["binary-extensions@3.1.0", "", {}, "sha512-Jvvd9hy1w+xUad8+ckQsWA/V1AoyubOvqn0aygjMOVM4BfIaRav1NFS3LsTSDaV4n4FtcCtQXvzep1E6MboqwQ=="], "bintrees": ["bintrees@1.0.2", "", {}, "sha512-VOMgTMwjAaUG580SXn3LacVgjurrbMme7ZZNYGSSV7mmtY6QQRh0Eg3pwIcntQ77DErK1L0NxkbetjcoXzVwKw=="], @@ -1995,7 +1993,7 @@ "bowser": ["bowser@2.14.1", "", {}, "sha512-tzPjzCxygAKWFOJP011oxFHs57HzIhOEracIgAePE4pqB3LikALKnSzUyU4MGs9/iCEUuHlAJTjTc5M+u7YEGg=="], - "brace-expansion": ["brace-expansion@5.0.5", "", { "dependencies": { "balanced-match": "^4.0.2" } }, "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ=="], + "brace-expansion": ["brace-expansion@5.0.6", "", { "dependencies": { "balanced-match": "^4.0.2" } }, "sha512-kLpxurY4Z4r9sgMsyG0Z9uzsBlgiU/EFKhj/h91/8yHu0edo7XuixOIH3VcJ8kkxs6/jPzoI6U9Vj3WqbMQ94g=="], "braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="], @@ -2357,11 +2355,13 @@ "ecdsa-sig-formatter": ["ecdsa-sig-formatter@1.0.11", "", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ=="], + "echarts": ["echarts@6.0.0", "", { "dependencies": { "tslib": "2.3.0", "zrender": "6.0.0" } }, "sha512-Tte/grDQRiETQP4xz3iZWSvoHrkCQtwqd6hs+mifXcjrCuo2iKWbajFObuLJVBlDIJlOzgQPd1hsaKt/3+OMkQ=="], + "ee-first": ["ee-first@1.1.1", "", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="], "effect": ["effect@3.21.0", "", { "dependencies": { "@standard-schema/spec": "^1.0.0", "fast-check": "^3.23.1" } }, 
"sha512-PPN80qRokCd1f015IANNhrwOnLO7GrrMQfk4/lnZRE/8j7UPWrNNjPV0uBrZutI/nHzernbW+J0hdqQysHiSnQ=="], - "electron-to-chromium": ["electron-to-chromium@1.5.349", "", {}, "sha512-QsWVGyRuY07Aqb234QytTfwd5d9AJlfNIQ5wIOl1L+PZDzI9d9+Fn0FRale/QYlFxt/bUnB0/nLd1jFPGxGK1A=="], + "electron-to-chromium": ["electron-to-chromium@1.5.353", "", {}, "sha512-kOrWphBi8TOZyiJZqsgqIle0lw+tzmnQK83pV9dZUd01Nm2POECSyFQMAuarzZdYqQW7FH9RaYOuaRo3h+bQ3w=="], "emoji-regex": ["emoji-regex@10.6.0", "", {}, "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A=="], @@ -2381,7 +2381,7 @@ "engine.io-parser": ["engine.io-parser@5.2.3", "", {}, "sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q=="], - "enhanced-resolve": ["enhanced-resolve@5.21.0", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.3.3" } }, "sha512-otxSQPw4lkOZWkHpB3zaEQs6gWYEsmX4xQF68ElXC/TWvGxGMSGOvoNbaLXm6/cS/fSfHtsEdw90y20PCd+sCA=="], + "enhanced-resolve": ["enhanced-resolve@5.21.2", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.3.3" } }, "sha512-xe9vQb5kReirPUxgQrXA3ihgbCqssmTiM7cOZ+Gzu+VeGWgpV98lLZvp0dl4yriyAePcewxGUs9UpKD8PET9KQ=="], "entities": ["entities@2.2.0", "", {}, "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A=="], @@ -2491,7 +2491,7 @@ "fast-uri": ["fast-uri@3.1.2", "", {}, "sha512-rVjf7ArG3LTk+FS6Yw81V1DLuZl1bRbNrev6Tmd/9RaroeeRRJhAt7jg/6YFxbvAQXUCavSoZhPPj6oOx+5KjQ=="], - "fast-xml-builder": ["fast-xml-builder@1.1.9", "", { "dependencies": { "path-expression-matcher": "^1.1.3" } }, "sha512-jcyKVSEX13iseJqg7n/KWw+xnu/7fdrZ333Fac54KjHDIELVCfDDJXYIm6DTJ0Su4gSzrhqiK0DzY/wZbF40mw=="], + "fast-xml-builder": ["fast-xml-builder@1.2.0", "", { "dependencies": { "path-expression-matcher": "^1.5.0", "xml-naming": "^0.1.0" } }, "sha512-00aAWieqff+ZJhsXA4g1g7M8k+7AYoMUUHF+/zFb5U6Uv/P0Vl4QZo84/IcufzYalLuEj9928bXN9PbbFzMF0Q=="], "fast-xml-parser": 
["fast-xml-parser@5.7.3", "", { "dependencies": { "@nodable/entities": "^2.1.0", "fast-xml-builder": "^1.1.7", "path-expression-matcher": "^1.5.0", "strnum": "^2.2.3" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-C0AaNuC+mscy6vrAQKAc/rMq+zAPHodfHGZu4sGVehvAQt/JLG1O5zEcYcXSY5zSqr4YVgxsB+pHXTq0i7eDlg=="], @@ -2565,7 +2565,7 @@ "get-caller-file": ["get-caller-file@2.0.5", "", {}, "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="], - "get-east-asian-width": ["get-east-asian-width@1.5.0", "", {}, "sha512-CQ+bEO+Tva/qlmw24dCejulK5pMzVnUOFOijVogd3KQs07HnRIgp8TGipvCCRT06xeYEbpbgwaCxglFyiuIcmA=="], + "get-east-asian-width": ["get-east-asian-width@1.6.0", "", {}, "sha512-QRbvDIbx6YklUe6RxeTeleMR0yv3cYH6PsPZHcnVn7xv7zO1BHN8r0XETu8n6Ye3Q+ahtSarc3WgtNWmehIBfA=="], "get-intrinsic": ["get-intrinsic@1.3.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ=="], @@ -2711,7 +2711,7 @@ "inquirer": ["inquirer@8.2.7", "", { "dependencies": { "@inquirer/external-editor": "^1.0.0", "ansi-escapes": "^4.2.1", "chalk": "^4.1.1", "cli-cursor": "^3.1.0", "cli-width": "^3.0.0", "figures": "^3.0.0", "lodash": "^4.17.21", "mute-stream": "0.0.8", "ora": "^5.4.1", "run-async": "^2.4.0", "rxjs": "^7.5.5", "string-width": "^4.1.0", "strip-ansi": "^6.0.0", "through": "^2.3.6", "wrap-ansi": "^6.0.1" } }, "sha512-UjOaSel/iddGZJ5xP/Eixh6dY1XghiBw4XK13rCCIJcJfyhhoul/7KhLLUGtebEj6GDYM6Vnx/mVsjx2L/mFIA=="], - "internmap": ["internmap@1.0.1", "", {}, "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw=="], + "internmap": ["internmap@2.0.3", "", {}, 
"sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg=="], "ioredis": ["ioredis@5.10.1", "", { "dependencies": { "@ioredis/commands": "1.5.1", "cluster-key-slot": "^1.1.0", "debug": "^4.3.4", "denque": "^2.1.0", "lodash.defaults": "^4.2.0", "lodash.isarguments": "^3.1.0", "redis-errors": "^1.2.0", "redis-parser": "^3.0.0", "standard-as-callback": "^2.1.0" } }, "sha512-HuEDBTI70aYdx1v6U97SbNx9F1+svQKBDo30o0b9fw055LMepzpOOd0Ccg9Q6tbqmBSJaMuY0fB7yw9/vjBYCA=="], @@ -2845,7 +2845,7 @@ "libmime": ["libmime@5.3.7", "", { "dependencies": { "encoding-japanese": "2.2.0", "iconv-lite": "0.6.3", "libbase64": "1.3.0", "libqp": "2.1.1" } }, "sha512-FlDb3Wtha8P01kTL3P9M+ZDNDWPKPmKHWaU/cG/lg5pfuAwdflVpZE+wm9m7pKmC5ww6s+zTxBKS1p6yl3KpSw=="], - "libphonenumber-js": ["libphonenumber-js@1.12.42", "", {}, "sha512-oKQFPTibqQwZZkChCDVMFVJXMZdyJNqDWZWYNn8BgyAaK/6yFJEowxCY0RVFirRyWP63hMRuKlkSEd9qlvbWXg=="], + "libphonenumber-js": ["libphonenumber-js@1.13.0", "", {}, "sha512-N12qmdu0BM1wVNkMKYOoJR4fTOZDblrKNsOqGbKoUZrYsYLX2zx1O5X+vhK0WJPBU/+/kh9tCr8x0a7t1puGWg=="], "libqp": ["libqp@2.1.1", "", {}, "sha512-0Wd+GPz1O134cP62YU2GTOPNA7Qgl09XwCqM5zpBv87ERCXdfDtyKXvV7c9U22yWJh44QZqBocFnXN11K96qow=="], @@ -3287,7 +3287,7 @@ "path-scurry": ["path-scurry@2.0.2", "", { "dependencies": { "lru-cache": "^11.0.0", "minipass": "^7.1.2" } }, "sha512-3O/iVVsJAPsOnpwWIeD+d6z/7PmqApyQePUtCndjatj/9I5LylHvt5qluFaBT3I5h3r1ejfR056c+FCv+NnNXg=="], - "path-to-regexp": ["path-to-regexp@0.1.13", "", {}, "sha512-A/AGNMFN3c8bOlvV9RreMdrv7jsmF9XIfDeCd87+I8RNg6s78BhJxMu69NEMHBSJFxKidViTEdruRwEk/WIKqA=="], + "path-to-regexp": ["path-to-regexp@8.4.2", "", {}, "sha512-qRcuIdP69NPm4qbACK+aDogI5CBDMi1jKe0ry5rSQJz8JVLsC7jV8XpiJjGRLLol3N+R5ihGYcrPLTno6pAdBA=="], "pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="], @@ -3361,8 +3361,6 @@ "pptxgenjs": ["pptxgenjs@4.0.1", "", { "dependencies": { "@types/node": 
"^22.8.1", "https": "^1.0.0", "image-size": "^1.2.1", "jszip": "^3.10.1" } }, "sha512-TeJISr8wouAuXw4C1F/mC33xbZs/FuEG6nH9FG1Zj+nuPcGMP5YRHl6X+j3HSUnS1f3at6k75ZZXPMZlA5Lj9A=="], - "pptxviewjs": ["pptxviewjs@1.1.8", "", { "peerDependencies": { "chart.js": ">=4.4.1", "jszip": ">=3.10.1" }, "optionalPeers": ["chart.js", "jszip"] }, "sha512-Nk3uIg1H7WkigKIKZPcTrcmV4RMpRSHvG4jWAO9aKPD1MWkOF8fwqtypsF+kzUZvIzO0BA/eKK+zNK7/R7WrDg=="], - "preact": ["preact@10.29.1", "", {}, "sha512-gQCLc/vWroE8lIpleXtdJhTFDogTdZG9AjMUpVkDf2iTCNwYNWA+u16dL41TqUDJO4gm2IgrcMv3uTpjd4Pwmg=="], "prebuild-install": ["prebuild-install@7.1.3", "", { "dependencies": { "detect-libc": "^2.0.0", "expand-template": "^2.0.3", "github-from-package": "0.0.0", "minimist": "^1.2.3", "mkdirp-classic": "^0.5.3", "napi-build-utils": "^2.0.0", "node-abi": "^3.3.0", "pump": "^3.0.0", "rc": "^1.2.7", "simple-get": "^4.0.0", "tar-fs": "^2.0.0", "tunnel-agent": "^0.6.0" }, "bin": { "prebuild-install": "bin.js" } }, "sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug=="], @@ -3387,7 +3385,7 @@ "property-information": ["property-information@7.1.0", "", {}, "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ=="], - "protobufjs": ["protobufjs@7.5.6", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", "@protobufjs/codegen": "^2.0.5", "@protobufjs/eventemitter": "^1.1.0", "@protobufjs/fetch": "^1.1.0", "@protobufjs/float": "^1.0.2", "@protobufjs/inquire": "^1.1.1", "@protobufjs/path": "^1.1.2", "@protobufjs/pool": "^1.1.0", "@protobufjs/utf8": "^1.1.1", "@types/node": ">=13.7.0", "long": "^5.0.0" } }, "sha512-M71sTMB146U3u0di3yup8iM+zv8yPRNQVr1KK4tyBitl3qFvEGucq/rGDRShD2rsJhtN02RJaJ7j5X5hmy8SJg=="], + "protobufjs": ["protobufjs@7.5.7", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", "@protobufjs/codegen": "^2.0.5", "@protobufjs/eventemitter": "^1.1.0", 
"@protobufjs/fetch": "^1.1.0", "@protobufjs/float": "^1.0.2", "@protobufjs/inquire": "^1.1.1", "@protobufjs/path": "^1.1.2", "@protobufjs/pool": "^1.1.0", "@protobufjs/utf8": "^1.1.1", "@types/node": ">=13.7.0", "long": "^5.0.0" } }, "sha512-NGnrxS/nLKUo5nkbVQxlC71sB4hdfImdYIbFeSCidxtwATx0AHRPcANSLd0q5Bb2BkoSWo2iisQhGg5/r+ihbA=="], "proxy-addr": ["proxy-addr@2.0.7", "", { "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" } }, "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg=="], @@ -3599,7 +3597,7 @@ "selderee": ["selderee@0.11.0", "", { "dependencies": { "parseley": "^0.12.0" } }, "sha512-5TF+l7p4+OsnP8BCCvSyZiSPc4x4//p5uPwK8TCnVPJYRmU2aYKMpOXvw8zM5a5JvuuCGN1jmsMwuU2W02ukfA=="], - "semver": ["semver@7.7.4", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA=="], + "semver": ["semver@7.8.0", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-AcM7dV/5ul4EekoQ29Agm5vri8JNqRyj39o0qpX6vDF2GZrtutZl5RwgD1XnZjiTAfncsJhMI48QQH3sN87YNA=="], "send": ["send@1.2.1", "", { "dependencies": { "debug": "^4.4.3", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "fresh": "^2.0.0", "http-errors": "^2.0.1", "mime-types": "^3.0.2", "ms": "^2.1.3", "on-finished": "^2.4.1", "range-parser": "^1.2.1", "statuses": "^2.0.2" } }, "sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ=="], @@ -3737,7 +3735,7 @@ "stripe": ["stripe@18.5.0", "", { "dependencies": { "qs": "^6.11.0" }, "peerDependencies": { "@types/node": ">=12.x.x" }, "optionalPeers": ["@types/node"] }, "sha512-Hp+wFiEQtCB0LlNgcFh5uVyKznpDjzyUZ+CNVEf+I3fhlYvh7rZruIg+jOwzJRCpy0ZTPMjlzm7J2/M2N6d+DA=="], - "strnum": ["strnum@2.2.3", "", {}, "sha512-oKx6RUCuHfT3oyVjtnrmn19H1SiCqgJSg+54XqURKp5aCMbrXrhLjRN9TjuwMjiYstZ0MzDrHqkGZ5dFTKd+zg=="], + "strnum": ["strnum@2.3.0", "", {}, 
"sha512-ums3KNd42PGyx5xaoVTO1mjU1bH3NpY4vsrVlnv9PNGqQj8wd7rJ6nEypLrJ7z5vxK5RP0yMLo6J/Gsm62DI5Q=="], "strtok3": ["strtok3@6.3.0", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "peek-readable": "^4.1.0" } }, "sha512-fZtbhtvI9I48xDSywd/somNqgUHl2L2cstmXCCif0itOf96jeW18MBSyrLuNicYQVkvpOxkZtkzujiTJ9LW5Jw=="], @@ -3763,13 +3761,13 @@ "tailwind-merge": ["tailwind-merge@3.5.0", "", {}, "sha512-I8K9wewnVDkL1NTGoqWmVEIlUcB9gFriAEkXkfCjX5ib8ezGxtR3xD7iZIxrfArjEsH7F1CHD4RFUtxefdqV/A=="], - "tailwindcss": ["tailwindcss@4.2.4", "", {}, "sha512-HhKppgO81FQof5m6TEnuBWCZGgfRAWbaeOaGT00KOy/Pf/j6oUihdvBpA7ltCeAvZpFhW3j0PTclkxsd4IXYDA=="], + "tailwindcss": ["tailwindcss@4.3.0", "", {}, "sha512-y6nxMGB1nMW9R6k96e5gdIFzcfL/gTJRNaqGes1YvkLnPVXzWgbqFF2yLC0T8G774n24cx3Pe8XrKoniCOAH+Q=="], "tailwindcss-animate": ["tailwindcss-animate@1.0.7", "", { "peerDependencies": { "tailwindcss": ">=3.0.0 || insiders" } }, "sha512-bl6mpH3T7I3UFxuvDEXLxy/VuFxBk5bbzplh7tXI68mwMokNYd1t9qPBHlnyTwfa4JGC4zP516I1hYYtQ/vspA=="], "tapable": ["tapable@2.3.3", "", {}, "sha512-uxc/zpqFg6x7C8vOE7lh6Lbda8eEL9zmVm/PLeTPBRhh1xCgdWaQ+J1CUieGpIfm2HdtsUpRv+HshiasBMcc6A=="], - "tar": ["tar@7.5.14", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.1.0", "yallist": "^5.0.0" } }, "sha512-/7sHKgQO3JLP9ESlwTYUUftHUadOURUqq23xs1vjcnp8Vss6k0wCfzulyEtk5g91pjvnuriimGlyG7k6msrzRw=="], + "tar": ["tar@7.5.15", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.1.0", "yallist": "^5.0.0" } }, "sha512-dzGK0boVlC4W5QFuQN1EFSl3bIDYsk7Tj40U6eIBnK2k/8ml7TZ5agbI5j5+qnoVcAA+rNtBml8SEiLxZpNqRQ=="], "tar-fs": ["tar-fs@2.1.4", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ=="], @@ -3937,7 +3935,7 @@ "vfile-message": ["vfile-message@4.0.3", "", { 
"dependencies": { "@types/unist": "^3.0.0", "unist-util-stringify-position": "^4.0.0" } }, "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw=="], - "vite": ["vite@7.3.2", "", { "dependencies": { "esbuild": "^0.27.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", "rollup": "^4.43.0", "tinyglobby": "^0.2.15" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", "less": "^4.0.0", "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-Bby3NOsna2jsjfLVOHKes8sGwgl4TT0E6vvpYgnAYDIF/tie7MRaFthmKuHx1NSXjiTueXH3do80FMQgvEktRg=="], + "vite": ["vite@7.3.3", "", { "dependencies": { "esbuild": "^0.27.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", "rollup": "^4.43.0", "tinyglobby": "^0.2.15" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", "less": "^4.0.0", "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-/4XH147Ui7OGTjg3HbdWe5arnZQSbfuRzdr9Ec7TQi5I7R+ir0Rlc9GIvD4v0XZurELqA035KVXJXpR61xhiTA=="], "vite-node": ["vite-node@3.2.4", "", { "dependencies": { "cac": "^6.7.14", "debug": "^4.4.1", "es-module-lexer": "^1.7.0", "pathe": "^2.0.3", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "bin": { "vite-node": "vite-node.mjs" } }, 
"sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg=="], @@ -3989,7 +3987,7 @@ "ws": ["ws@8.20.0", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-sAt8BhgNbzCtgGbt2OxmpuryO63ZoDk/sqaB/znQm94T4fCEsy/yV+7CdC1kJhOU9lboAEU7R3kquuycDoibVA=="], - "xlsx": ["xlsx@https://cdn.sheetjs.com/xlsx-0.20.3/xlsx-0.20.3.tgz", { "bin": { "xlsx": "./bin/xlsx.njs" } }, "sha512-oLDq3jw7AcLqKWH2AhCpVTZl8mf6X2YReP+Neh0SJUzV/BdZYjth94tG5toiMB1PPrYtxOCfaoUCkvtuH+3AJA=="], + "xlsx": ["xlsx@https://cdn.sheetjs.com/xlsx-0.20.3/xlsx-0.20.3.tgz", { "bin": { "xlsx": "./bin/xlsx.njs" } }], "xml": ["xml@1.0.1", "", {}, "sha512-huCv9IH9Tcf95zuYCsQraZtWnJvBtLVE0QHMOs8bWyZAFZNDcYjsPq1nEx8jKA9y+Beo9v+7OBPRisQTjinQMw=="], @@ -4001,6 +3999,8 @@ "xml-name-validator": ["xml-name-validator@5.0.0", "", {}, "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg=="], + "xml-naming": ["xml-naming@0.1.0", "", {}, "sha512-k8KO9hrMyNk6tUWqUfkTEZbezRRpONVOzUTnc97VnCvyj6Tf9lyUR9EDAIeiVLv56jsMcoXEwjW8Kv5yPY52lw=="], + "xml2js": ["xml2js@0.5.0", "", { "dependencies": { "sax": ">=0.6.0", "xmlbuilder": "~11.0.0" } }, "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA=="], "xmlbuilder": ["xmlbuilder@10.1.1", "", {}, "sha512-OyzrcFLL/nb6fMGHbiRDuPup9ljBycsdCypwuyg5AAHvyWzGfChJpCXMG88AGTIMFhGZ9RccFN1e6lhg3hkwKg=="], @@ -4035,6 +4035,8 @@ "zod-validation-error": ["zod-validation-error@1.5.0", "", { "peerDependencies": { "zod": "^3.18.0" } }, "sha512-/7eFkAI4qV0tcxMBB/3+d2c1P6jzzZYdYSlBuAklzMuCrJu5bzJfHS0yVAS87dRHVlhftd6RFJDIvv03JgkSbw=="], + "zrender": ["zrender@6.0.0", "", { "dependencies": { "tslib": "2.3.0" } }, "sha512-41dFXEEXuJpNecuUQq6JlbybmnHaqqpGlbH1yxnA5V9MMP4SbohSVZsJIwz+zdjQXSSlR1Vc34EgH1zxyTDvhg=="], + "zustand": ["zustand@5.0.13", "", { "peerDependencies": { "@types/react": ">=18.0.0", 
"immer": ">=9.0.6", "react": ">=18.0.0", "use-sync-external-store": ">=1.2.0" }, "optionalPeers": ["@types/react", "immer", "react", "use-sync-external-store"] }, "sha512-efI2tVaVQPqtOh114loML/Z80Y4NP3yc+Ff0fYiZJPauNeWZeIp/bRFD7I9bfmCOYBh/PHxlglQ9+wvlwnPikQ=="], "zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="], @@ -4243,17 +4245,15 @@ "@trigger.dev/sdk/cronstrue": ["cronstrue@2.61.0", "", { "bin": { "cronstrue": "bin/cli.js" } }, "sha512-ootN5bvXbIQI9rW94+QsXN5eROtXWwew6NkdGxIRpS/UFWRggL0G5Al7a9GTBFEsuvVhJ2K3CntIIVt7L2ILhA=="], - "@trigger.dev/sdk/debug": ["debug@4.3.7", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="], - "@trigger.dev/sdk/uuid": ["uuid@9.0.1", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA=="], - "@types/cors/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], + "@types/cors/@types/node": ["@types/node@25.6.2", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-sokuT28dxf9JT5Kady1fsXOvI4HVpjZa95NKT5y9PNTIrs2AsobR4GFAA90ZG8M+nxVRLysCXsVj6eGC7Vbrlw=="], "@types/fluent-ffmpeg/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="], "@types/jsdom/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="], - "@types/node-fetch/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, 
"sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], + "@types/node-fetch/@types/node": ["@types/node@25.6.2", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-sokuT28dxf9JT5Kady1fsXOvI4HVpjZa95NKT5y9PNTIrs2AsobR4GFAA90ZG8M+nxVRLysCXsVj6eGC7Vbrlw=="], "@types/nodemailer/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="], @@ -4261,11 +4261,11 @@ "@types/ssh2/@types/node": ["@types/node@18.19.130", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg=="], - "@types/through/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], + "@types/through/@types/node": ["@types/node@25.6.2", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-sokuT28dxf9JT5Kady1fsXOvI4HVpjZa95NKT5y9PNTIrs2AsobR4GFAA90ZG8M+nxVRLysCXsVj6eGC7Vbrlw=="], - "@types/ws/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], + "@types/ws/@types/node": ["@types/node@25.6.2", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-sokuT28dxf9JT5Kady1fsXOvI4HVpjZa95NKT5y9PNTIrs2AsobR4GFAA90ZG8M+nxVRLysCXsVj6eGC7Vbrlw=="], - "@types/yauzl/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], + "@types/yauzl/@types/node": ["@types/node@25.6.2", "", { "dependencies": { "undici-types": "~7.19.0" } }, 
"sha512-sokuT28dxf9JT5Kady1fsXOvI4HVpjZa95NKT5y9PNTIrs2AsobR4GFAA90ZG8M+nxVRLysCXsVj6eGC7Vbrlw=="], "accepts/mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="], @@ -4293,7 +4293,7 @@ "c12/pkg-types": ["pkg-types@2.3.1", "", { "dependencies": { "confbox": "^0.2.4", "exsolve": "^1.0.8", "pathe": "^2.0.3" } }, "sha512-y+ichcgc2LrADuhLNAx8DFjVfgz91pRxfZdI3UDhxHvcVEZsenLO+7XaU5vOp0u/7V/wZ+plyuQxtrDlZJ+yeg=="], - "chrome-launcher/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], + "chrome-launcher/@types/node": ["@types/node@25.6.2", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-sokuT28dxf9JT5Kady1fsXOvI4HVpjZa95NKT5y9PNTIrs2AsobR4GFAA90ZG8M+nxVRLysCXsVj6eGC7Vbrlw=="], "chromium-bidi/zod": ["zod@3.23.8", "", {}, "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g=="], @@ -4317,7 +4317,7 @@ "d3-sankey/d3-shape": ["d3-shape@1.3.7", "", { "dependencies": { "d3-path": "1" } }, "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw=="], - "docx/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], + "docx/@types/node": ["@types/node@25.6.2", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-sokuT28dxf9JT5Kady1fsXOvI4HVpjZa95NKT5y9PNTIrs2AsobR4GFAA90ZG8M+nxVRLysCXsVj6eGC7Vbrlw=="], "docx/nanoid": ["nanoid@5.1.11", "", { "bin": { "nanoid": "bin/nanoid.js" } }, "sha512-v+KEsUv2ps74PaSKv0gHTxTCgMXOIfBEbaqa6w6ISIGC7ZsvHN4N9oJ8d4cmf0n5oTzQz2SLmThbQWhjd/8eKg=="], @@ -4325,9 +4325,11 @@ "e2b/glob": ["glob@11.1.0", "", { "dependencies": { "foreground-child": "^3.3.1", 
"jackspeak": "^4.1.1", "minimatch": "^10.1.1", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^2.0.0" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-vuNwKSaKiqm7g0THUBu2x7ckSs3XJLXE+2ssL7/MfTGPLLcrJQ/4Uq1CjPTtO5cCIiRxqvN6Twy1qOwhL0Xjcw=="], + "echarts/tslib": ["tslib@2.3.0", "", {}, "sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg=="], + "encoding-sniffer/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], - "engine.io/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], + "engine.io/@types/node": ["@types/node@25.6.2", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-sokuT28dxf9JT5Kady1fsXOvI4HVpjZa95NKT5y9PNTIrs2AsobR4GFAA90ZG8M+nxVRLysCXsVj6eGC7Vbrlw=="], "engine.io/ws": ["ws@8.18.3", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg=="], @@ -4395,6 +4397,8 @@ "inquirer/ora": ["ora@5.4.1", "", { "dependencies": { "bl": "^4.1.0", "chalk": "^4.1.0", "cli-cursor": "^3.1.0", "cli-spinners": "^2.5.0", "is-interactive": "^1.0.0", "is-unicode-supported": "^0.1.0", "log-symbols": "^4.1.0", "strip-ansi": "^6.0.0", "wcwidth": "^1.0.1" } }, "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ=="], + "is-binary-path/binary-extensions": ["binary-extensions@2.3.0", "", {}, "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw=="], + "isomorphic-unfetch/node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, 
"peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="], "katex/commander": ["commander@8.3.0", "", {}, "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww=="], @@ -4483,7 +4487,7 @@ "posthog-js/fflate": ["fflate@0.4.8", "", {}, "sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA=="], - "protobufjs/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], + "protobufjs/@types/node": ["@types/node@25.6.2", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-sokuT28dxf9JT5Kady1fsXOvI4HVpjZa95NKT5y9PNTIrs2AsobR4GFAA90ZG8M+nxVRLysCXsVj6eGC7Vbrlw=="], "proxy-addr/ipaddr.js": ["ipaddr.js@1.9.1", "", {}, "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="], @@ -4517,7 +4521,7 @@ "restore-cursor/signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="], - "router/path-to-regexp": ["path-to-regexp@8.4.2", "", {}, "sha512-qRcuIdP69NPm4qbACK+aDogI5CBDMi1jKe0ry5rSQJz8JVLsC7jV8XpiJjGRLLol3N+R5ihGYcrPLTno6pAdBA=="], + "rollup/@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="], "sim/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="], @@ -4527,11 +4531,11 @@ "sim/tailwindcss": ["tailwindcss@3.4.19", "", { "dependencies": { "@alloc/quick-lru": "^5.2.0", "arg": "^5.0.2", "chokidar": "^3.6.0", "didyoumean": "^1.2.2", "dlv": "^1.1.3", "fast-glob": "^3.3.2", "glob-parent": 
"^6.0.2", "is-glob": "^4.0.3", "jiti": "^1.21.7", "lilconfig": "^3.1.3", "micromatch": "^4.0.8", "normalize-path": "^3.0.0", "object-hash": "^3.0.0", "picocolors": "^1.1.1", "postcss": "^8.4.47", "postcss-import": "^15.1.0", "postcss-js": "^4.0.1", "postcss-load-config": "^4.0.2 || ^5.0 || ^6.0", "postcss-nested": "^6.2.0", "postcss-selector-parser": "^6.1.2", "resolve": "^1.22.8", "sucrase": "^3.35.0" }, "bin": { "tailwind": "lib/cli.js", "tailwindcss": "lib/cli.js" } }, "sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ=="], - "simstudio/@types/node": ["@types/node@20.19.39", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-orrrD74MBUyK8jOAD/r0+lfa1I2MO6I+vAkmAWzMYbCcgrN4lCrmK52gRFQq/JRxfYPfonkr4b0jcY7Olqdqbw=="], + "simstudio/@types/node": ["@types/node@20.19.40", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-xxx6M2IpSTnnKcR0cMvIiohkiCx20/oRPtWGbenFygKCGl3zqUzdNjQ/1V4solq1LU+dgv0nQzeGOuqkqZGg0Q=="], "simstudio/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], - "simstudio-ts-sdk/@types/node": ["@types/node@20.19.39", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-orrrD74MBUyK8jOAD/r0+lfa1I2MO6I+vAkmAWzMYbCcgrN4lCrmK52gRFQq/JRxfYPfonkr4b0jcY7Olqdqbw=="], + "simstudio-ts-sdk/@types/node": ["@types/node@20.19.40", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-xxx6M2IpSTnnKcR0cMvIiohkiCx20/oRPtWGbenFygKCGl3zqUzdNjQ/1V4solq1LU+dgv0nQzeGOuqkqZGg0Q=="], "slice-ansi/ansi-styles": ["ansi-styles@6.2.3", "", {}, "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg=="], @@ -4595,6 +4599,8 @@ "zod-validation-error/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], + "zrender/tslib": ["tslib@2.3.0", 
"", {}, "sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg=="], + "@aws-crypto/sha1-browser/@smithy/util-utf8/@smithy/util-buffer-from": ["@smithy/util-buffer-from@2.2.0", "", { "dependencies": { "@smithy/is-array-buffer": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA=="], "@aws-crypto/sha256-browser/@smithy/util-utf8/@smithy/util-buffer-from": ["@smithy/util-buffer-from@2.2.0", "", { "dependencies": { "@smithy/is-array-buffer": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA=="], @@ -4745,6 +4751,8 @@ "cytoscape-fcose/cose-base/layout-base": ["layout-base@2.0.1", "", {}, "sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg=="], + "d3-sankey/d3-array/internmap": ["internmap@1.0.1", "", {}, "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw=="], + "d3-sankey/d3-shape/d3-path": ["d3-path@1.0.9", "", {}, "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg=="], "docx/@types/node/undici-types": ["undici-types@7.19.2", "", {}, "sha512-qYVnV5OEm2AW8cJMCpdV20CDyaN3g0AjDlOGf1OW4iaDEx8MwdtChUp4zu4H0VP3nDRF/8RKWH+IPp9uW0YGZg=="], @@ -4943,6 +4951,8 @@ "oauth2-mock-server/express/merge-descriptors": ["merge-descriptors@1.0.3", "", {}, "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ=="], + "oauth2-mock-server/express/path-to-regexp": ["path-to-regexp@0.1.13", "", {}, "sha512-A/AGNMFN3c8bOlvV9RreMdrv7jsmF9XIfDeCd87+I8RNg6s78BhJxMu69NEMHBSJFxKidViTEdruRwEk/WIKqA=="], + "oauth2-mock-server/express/qs": ["qs@6.14.2", "", { "dependencies": { "side-channel": "^1.1.0" } }, "sha512-V/yCWTTF7VJ9hIh18Ugr2zhJMP01MY7c5kh4J870L7imm6/DIzBsNLTXzMwUA3yZ5b/KBqLx8Kp3uRvd7xSe3Q=="], "oauth2-mock-server/express/send": 
["send@0.19.2", "", { "dependencies": { "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "~0.5.2", "http-errors": "~2.0.1", "mime": "1.6.0", "ms": "2.1.3", "on-finished": "~2.4.1", "range-parser": "~1.2.1", "statuses": "~2.0.2" } }, "sha512-VMbMxbDeehAxpOtWJXlcUS5E8iXh6QmN+BkRX1GARS3wRaXEEgzCcB10gTQazO42tpNIya8xIyNx8fll1OFPrg=="], @@ -5125,7 +5135,7 @@ "@trigger.dev/core/socket.io-client/engine.io-client/xmlhttprequest-ssl": ["xmlhttprequest-ssl@2.0.0", "", {}, "sha512-QKxVRxiRACQcVuQEYFsI1hhkrMlrXHPegbbd1yn9UHOmRxY+si12nQYzri3vbzt8VdTTRviqcKxcyllFas5z2A=="], - "@trigger.dev/core/socket.io/engine.io/@types/node": ["@types/node@25.6.0", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ=="], + "@trigger.dev/core/socket.io/engine.io/@types/node": ["@types/node@25.6.2", "", { "dependencies": { "undici-types": "~7.19.0" } }, "sha512-sokuT28dxf9JT5Kady1fsXOvI4HVpjZa95NKT5y9PNTIrs2AsobR4GFAA90ZG8M+nxVRLysCXsVj6eGC7Vbrlw=="], "@trigger.dev/core/socket.io/engine.io/cookie": ["cookie@0.4.2", "", {}, "sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA=="], From f122b686a408285d8b77be9a6d23a395e2e90463 Mon Sep 17 00:00:00 2001 From: Waleed Date: Sat, 9 May 2026 14:44:10 -0700 Subject: [PATCH 32/33] fix(uploads): write workspaceFiles row when issuing presigned URL (#4537) * fix(uploads): write workspaceFiles row when issuing presigned URL * test(uploads): cover insertFileMetadata failure path in presigned route --- .../sim/app/api/files/presigned/route.test.ts | 121 ++++++++++++++++++ apps/sim/app/api/files/presigned/route.ts | 31 +++++ 2 files changed, 152 insertions(+) diff --git a/apps/sim/app/api/files/presigned/route.test.ts b/apps/sim/app/api/files/presigned/route.test.ts index 7c4893dc44d..724aab5d065 100644 --- 
a/apps/sim/app/api/files/presigned/route.test.ts +++ b/apps/sim/app/api/files/presigned/route.test.ts @@ -25,6 +25,7 @@ const { mockGetUserEntityPermissions, mockGenerateWorkspaceFileKey, mockGenerateExecutionFileKey, + mockInsertFileMetadata, } = vi.hoisted(() => ({ mockVerifyFileAccess: vi.fn().mockResolvedValue(true), mockVerifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true), @@ -50,6 +51,7 @@ const { (ctx: { workspaceId: string; workflowId: string; executionId: string }, fileName: string) => `execution/${ctx.workspaceId}/${ctx.workflowId}/${ctx.executionId}/${fileName}` ), + mockInsertFileMetadata: vi.fn().mockResolvedValue({ id: 'wf_test' }), })) vi.mock('@/app/api/files/authorization', () => ({ @@ -89,6 +91,10 @@ vi.mock('@/lib/uploads/contexts/execution/utils', () => ({ generateExecutionFileKey: mockGenerateExecutionFileKey, })) +vi.mock('@/lib/uploads/server/metadata', () => ({ + insertFileMetadata: mockInsertFileMetadata, +})) + vi.mock('@/lib/uploads/utils/file-utils', () => ({ isImageFileType: mockIsImageFileType, })) @@ -614,6 +620,57 @@ describe('/api/files/presigned', () => { const response = await POST(request) expect(response.status).toBe(403) }) + + it('inserts a workspaceFiles row with context=mothership so previews authorize', async () => { + setupFileApiMocks({ cloudEnabled: true, storageProvider: 's3' }) + + const request = new NextRequest( + 'http://localhost:3000/api/files/presigned?type=mothership&workspaceId=ws-1', + { + method: 'POST', + body: JSON.stringify({ + fileName: 'screenshot.png', + contentType: 'image/png', + fileSize: 4096, + }), + } + ) + + const response = await POST(request) + const data = await response.json() + + expect(response.status).toBe(200) + expect(mockInsertFileMetadata).toHaveBeenCalledTimes(1) + expect(mockInsertFileMetadata).toHaveBeenCalledWith({ + key: data.fileInfo.key, + userId: 'test-user-id', + workspaceId: 'ws-1', + context: 'mothership', + originalName: 'screenshot.png', + contentType: 'image/png', + 
size: 4096, + }) + }) + + it('returns 500 when insertFileMetadata fails so callers do not get an unauthorizable URL', async () => { + setupFileApiMocks({ cloudEnabled: true, storageProvider: 's3' }) + mockInsertFileMetadata.mockRejectedValueOnce(new Error('DB connection lost')) + + const request = new NextRequest( + 'http://localhost:3000/api/files/presigned?type=mothership&workspaceId=ws-1', + { + method: 'POST', + body: JSON.stringify({ + fileName: 'screenshot.png', + contentType: 'image/png', + fileSize: 4096, + }), + } + ) + + const response = await POST(request) + expect(response.status).toBe(500) + }) }) describe('execution uploads', () => { @@ -682,6 +739,70 @@ describe('/api/files/presigned', () => { const response = await POST(request) expect(response.status).toBe(400) }) + + it('inserts a workspaceFiles row with context=execution so previews authorize', async () => { + setupFileApiMocks({ cloudEnabled: true, storageProvider: 's3' }) + + const request = new NextRequest( + 'http://localhost:3000/api/files/presigned?type=execution&workspaceId=ws-1&workflowId=wf-1&executionId=exec-1', + { + method: 'POST', + body: JSON.stringify({ + fileName: 'output.mp4', + contentType: 'video/mp4', + fileSize: 4096, + }), + } + ) + + const response = await POST(request) + const data = await response.json() + + expect(response.status).toBe(200) + expect(mockInsertFileMetadata).toHaveBeenCalledTimes(1) + expect(mockInsertFileMetadata).toHaveBeenCalledWith({ + key: data.fileInfo.key, + userId: 'test-user-id', + workspaceId: 'ws-1', + context: 'execution', + originalName: 'output.mp4', + contentType: 'video/mp4', + size: 4096, + }) + }) + }) + + describe('workspace-logos uploads', () => { + it('inserts a workspaceFiles row with context=workspace-logos so logos authorize', async () => { + setupFileApiMocks({ cloudEnabled: true, storageProvider: 's3' }) + + const request = new NextRequest( + 'http://localhost:3000/api/files/presigned?type=workspace-logos&workspaceId=ws-1', + { + 
method: 'POST', + body: JSON.stringify({ + fileName: 'logo.png', + contentType: 'image/png', + fileSize: 4096, + }), + } + ) + + const response = await POST(request) + const data = await response.json() + + expect(response.status).toBe(200) + expect(mockInsertFileMetadata).toHaveBeenCalledTimes(1) + expect(mockInsertFileMetadata).toHaveBeenCalledWith({ + key: data.fileInfo.key, + userId: 'test-user-id', + workspaceId: 'ws-1', + context: 'workspace-logos', + originalName: 'logo.png', + contentType: 'image/png', + size: 4096, + }) + }) }) describe('knowledge-base uploads', () => { diff --git a/apps/sim/app/api/files/presigned/route.ts b/apps/sim/app/api/files/presigned/route.ts index 7c4bb01ec64..8c3eda979d4 100644 --- a/apps/sim/app/api/files/presigned/route.ts +++ b/apps/sim/app/api/files/presigned/route.ts @@ -10,6 +10,7 @@ import { USE_BLOB_STORAGE } from '@/lib/uploads/config' import { generateExecutionFileKey } from '@/lib/uploads/contexts/execution/utils' import { generateWorkspaceFileKey } from '@/lib/uploads/contexts/workspace/workspace-file-manager' import { generatePresignedUploadUrl, hasCloudStorage } from '@/lib/uploads/core/storage-service' +import { insertFileMetadata } from '@/lib/uploads/server/metadata' import { isImageFileType } from '@/lib/uploads/utils/file-utils' import { validateAttachmentFileType, validateFileType } from '@/lib/uploads/utils/validation' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' @@ -157,6 +158,16 @@ export const POST = withRouteHandler(async (request: NextRequest) => { expirationSeconds: 3600, metadata: { workspaceId }, }) + + await insertFileMetadata({ + key: presignedUrlResponse.key, + userId: sessionUserId, + workspaceId, + context: 'mothership', + originalName: fileName, + contentType, + size: fileSize, + }) } else if (uploadType === 'execution') { const workflowId = request.nextUrl.searchParams.get('workflowId') const executionId = request.nextUrl.searchParams.get('executionId') @@ 
-191,6 +202,16 @@ export const POST = withRouteHandler(async (request: NextRequest) => { expirationSeconds: 3600, metadata: { workspaceId, workflowId, executionId }, }) + + await insertFileMetadata({ + key: presignedUrlResponse.key, + userId: sessionUserId, + workspaceId, + context: 'execution', + originalName: fileName, + contentType, + size: fileSize, + }) } else if (uploadType === 'workspace-logos') { const workspaceId = request.nextUrl.searchParams.get('workspaceId') if (!workspaceId?.trim()) { @@ -222,6 +243,16 @@ export const POST = withRouteHandler(async (request: NextRequest) => { expirationSeconds: 3600, metadata: { workspaceId }, }) + + await insertFileMetadata({ + key: presignedUrlResponse.key, + userId: sessionUserId, + workspaceId, + context: 'workspace-logos', + originalName: fileName, + contentType, + size: fileSize, + }) } else { if (uploadType === 'profile-pictures') { if (!sessionUserId?.trim()) { From 6544e0a7831deabaa18ef48f6a43fc11bdba4ecc Mon Sep 17 00:00:00 2001 From: Waleed Date: Sat, 9 May 2026 14:52:48 -0700 Subject: [PATCH 33/33] fix(tables): inline editing center alignment in table cells (#4538) --- .../[tableId]/components/table-grid/cells/cell-content.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/cell-content.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/cell-content.tsx index 2fbbe78f194..3447d535327 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/cell-content.tsx +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table-grid/cells/cell-content.tsx @@ -43,7 +43,7 @@ export function CellContent({ return ( <> {isEditing && ( -
    +