Skip to content
Open
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Merge staging into fix/workflow-persistence-error to resolve conflicts in persistence utils
  • Loading branch information
Siddhartha-singh01 committed Apr 4, 2026
commit 079b113e8741e7debb2b31f10c267120af3d2822
114 changes: 53 additions & 61 deletions apps/sim/lib/workflows/persistence/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -545,38 +545,39 @@ export async function saveWorkflowToNormalizedTables(
tx.delete(workflowSubflows).where(eq(workflowSubflows.workflowId, workflowId)),
])

const CHUNK_SIZE = 50

// Insert blocks
if (Object.keys(state.blocks).length > 0) {
const blockInserts = Object.values(state.blocks).map((block) => ({
id: block.id,
workflowId: workflowId,
type: block.type,
name: block.name || '',
positionX: String(block.position?.x || 0),
positionY: String(block.position?.y || 0),
enabled: block.enabled ?? true,
horizontalHandles: block.horizontalHandles ?? true,
advancedMode: block.advancedMode ?? false,
triggerMode: block.triggerMode ?? false,
height: String(block.height || 0),
subBlocks: block.subBlocks || {},
outputs: block.outputs || {},
data: block.data || {},
parentId: block.data?.parentId || null,
extent: block.data?.extent || null,
locked: block.locked ?? false,
}))

// SQLite limits bound parameters to 999 per statement.
// workflowBlocks has 17 fields -> max safe chunk = floor(999/17) = 58.
// Using 50 for a conservative margin.
for (let i = 0; i < blockInserts.length; i += CHUNK_SIZE) {
await tx.insert(workflowBlocks).values(blockInserts.slice(i, i + CHUNK_SIZE))
}
const CHUNK_SIZE = 50

// Insert blocks
if (Object.keys(state.blocks).length > 0) {
const blockInserts = Object.values(state.blocks).map((block) => ({
id: block.id,
workflowId: workflowId,
type: block.type,
name: block.name || '',
positionX: String(block.position?.x || 0),
positionY: String(block.position?.y || 0),
enabled: block.enabled ?? true,
horizontalHandles: block.horizontalHandles ?? true,
advancedMode: block.advancedMode ?? false,
triggerMode: block.triggerMode ?? false,
height: String(block.height || 0),
subBlocks: block.subBlocks || {},
outputs: block.outputs || {},
data: block.data || {},
parentId: block.data?.parentId || null,
extent: block.data?.extent || null,
locked: block.locked ?? false,
}))

// SQLite limits bound parameters to 999 per statement.
// workflowBlocks has 17 fields -> max safe chunk = floor(999/17) = 58.
// Using 50 for a conservative margin.
for (let i = 0; i < blockInserts.length; i += CHUNK_SIZE) {
await tx.insert(workflowBlocks).values(blockInserts.slice(i, i + CHUNK_SIZE))
}
}

// Insert edges
if (state.edges.length > 0) {
const edgeInserts = state.edges.map((edge) => ({
id: edge.id,
Expand All @@ -587,10 +588,10 @@ export async function saveWorkflowToNormalizedTables(
targetHandle: edge.targetHandle || null,
}))

for (let i = 0; i < edgeInserts.length; i += CHUNK_SIZE) {
await tx.insert(workflowEdges).values(edgeInserts.slice(i, i + CHUNK_SIZE))
}
for (let i = 0; i < edgeInserts.length; i += CHUNK_SIZE) {
await tx.insert(workflowEdges).values(edgeInserts.slice(i, i + CHUNK_SIZE))
}
}

const subflowInserts: SubflowInsert[] = []

Expand All @@ -601,22 +602,6 @@ export async function saveWorkflowToNormalizedTables(
type: SUBFLOW_TYPES.LOOP,
config: loop,
})

// Add parallels
Object.values(canonicalParallels).forEach((parallel) => {
subflowInserts.push({
id: parallel.id,
workflowId: workflowId,
type: SUBFLOW_TYPES.PARALLEL,
config: parallel,
})
})

if (subflowInserts.length > 0) {
for (let i = 0; i < subflowInserts.length; i += CHUNK_SIZE) {
await tx.insert(workflowSubflows).values(subflowInserts.slice(i, i + CHUNK_SIZE))
}
}
})

Object.values(canonicalParallels).forEach((parallel) => {
Expand All @@ -629,7 +614,9 @@ export async function saveWorkflowToNormalizedTables(
})

if (subflowInserts.length > 0) {
await tx.insert(workflowSubflows).values(subflowInserts)
for (let i = 0; i < subflowInserts.length; i += CHUNK_SIZE) {
await tx.insert(workflowSubflows).values(subflowInserts.slice(i, i + CHUNK_SIZE))
}
}
}

Expand Down Expand Up @@ -912,18 +899,23 @@ export function regenerateWorkflowStateIds(state: RegenerateStateInput): Regener

// Regenerate edges with updated source/target references
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P2 Comment indented to wrong level

This comment was shifted to 4-space indentation, but the forEach call it labels on line 902 remains at 2-space indentation. The mismatch makes the comment appear detached from the code it describes.

Suggested change
// Regenerate edges with updated source/target references
// Regenerate edges with updated source/target references


; (state.edges || []).forEach((edge: Edge) => {
const newId = edgeIdMapping.get(edge.id)!
const newSource = blockIdMapping.get(edge.source) || edge.source
const newTarget = blockIdMapping.get(edge.target) || edge.target

newEdges.push({
...edge,
id: newId,
source: newSource,
target: newTarget,
})
;(state.edges || []).forEach((edge: Edge) => {
const newId = edgeIdMapping.get(edge.id)!
const newSource = blockIdMapping.get(edge.source) || edge.source
const newTarget = blockIdMapping.get(edge.target) || edge.target
const newSourceHandle =
edge.sourceHandle && blockIdMapping.has(edge.source)
? remapConditionEdgeHandle(edge.sourceHandle, edge.source, newSource)
: edge.sourceHandle

newEdges.push({
...edge,
id: newId,
source: newSource,
target: newTarget,
sourceHandle: newSourceHandle,
})
})

// Regenerate loops with updated node references
Object.entries(state.loops || {}).forEach(([oldId, loop]) => {
Expand Down
You are viewing a condensed version of this merge commit. You can view the full changes here.