Skip to content

Commit 50817dc

Browse files
fix(table): preserve workflow groups on CSV column-add and dispatch after tx commit
Two bugs in the CSV-import path:

- addTableColumnsWithTx rebuilt the schema with only `columns`, dropping `workflowGroups` (and any other top-level schema fields). Importing CSV into a table that has workflow groups erased the group config. Spread `table.schema` first so sibling fields survive.
- batchInsertRowsWithTx fired fireTableTrigger and scheduleRunsForRows from inside the caller's transaction. Both read through the global db connection, so they could run before the inserts committed and see no rows. Extracted the dispatch into dispatchAfterBatchInsert; the non-tx wrapper fires it after `db.transaction(...)` resolves, and the CSV import route does the same after its tx.
1 parent 81845ae commit 50817dc

3 files changed

Lines changed: 38 additions & 9 deletions

File tree

apps/sim/app/api/table/[tableId]/import/route.test.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,11 +11,13 @@ const {
1111
mockBatchInsertRowsWithTx,
1212
mockReplaceTableRowsWithTx,
1313
mockAddTableColumnsWithTx,
14+
mockDispatchAfterBatchInsert,
1415
} = vi.hoisted(() => ({
1516
mockCheckAccess: vi.fn(),
1617
mockBatchInsertRowsWithTx: vi.fn(),
1718
mockReplaceTableRowsWithTx: vi.fn(),
1819
mockAddTableColumnsWithTx: vi.fn(),
20+
mockDispatchAfterBatchInsert: vi.fn(),
1921
}))
2022

2123
vi.mock('@sim/utils/id', () => ({
@@ -44,6 +46,7 @@ vi.mock('@/lib/table/service', () => ({
4446
batchInsertRowsWithTx: mockBatchInsertRowsWithTx,
4547
replaceTableRowsWithTx: mockReplaceTableRowsWithTx,
4648
addTableColumnsWithTx: mockAddTableColumnsWithTx,
49+
dispatchAfterBatchInsert: mockDispatchAfterBatchInsert,
4750
}))
4851

4952
import { POST } from '@/app/api/table/[tableId]/import/route'

apps/sim/app/api/table/[tableId]/import/route.ts

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -23,11 +23,13 @@ import {
2323
type CsvHeaderMapping,
2424
CsvImportValidationError,
2525
coerceRowsForTable,
26+
dispatchAfterBatchInsert,
2627
inferColumnType,
2728
parseCsvBuffer,
2829
replaceTableRowsWithTx,
2930
sanitizeName,
3031
type TableDefinition,
32+
type TableRow,
3133
type TableSchema,
3234
validateMapping,
3335
} from '@/lib/table'
@@ -213,13 +215,13 @@ export const POST = withRouteHandler(async (request: NextRequest, { params }: Ro
213215
}
214216

215217
try {
216-
const inserted = await db.transaction(async (trx) => {
218+
const txResult = await db.transaction(async (trx) => {
217219
let working = table
218220
if (additions.length > 0) {
219221
working = await addTableColumnsWithTx(trx, table, additions, requestId)
220222
}
221223

222-
let total = 0
224+
const allInserted: TableRow[] = []
223225
for (let i = 0; i < coerced.length; i += CSV_MAX_BATCH_SIZE) {
224226
const batch = coerced.slice(i, i + CSV_MAX_BATCH_SIZE)
225227
const batchRequestId = generateId().slice(0, 8)
@@ -234,10 +236,15 @@ export const POST = withRouteHandler(async (request: NextRequest, { params }: Ro
234236
working,
235237
batchRequestId
236238
)
237-
total += result.length
239+
allInserted.push(...result)
238240
}
239-
return total
241+
return { inserted: allInserted, working }
240242
})
243+
const { inserted: insertedRows, working: finalTable } = txResult
244+
const inserted = insertedRows.length
245+
// Fire trigger + scheduler AFTER the tx commits — both read through the
246+
// global db connection and would otherwise see no rows.
247+
dispatchAfterBatchInsert(finalTable, insertedRows, requestId)
241248

242249
logger.info(`[${requestId}] Append CSV imported`, {
243250
tableId: table.id,

apps/sim/lib/table/service.ts

Lines changed: 24 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -571,7 +571,12 @@ export async function addTableColumnsWithTx(
571571
)
572572
}
573573

574-
const updatedSchema: TableSchema = { columns: [...table.schema.columns, ...additions] }
574+
// Spread `table.schema` first so workflow groups (and any future top-level
575+
// schema fields) survive a CSV import that only adds plain columns.
576+
const updatedSchema: TableSchema = {
577+
...table.schema,
578+
columns: [...table.schema.columns, ...additions],
579+
}
575580
const now = new Date()
576581

577582
await trx
@@ -945,7 +950,9 @@ export async function batchInsertRows(
945950
table: TableDefinition,
946951
requestId: string
947952
): Promise<TableRow[]> {
948-
return db.transaction((trx) => batchInsertRowsWithTx(trx, data, table, requestId))
953+
const result = await db.transaction((trx) => batchInsertRowsWithTx(trx, data, table, requestId))
954+
dispatchAfterBatchInsert(table, result, requestId)
955+
return result
949956
}
950957

951958
/**
@@ -1043,12 +1050,24 @@ export async function batchInsertRowsWithTx(
10431050
updatedAt: r.updatedAt,
10441051
}))
10451052

1046-
void fireTableTrigger(data.tableId, table.name, 'insert', result, null, table.schema, requestId)
1047-
void scheduleRunsForRows(table, result)
1048-
10491053
return result
10501054
}
10511055

1056+
/**
1057+
* Side-effect dispatch for an insert batch. Caller fires this AFTER the
1058+
* surrounding transaction commits — `fireTableTrigger` and
1059+
* `scheduleRunsForRows` both read through the global db connection, so firing
1060+
* inside the tx can see no rows and no-op.
1061+
*/
1062+
export function dispatchAfterBatchInsert(
1063+
table: TableDefinition,
1064+
result: TableRow[],
1065+
requestId: string
1066+
): void {
1067+
void fireTableTrigger(table.id, table.name, 'insert', result, null, table.schema, requestId)
1068+
void scheduleRunsForRows(table, result)
1069+
}
1070+
10521071
/**
10531072
* Replaces all rows in a table with a new set of rows. Deletes existing rows
10541073
* and inserts the provided rows inside a single transaction so the table is

0 commit comments

Comments (0)