Skip to content

Commit 9400df6

Browse files
committed
Merge branch 'feat/mothership-copilot' of github.com:simstudioai/sim into feat/mothership-copilot
2 parents d23afb9 + 1ff89cd commit 9400df6

File tree

8 files changed

+215
-4
lines changed

8 files changed

+215
-4
lines changed

apps/sim/app/api/files/serve/[...path]/route.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -111,6 +111,7 @@ async function handleLocalFile(filename: string, userId: string): Promise<NextRe
111111
buffer: fileBuffer,
112112
contentType,
113113
filename,
114+
cacheControl: contextParam === 'workspace' ? 'private, no-cache, must-revalidate' : undefined,
114115
})
115116
} catch (error) {
116117
logger.error('Error reading local file:', error)
@@ -172,6 +173,7 @@ async function handleCloudProxy(
172173
buffer: fileBuffer,
173174
contentType,
174175
filename: originalFilename,
176+
cacheControl: context === 'workspace' ? 'private, no-cache, must-revalidate' : undefined,
175177
})
176178
} catch (error) {
177179
logger.error('Error downloading from cloud storage:', error)

apps/sim/app/api/files/utils.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ export interface FileResponse {
2121
buffer: Buffer
2222
contentType: string
2323
filename: string
24+
cacheControl?: string
2425
}
2526

2627
export class FileNotFoundError extends Error {
@@ -256,7 +257,7 @@ export function createFileResponse(file: FileResponse): NextResponse {
256257
headers: {
257258
'Content-Type': contentType,
258259
'Content-Disposition': `${disposition}; ${encodeFilenameForHeader(file.filename)}`,
259-
'Cache-Control': 'public, max-age=31536000',
260+
'Cache-Control': file.cacheControl || 'public, max-age=31536000',
260261
'X-Content-Type-Options': 'nosniff',
261262
'Content-Security-Policy': "default-src 'none'; style-src 'unsafe-inline'; sandbox;",
262263
},

apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/table.tsx

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -540,6 +540,9 @@ export function Table({
540540
if (!el) return
541541

542542
const handleKeyDown = (e: KeyboardEvent) => {
543+
const tag = (e.target as HTMLElement).tagName
544+
if (tag === 'INPUT' || tag === 'TEXTAREA' || tag === 'SELECT') return
545+
543546
const anchor = selectionAnchorRef.current
544547
if (!anchor || editingCellRef.current || editingEmptyCellRef.current) return
545548

apps/sim/hooks/queries/workspace-files.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -62,8 +62,8 @@ export function useWorkspaceFiles(workspaceId: string) {
6262
* Fetch file content as text via the serve URL
6363
*/
6464
async function fetchWorkspaceFileContent(key: string, signal?: AbortSignal): Promise<string> {
65-
const serveUrl = `/api/files/serve/${encodeURIComponent(key)}?context=workspace`
66-
const response = await fetch(serveUrl, { signal })
65+
const serveUrl = `/api/files/serve/${encodeURIComponent(key)}?context=workspace&t=${Date.now()}`
66+
const response = await fetch(serveUrl, { signal, cache: 'no-store' })
6767

6868
if (!response.ok) {
6969
throw new Error('Failed to fetch file content')

apps/sim/lib/copilot/tools/server/table/user-table.ts

Lines changed: 101 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,11 +4,13 @@ import type { UserTableArgs, UserTableResult } from '@/lib/copilot/tools/shared/
44
import {
55
addTableColumn,
66
batchInsertRows,
7+
batchUpdateRows,
78
createTable,
89
deleteColumn,
910
deleteColumns,
1011
deleteRow,
1112
deleteRowsByFilter,
13+
deleteRowsByIds,
1214
deleteTable,
1315
getRowById,
1416
getTableById,
@@ -518,6 +520,105 @@ export const userTableServerTool: BaseServerTool<UserTableArgs, UserTableResult>
518520
}
519521
}
520522

523+
case 'batch_update_rows': {
524+
if (!args.tableId) {
525+
return { success: false, message: 'Table ID is required' }
526+
}
527+
if (!workspaceId) {
528+
return { success: false, message: 'Workspace ID is required' }
529+
}
530+
531+
const rawUpdates = (args as Record<string, unknown>).updates as
532+
| Array<{ rowId: string; data: Record<string, unknown> }>
533+
| undefined
534+
const columnName = (args as Record<string, unknown>).columnName as string | undefined
535+
const valuesMap = (args as Record<string, unknown>).values as
536+
| Record<string, unknown>
537+
| undefined
538+
539+
let updates: Array<{ rowId: string; data: Record<string, unknown> }>
540+
541+
if (rawUpdates && rawUpdates.length > 0) {
542+
updates = rawUpdates
543+
} else if (columnName && valuesMap) {
544+
updates = Object.entries(valuesMap).map(([rowId, value]) => ({
545+
rowId,
546+
data: { [columnName]: value },
547+
}))
548+
} else {
549+
return {
550+
success: false,
551+
message:
552+
'Provide either "updates" array or "columnName" + "values" map',
553+
}
554+
}
555+
556+
if (updates.length > MAX_BATCH_SIZE) {
557+
return {
558+
success: false,
559+
message: `Too many updates (${updates.length}). Maximum is ${MAX_BATCH_SIZE}.`,
560+
}
561+
}
562+
563+
const table = await getTableById(args.tableId)
564+
if (!table) {
565+
return { success: false, message: `Table not found: ${args.tableId}` }
566+
}
567+
568+
const requestId = crypto.randomUUID().slice(0, 8)
569+
const result = await batchUpdateRows(
570+
{
571+
tableId: args.tableId,
572+
updates: updates as Array<{ rowId: string; data: RowData }>,
573+
workspaceId,
574+
},
575+
table,
576+
requestId
577+
)
578+
579+
return {
580+
success: true,
581+
message: `Updated ${result.affectedCount} rows`,
582+
data: { affectedCount: result.affectedCount, affectedRowIds: result.affectedRowIds },
583+
}
584+
}
585+
586+
case 'batch_delete_rows': {
587+
if (!args.tableId) {
588+
return { success: false, message: 'Table ID is required' }
589+
}
590+
if (!workspaceId) {
591+
return { success: false, message: 'Workspace ID is required' }
592+
}
593+
594+
const rowIds = (args as Record<string, unknown>).rowIds as string[] | undefined
595+
if (!rowIds || rowIds.length === 0) {
596+
return { success: false, message: 'rowIds array is required' }
597+
}
598+
599+
if (rowIds.length > MAX_BATCH_SIZE) {
600+
return {
601+
success: false,
602+
message: `Too many row IDs (${rowIds.length}). Maximum is ${MAX_BATCH_SIZE}.`,
603+
}
604+
}
605+
606+
const requestId = crypto.randomUUID().slice(0, 8)
607+
const result = await deleteRowsByIds(
608+
{ tableId: args.tableId, rowIds, workspaceId },
609+
requestId
610+
)
611+
612+
return {
613+
success: true,
614+
message: `Deleted ${result.deletedCount} rows`,
615+
data: {
616+
deletedCount: result.deletedCount,
617+
deletedRowIds: result.deletedRowIds,
618+
},
619+
}
620+
}
621+
521622
case 'create_from_file': {
522623
const filePath = (args as Record<string, unknown>).filePath as string | undefined
523624
if (!filePath) {

apps/sim/lib/copilot/tools/shared/schemas.ts

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -121,6 +121,8 @@ export const UserTableArgsSchema = z.object({
121121
'delete_row',
122122
'update_rows_by_filter',
123123
'delete_rows_by_filter',
124+
'batch_update_rows',
125+
'batch_delete_rows',
124126
'add_column',
125127
'rename_column',
126128
'delete_column',
@@ -135,6 +137,11 @@ export const UserTableArgsSchema = z.object({
135137
rowId: z.string().optional(),
136138
data: z.record(z.any()).optional(),
137139
rows: z.array(z.record(z.any())).optional(),
140+
updates: z
141+
.array(z.object({ rowId: z.string(), data: z.record(z.any()) }))
142+
.optional(),
143+
rowIds: z.array(z.string()).optional(),
144+
values: z.record(z.any()).optional(),
138145
filter: z.any().optional(),
139146
sort: z.record(z.enum(['asc', 'desc'])).optional(),
140147
limit: z.number().optional(),

apps/sim/lib/table/service.ts

Lines changed: 92 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,11 +10,12 @@
1010
import { db } from '@sim/db'
1111
import { userTableDefinitions, userTableRows } from '@sim/db/schema'
1212
import { createLogger } from '@sim/logger'
13-
import { and, count, eq, gt, gte, sql } from 'drizzle-orm'
13+
import { and, count, eq, gt, gte, inArray, sql } from 'drizzle-orm'
1414
import { COLUMN_TYPES, NAME_PATTERN, TABLE_LIMITS, USER_TABLE_ROWS_SQL_NAME } from './constants'
1515
import { buildFilterClause, buildSortClause } from './sql'
1616
import type {
1717
BatchInsertData,
18+
BatchUpdateByIdData,
1819
BulkDeleteByIdsData,
1920
BulkDeleteByIdsResult,
2021
BulkDeleteData,
@@ -1092,6 +1093,96 @@ export async function updateRowsByFilter(
10921093
}
10931094
}
10941095

1096+
/**
1097+
* Updates multiple rows with per-row data in a single transaction.
1098+
* Avoids the race condition of parallel update_row calls overwriting each other.
1099+
*/
1100+
export async function batchUpdateRows(
1101+
data: BatchUpdateByIdData,
1102+
table: TableDefinition,
1103+
requestId: string
1104+
): Promise<BulkOperationResult> {
1105+
if (data.updates.length === 0) {
1106+
return { affectedCount: 0, affectedRowIds: [] }
1107+
}
1108+
1109+
const rowIds = data.updates.map((u) => u.rowId)
1110+
const existingRows = await db
1111+
.select({ id: userTableRows.id, data: userTableRows.data })
1112+
.from(userTableRows)
1113+
.where(
1114+
and(
1115+
eq(userTableRows.tableId, data.tableId),
1116+
eq(userTableRows.workspaceId, data.workspaceId),
1117+
inArray(userTableRows.id, rowIds)
1118+
)
1119+
)
1120+
1121+
const existingMap = new Map(existingRows.map((r) => [r.id, r.data as RowData]))
1122+
1123+
const missing = rowIds.filter((id) => !existingMap.has(id))
1124+
if (missing.length > 0) {
1125+
throw new Error(`Rows not found: ${missing.join(', ')}`)
1126+
}
1127+
1128+
const mergedUpdates: Array<{ rowId: string; mergedData: RowData }> = []
1129+
for (const update of data.updates) {
1130+
const existing = existingMap.get(update.rowId)!
1131+
const merged = { ...existing, ...update.data }
1132+
1133+
const sizeValidation = validateRowSize(merged)
1134+
if (!sizeValidation.valid) {
1135+
throw new Error(`Row ${update.rowId}: ${sizeValidation.errors.join(', ')}`)
1136+
}
1137+
1138+
const schemaValidation = validateRowAgainstSchema(merged, table.schema)
1139+
if (!schemaValidation.valid) {
1140+
throw new Error(`Row ${update.rowId}: ${schemaValidation.errors.join(', ')}`)
1141+
}
1142+
1143+
mergedUpdates.push({ rowId: update.rowId, mergedData: merged })
1144+
}
1145+
1146+
const uniqueColumns = getUniqueColumns(table.schema)
1147+
if (uniqueColumns.length > 0) {
1148+
for (const { rowId, mergedData } of mergedUpdates) {
1149+
const uniqueValidation = await checkUniqueConstraintsDb(
1150+
data.tableId,
1151+
mergedData,
1152+
table.schema,
1153+
rowId
1154+
)
1155+
if (!uniqueValidation.valid) {
1156+
throw new Error(`Row ${rowId}: ${uniqueValidation.errors.join(', ')}`)
1157+
}
1158+
}
1159+
}
1160+
1161+
const now = new Date()
1162+
1163+
await db.transaction(async (trx) => {
1164+
for (let i = 0; i < mergedUpdates.length; i += TABLE_LIMITS.UPDATE_BATCH_SIZE) {
1165+
const batch = mergedUpdates.slice(i, i + TABLE_LIMITS.UPDATE_BATCH_SIZE)
1166+
const updatePromises = batch.map(({ rowId, mergedData }) =>
1167+
trx
1168+
.update(userTableRows)
1169+
.set({ data: mergedData, updatedAt: now })
1170+
.where(eq(userTableRows.id, rowId))
1171+
)
1172+
await Promise.all(updatePromises)
1173+
}
1174+
})
1175+
1176+
logger.info(
1177+
`[${requestId}] Batch updated ${mergedUpdates.length} rows in table ${data.tableId}`
1178+
)
1179+
1180+
return {
1181+
affectedCount: mergedUpdates.length,
1182+
affectedRowIds: mergedUpdates.map((u) => u.rowId),
1183+
}
1184+
}
1185+
10951186
type DbTransaction = Parameters<Parameters<typeof db.transaction>[0]>[0]
10961187

10971188
/**

apps/sim/lib/table/types.ts

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -209,6 +209,12 @@ export interface BulkUpdateData {
209209
workspaceId: string
210210
}
211211

212+
export interface BatchUpdateByIdData {
213+
tableId: string
214+
updates: Array<{ rowId: string; data: RowData }>
215+
workspaceId: string
216+
}
217+
212218
export interface BulkDeleteData {
213219
tableId: string
214220
filter: Filter

0 commit comments

Comments (0)