From 398dc658ce1773508b84c2d56ddb351e323822f5 Mon Sep 17 00:00:00 2001 From: waleed Date: Tue, 12 May 2026 14:39:24 -0700 Subject: [PATCH 01/17] fix(security): harden HIGH deepsec findings across multiple attack surfaces MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Supabase tools (get_row, delete, update): validate table name with strict identifier regex and encodeURIComponent to prevent LLM-controlled path traversal to admin endpoints; add missing empty-filter guard to update matching the delete.ts pattern - SFTP/SMTP/SharePoint upload routes: add verifyFileAccess ownership check before downloadFileFromStorage, matching the WordPress reference pattern; rejects files the requesting user does not own with 404 - Gmail labels, OneDrive folders, Wealthbox items (×2): replace bare resolveOAuthAccountId + workspace-only membership check with authorizeCredentialUse which enforces credentialMember table; use credentialOwnerUserId for token refresh instead of bare accountRow.userId - A2A utils: thread pre-resolved IP from validateUrlWithDNS into A2A SDK via pinnedFetch (secureFetchWithPinnedIP) for JsonRpcTransportFactory, RestTransportFactory, and DefaultAgentCardResolver, closing the TOCTOU DNS rebinding window - SSH utils: cap stdout/stderr accumulation at 16 MB with truncation marker to prevent OOM from unbounded command output - Form DELETE route: replace db.delete() with db.update({archivedAt}) for true soft delete matching the schema's archivedAt column - Workflow admin import: fix Array.isArray() guard that silently dropped all variables (export format is Record, not Array) - Multipart upload: apply checkStorageQuota and MAX_WORKSPACE_FILE_SIZE to mothership context, closing the quota bypass for workspace-scoped storage --- .../api/auth/oauth/wealthbox/items/route.ts | 52 ++++-------------- apps/sim/app/api/files/multipart/route.ts | 17 ++++++ apps/sim/app/api/form/manage/[id]/route.ts | 7 ++- 
apps/sim/app/api/tools/gmail/labels/route.ts | 53 ++++-------------- .../app/api/tools/onedrive/folders/route.ts | 48 ++++------------- apps/sim/app/api/tools/sftp/upload/route.ts | 17 ++++++ .../app/api/tools/sharepoint/upload/route.ts | 17 ++++++ apps/sim/app/api/tools/smtp/send/route.ts | 17 ++++++ apps/sim/app/api/tools/ssh/utils.ts | 34 ++++++++++-- .../app/api/tools/wealthbox/items/route.ts | 52 ++++-------------- .../api/v1/admin/workflows/import/route.ts | 26 ++++++++- .../message/components/markdown-renderer.tsx | 2 +- .../components/file-viewer/preview-panel.tsx | 4 +- .../components/chat-content/chat-content.tsx | 2 +- .../components/note-block/note-block.tsx | 2 +- apps/sim/lib/a2a/utils.ts | 54 +++++++++++++++++-- apps/sim/tools/supabase/delete.ts | 8 +-- apps/sim/tools/supabase/get_row.ts | 9 ++-- apps/sim/tools/supabase/update.ts | 12 +++-- 19 files changed, 244 insertions(+), 189 deletions(-) diff --git a/apps/sim/app/api/auth/oauth/wealthbox/items/route.ts b/apps/sim/app/api/auth/oauth/wealthbox/items/route.ts index 102e8f16c0..252b1d4123 100644 --- a/apps/sim/app/api/auth/oauth/wealthbox/items/route.ts +++ b/apps/sim/app/api/auth/oauth/wealthbox/items/route.ts @@ -1,14 +1,11 @@ -import { db } from '@sim/db' -import { account } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { wealthboxOAuthItemsContract } from '@/lib/api/contracts/selectors/wealthbox' import { parseRequest } from '@/lib/api/server' -import { getSession } from '@/lib/auth' +import { authorizeCredentialUse } from '@/lib/auth/credential-access' import { generateRequestId } from '@/lib/core/utils/request' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' -import { refreshAccessTokenIfNeeded, resolveOAuthAccountId } from '@/app/api/auth/oauth/utils' +import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils' export const dynamic = 
'force-dynamic' @@ -30,51 +27,22 @@ export const GET = withRouteHandler(async (request: NextRequest) => { const requestId = generateRequestId() try { - const session = await getSession() - - if (!session?.user?.id) { - logger.warn(`[${requestId}] Unauthenticated request rejected`) - return NextResponse.json({ error: 'User not authenticated' }, { status: 401 }) - } - const parsed = await parseRequest(wealthboxOAuthItemsContract, request, {}) if (!parsed.success) return parsed.response const { credentialId, type } = parsed.data.query const query = parsed.data.query.query ?? '' - const resolved = await resolveOAuthAccountId(credentialId) - if (!resolved) { - return NextResponse.json({ error: 'Credential not found' }, { status: 404 }) - } - - if (resolved.workspaceId) { - const { getUserEntityPermissions } = await import('@/lib/workspaces/permissions/utils') - const perm = await getUserEntityPermissions( - session.user.id, - 'workspace', - resolved.workspaceId - ) - if (perm === null) { - return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) - } - } - - const credentials = await db - .select() - .from(account) - .where(eq(account.id, resolved.accountId)) - .limit(1) - - if (!credentials.length) { - logger.warn(`[${requestId}] Credential not found`, { credentialId }) - return NextResponse.json({ error: 'Credential not found' }, { status: 404 }) + const authz = await authorizeCredentialUse(request as any, { + credentialId, + requireWorkflowIdForInternal: false, + }) + if (!authz.ok || !authz.credentialOwnerUserId) { + return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 }) } - const accountRow = credentials[0] - const accessToken = await refreshAccessTokenIfNeeded( - resolved.accountId, - accountRow.userId, + credentialId, + authz.credentialOwnerUserId, requestId ) diff --git a/apps/sim/app/api/files/multipart/route.ts b/apps/sim/app/api/files/multipart/route.ts index e61cbd543a..84420450b6 100644 --- 
a/apps/sim/app/api/files/multipart/route.ts +++ b/apps/sim/app/api/files/multipart/route.ts @@ -159,10 +159,27 @@ export const POST = withRouteHandler(async (request: NextRequest) => { ) } } else if (context === 'mothership') { + const { MAX_WORKSPACE_FILE_SIZE } = await import('@/lib/uploads/shared/types') + if (typeof fileSize === 'number' && fileSize > MAX_WORKSPACE_FILE_SIZE) { + return NextResponse.json( + { error: `File size exceeds maximum of ${MAX_WORKSPACE_FILE_SIZE} bytes` }, + { status: 413 } + ) + } + const { generateWorkspaceFileKey } = await import( '@/lib/uploads/contexts/workspace/workspace-file-manager' ) customKey = generateWorkspaceFileKey(workspaceId, fileName) + + const { checkStorageQuota } = await import('@/lib/billing/storage') + const quotaCheck = await checkStorageQuota(userId, fileSize) + if (!quotaCheck.allowed) { + return NextResponse.json( + { error: quotaCheck.error || 'Storage limit exceeded' }, + { status: 413 } + ) + } } else if (context === 'execution') { const workflowId = (data as { workflowId?: unknown }).workflowId const executionId = (data as { executionId?: unknown }).executionId diff --git a/apps/sim/app/api/form/manage/[id]/route.ts b/apps/sim/app/api/form/manage/[id]/route.ts index 242d923d30..d898c6f4f5 100644 --- a/apps/sim/app/api/form/manage/[id]/route.ts +++ b/apps/sim/app/api/form/manage/[id]/route.ts @@ -197,9 +197,12 @@ export const DELETE = withRouteHandler( return createErrorResponse('Form not found or access denied', 404) } - await db.delete(form).where(eq(form.id, id)) + await db + .update(form) + .set({ archivedAt: new Date(), updatedAt: new Date() }) + .where(eq(form.id, id)) - logger.info(`Form ${id} deleted (soft delete)`) + logger.info(`Form ${id} soft deleted`) recordAudit({ workspaceId: formWorkspaceId ?? 
null, diff --git a/apps/sim/app/api/tools/gmail/labels/route.ts b/apps/sim/app/api/tools/gmail/labels/route.ts index d675ea6e48..432050f9a2 100644 --- a/apps/sim/app/api/tools/gmail/labels/route.ts +++ b/apps/sim/app/api/tools/gmail/labels/route.ts @@ -1,11 +1,8 @@ -import { db } from '@sim/db' -import { account } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { gmailLabelsSelectorContract } from '@/lib/api/contracts/selectors/google' import { parseRequest } from '@/lib/api/server' -import { getSession } from '@/lib/auth' +import { authorizeCredentialUse } from '@/lib/auth/credential-access' import { validateAlphanumericId } from '@/lib/core/security/input-validation' import { generateRequestId } from '@/lib/core/utils/request' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' @@ -32,13 +29,6 @@ export const GET = withRouteHandler(async (request: NextRequest) => { const requestId = generateRequestId() try { - const session = await getSession() - - if (!session?.user?.id) { - logger.warn(`[${requestId}] Unauthenticated labels request rejected`) - return NextResponse.json({ error: 'User not authenticated' }, { status: 401 }) - } - const parsed = await parseRequest(gmailLabelsSelectorContract, request, {}) if (!parsed.success) return parsed.response const { credentialId, query } = parsed.data.query @@ -50,23 +40,19 @@ export const GET = withRouteHandler(async (request: NextRequest) => { return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 }) } + const authz = await authorizeCredentialUse(request as any, { + credentialId, + requireWorkflowIdForInternal: false, + }) + if (!authz.ok || !authz.credentialOwnerUserId) { + return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 }) + } + const resolved = await resolveOAuthAccountId(credentialId) if (!resolved) { return 
NextResponse.json({ error: 'Credential not found' }, { status: 404 }) } - if (resolved.workspaceId) { - const { getUserEntityPermissions } = await import('@/lib/workspaces/permissions/utils') - const perm = await getUserEntityPermissions( - session.user.id, - 'workspace', - resolved.workspaceId - ) - if (perm === null) { - return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) - } - } - let accessToken: string | null = null if (resolved.credentialType === 'service_account' && resolved.credentialId) { @@ -76,26 +62,9 @@ export const GET = withRouteHandler(async (request: NextRequest) => { impersonateEmail ) } else { - const credentials = await db - .select() - .from(account) - .where(eq(account.id, resolved.accountId)) - .limit(1) - - if (!credentials.length) { - logger.warn(`[${requestId}] Credential not found`) - return NextResponse.json({ error: 'Credential not found' }, { status: 404 }) - } - - const accountRow = credentials[0] - - logger.info( - `[${requestId}] Using credential: ${accountRow.id}, provider: ${accountRow.providerId}` - ) - accessToken = await refreshAccessTokenIfNeeded( - resolved.accountId, - accountRow.userId, + credentialId, + authz.credentialOwnerUserId, requestId, getScopesForService('gmail') ) diff --git a/apps/sim/app/api/tools/onedrive/folders/route.ts b/apps/sim/app/api/tools/onedrive/folders/route.ts index 2538c6ae39..a779ac9d71 100644 --- a/apps/sim/app/api/tools/onedrive/folders/route.ts +++ b/apps/sim/app/api/tools/onedrive/folders/route.ts @@ -1,15 +1,12 @@ -import { db } from '@sim/db' -import { account } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { generateId } from '@sim/utils/id' -import { eq } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { onedriveFoldersQuerySchema } from '@/lib/api/contracts/selectors/microsoft' import { getValidationErrorMessage } from '@/lib/api/server' -import { getSession } from '@/lib/auth' +import { authorizeCredentialUse 
} from '@/lib/auth/credential-access' import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' -import { refreshAccessTokenIfNeeded, resolveOAuthAccountId } from '@/app/api/auth/oauth/utils' +import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils' import type { MicrosoftGraphDriveItem } from '@/tools/onedrive/types' export const dynamic = 'force-dynamic' @@ -23,11 +20,6 @@ export const GET = withRouteHandler(async (request: NextRequest) => { const requestId = generateId().slice(0, 8) try { - const session = await getSession() - if (!session?.user?.id) { - return NextResponse.json({ error: 'User not authenticated' }, { status: 401 }) - } - const { searchParams } = new URL(request.url) const validation = onedriveFoldersQuerySchema.safeParse({ credentialId: searchParams.get('credentialId') ?? '', @@ -51,37 +43,17 @@ export const GET = withRouteHandler(async (request: NextRequest) => { return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 }) } - const resolved = await resolveOAuthAccountId(credentialId) - if (!resolved) { - return NextResponse.json({ error: 'Credential not found' }, { status: 404 }) - } - - if (resolved.workspaceId) { - const { getUserEntityPermissions } = await import('@/lib/workspaces/permissions/utils') - const perm = await getUserEntityPermissions( - session.user.id, - 'workspace', - resolved.workspaceId - ) - if (perm === null) { - return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) - } - } - - const credentials = await db - .select() - .from(account) - .where(eq(account.id, resolved.accountId)) - .limit(1) - if (!credentials.length) { - return NextResponse.json({ error: 'Credential not found' }, { status: 404 }) + const authz = await authorizeCredentialUse(request as any, { + credentialId, + requireWorkflowIdForInternal: false, + }) + if (!authz.ok || !authz.credentialOwnerUserId) { + return 
NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 }) } - const accountRow = credentials[0] - const accessToken = await refreshAccessTokenIfNeeded( - resolved.accountId, - accountRow.userId, + credentialId, + authz.credentialOwnerUserId, requestId ) if (!accessToken) { diff --git a/apps/sim/app/api/tools/sftp/upload/route.ts b/apps/sim/app/api/tools/sftp/upload/route.ts index a0dbbbbbdb..01242b2bfa 100644 --- a/apps/sim/app/api/tools/sftp/upload/route.ts +++ b/apps/sim/app/api/tools/sftp/upload/route.ts @@ -7,6 +7,7 @@ import { generateRequestId } from '@/lib/core/utils/request' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils' import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server' +import { verifyFileAccess } from '@/app/api/files/authorization' import { createSftpConnection, getSftp, @@ -95,6 +96,22 @@ export const POST = withRouteHandler(async (request: NextRequest) => { for (const file of userFiles) { try { + if (typeof file.key !== 'string' || file.key.length === 0) { + logger.warn(`[${requestId}] File access check rejected: missing key`) + return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) + } + if (!authResult.userId) { + logger.warn(`[${requestId}] File access check requires userId but none available`) + return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) + } + const hasAccess = await verifyFileAccess(file.key, authResult.userId) + if (!hasAccess) { + logger.warn(`[${requestId}] File access denied for user`, { + userId: authResult.userId, + key: file.key, + }) + return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) + } logger.info( `[${requestId}] Downloading file for upload: ${file.name} (${file.size} bytes)` ) diff --git a/apps/sim/app/api/tools/sharepoint/upload/route.ts 
b/apps/sim/app/api/tools/sharepoint/upload/route.ts index 556de6d422..d8dc22f0c2 100644 --- a/apps/sim/app/api/tools/sharepoint/upload/route.ts +++ b/apps/sim/app/api/tools/sharepoint/upload/route.ts @@ -9,6 +9,7 @@ import { generateRequestId } from '@/lib/core/utils/request' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils' import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server' +import { verifyFileAccess } from '@/app/api/files/authorization' import type { MicrosoftGraphDriveItem } from '@/tools/onedrive/types' import type { SharepointSkippedFile, SharepointUploadError } from '@/tools/sharepoint/types' @@ -82,6 +83,22 @@ export const POST = withRouteHandler(async (request: NextRequest) => { const errors: SharepointUploadError[] = [] for (const userFile of userFiles) { + if (typeof userFile.key !== 'string' || userFile.key.length === 0) { + logger.warn(`[${requestId}] File access check rejected: missing key`) + return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) + } + if (!authResult.userId) { + logger.warn(`[${requestId}] File access check requires userId but none available`) + return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) + } + const hasAccess = await verifyFileAccess(userFile.key, authResult.userId) + if (!hasAccess) { + logger.warn(`[${requestId}] File access denied for user`, { + userId: authResult.userId, + key: userFile.key, + }) + return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) + } logger.info(`[${requestId}] Uploading file: ${userFile.name}`) const buffer = await downloadFileFromStorage(userFile, requestId, logger) diff --git a/apps/sim/app/api/tools/smtp/send/route.ts b/apps/sim/app/api/tools/smtp/send/route.ts index 127f8dca33..bdfca13c45 100644 --- a/apps/sim/app/api/tools/smtp/send/route.ts +++ 
b/apps/sim/app/api/tools/smtp/send/route.ts @@ -10,6 +10,7 @@ import { generateRequestId } from '@/lib/core/utils/request' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils' import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server' +import { verifyFileAccess } from '@/app/api/files/authorization' export const dynamic = 'force-dynamic' @@ -122,6 +123,22 @@ export const POST = withRouteHandler(async (request: NextRequest) => { const attachmentBuffers = await Promise.all( attachments.map(async (file) => { try { + if (typeof file.key !== 'string' || file.key.length === 0) { + logger.warn(`[${requestId}] File access check rejected: missing key`) + throw new Error('File not found') + } + if (!authResult.userId) { + logger.warn(`[${requestId}] File access check requires userId but none available`) + throw new Error('File not found') + } + const hasAccess = await verifyFileAccess(file.key, authResult.userId) + if (!hasAccess) { + logger.warn(`[${requestId}] File access denied for user`, { + userId: authResult.userId, + key: file.key, + }) + throw new Error('File not found') + } logger.info( `[${requestId}] Downloading attachment: ${file.name} (${file.size} bytes)` ) diff --git a/apps/sim/app/api/tools/ssh/utils.ts b/apps/sim/app/api/tools/ssh/utils.ts index ed3dae8832..3bc2ed6053 100644 --- a/apps/sim/app/api/tools/ssh/utils.ts +++ b/apps/sim/app/api/tools/ssh/utils.ts @@ -174,6 +174,8 @@ export async function createSSHConnection(config: SSHConnectionConfig): Promise< }) } +const MAX_OUTPUT_BYTES = 16 * 1024 * 1024 + /** * Execute a command on the SSH connection */ @@ -187,21 +189,45 @@ export function executeSSHCommand(client: Client, command: string): Promise { resolve({ - stdout: stdout.trim(), - stderr: stderr.trim(), + stdout: stdoutTruncated + ? `${stdout}\n[output truncated: exceeded 16MB limit]` + : stdout.trim(), + stderr: stderrTruncated + ? 
`${stderr}\n[stderr truncated: exceeded 16MB limit]` + : stderr.trim(), exitCode: code ?? 0, }) }) stream.on('data', (data: Buffer) => { - stdout += data.toString() + const remaining = MAX_OUTPUT_BYTES - stdoutBytes + if (remaining <= 0) { + stdoutTruncated = true + return + } + const chunk = data.subarray(0, remaining) + stdout += chunk.toString() + stdoutBytes += chunk.length + if (data.length > remaining) stdoutTruncated = true }) stream.stderr.on('data', (data: Buffer) => { - stderr += data.toString() + const remaining = MAX_OUTPUT_BYTES - stderrBytes + if (remaining <= 0) { + stderrTruncated = true + return + } + const chunk = data.subarray(0, remaining) + stderr += chunk.toString() + stderrBytes += chunk.length + if (data.length > remaining) stderrTruncated = true }) }) }) diff --git a/apps/sim/app/api/tools/wealthbox/items/route.ts b/apps/sim/app/api/tools/wealthbox/items/route.ts index 72fcd00b6f..5099b8e1da 100644 --- a/apps/sim/app/api/tools/wealthbox/items/route.ts +++ b/apps/sim/app/api/tools/wealthbox/items/route.ts @@ -1,15 +1,12 @@ -import { db } from '@sim/db' -import { account } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { wealthboxItemsSelectorContract } from '@/lib/api/contracts/selectors/wealthbox' import { parseRequest } from '@/lib/api/server' -import { getSession } from '@/lib/auth' +import { authorizeCredentialUse } from '@/lib/auth/credential-access' import { validatePathSegment } from '@/lib/core/security/input-validation' import { generateRequestId } from '@/lib/core/utils/request' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' -import { refreshAccessTokenIfNeeded, resolveOAuthAccountId } from '@/app/api/auth/oauth/utils' +import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils' export const dynamic = 'force-dynamic' @@ -31,13 +28,6 @@ export const GET = withRouteHandler(async 
(request: NextRequest) => { const requestId = generateRequestId() try { - const session = await getSession() - - if (!session?.user?.id) { - logger.warn(`[${requestId}] Unauthenticated request rejected`) - return NextResponse.json({ error: 'User not authenticated' }, { status: 401 }) - } - const parsed = await parseRequest(wealthboxItemsSelectorContract, request, {}) if (!parsed.success) return parsed.response const { credentialId, type } = parsed.data.query @@ -55,39 +45,17 @@ export const GET = withRouteHandler(async (request: NextRequest) => { return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 }) } - const resolved = await resolveOAuthAccountId(credentialId) - if (!resolved) { - return NextResponse.json({ error: 'Credential not found' }, { status: 404 }) - } - - if (resolved.workspaceId) { - const { getUserEntityPermissions } = await import('@/lib/workspaces/permissions/utils') - const perm = await getUserEntityPermissions( - session.user.id, - 'workspace', - resolved.workspaceId - ) - if (perm === null) { - return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) - } - } - - const credentials = await db - .select() - .from(account) - .where(eq(account.id, resolved.accountId)) - .limit(1) - - if (!credentials.length) { - logger.warn(`[${requestId}] Credential not found`, { credentialId }) - return NextResponse.json({ error: 'Credential not found' }, { status: 404 }) + const authz = await authorizeCredentialUse(request as any, { + credentialId, + requireWorkflowIdForInternal: false, + }) + if (!authz.ok || !authz.credentialOwnerUserId) { + return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 }) } - const accountRow = credentials[0] - const accessToken = await refreshAccessTokenIfNeeded( - resolved.accountId, - accountRow.userId, + credentialId, + authz.credentialOwnerUserId, requestId ) diff --git a/apps/sim/app/api/v1/admin/workflows/import/route.ts 
b/apps/sim/app/api/v1/admin/workflows/import/route.ts index cb38dbc5e8..b3ad741416 100644 --- a/apps/sim/app/api/v1/admin/workflows/import/route.ts +++ b/apps/sim/app/api/v1/admin/workflows/import/route.ts @@ -118,7 +118,31 @@ export const POST = withRouteHandler( return internalErrorResponse(`Failed to save workflow state: ${saveResult.error}`) } - if (workflowData.variables && Array.isArray(workflowData.variables)) { + if ( + workflowData.variables && + typeof workflowData.variables === 'object' && + !Array.isArray(workflowData.variables) + ) { + const variablesRecord: Record = {} + const vars = workflowData.variables as Record< + string, + { id?: string; name: string; type?: string; value: unknown } + > + Object.entries(vars).forEach(([key, v]) => { + const varId = v.id || key + variablesRecord[varId] = { + id: varId, + name: v.name, + type: v.type || 'string', + value: v.value, + } + }) + + await db + .update(workflow) + .set({ variables: variablesRecord, updatedAt: new Date() }) + .where(eq(workflow.id, workflowId)) + } else if (workflowData.variables && Array.isArray(workflowData.variables)) { const variablesRecord: Record = {} workflowData.variables.forEach((v) => { const varId = v.id || generateId() diff --git a/apps/sim/app/chat/components/message/components/markdown-renderer.tsx b/apps/sim/app/chat/components/message/components/markdown-renderer.tsx index 32d49b167b..84b60eea79 100644 --- a/apps/sim/app/chat/components/message/components/markdown-renderer.tsx +++ b/apps/sim/app/chat/components/message/components/markdown-renderer.tsx @@ -115,7 +115,7 @@ const COMPONENTS = { ), blockquote: ({ children }: React.HTMLAttributes) => ( -
+
{children}
), diff --git a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/preview-panel.tsx b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/preview-panel.tsx index 03ab43d4ad..53072bced7 100644 --- a/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/preview-panel.tsx +++ b/apps/sim/app/workspace/[workspaceId]/files/components/file-viewer/preview-panel.tsx @@ -263,7 +263,7 @@ function CalloutBlock({ type, children }: { type: string; children?: React.React const config = CALLOUT_CONFIG[type] if (!config) { return ( -
+
{children}
) @@ -605,7 +605,7 @@ const STATIC_MARKDOWN_COMPONENTS = { return {children} } return ( -
+
{children}
) diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/message-content/components/chat-content/chat-content.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/message-content/components/chat-content/chat-content.tsx index 62157c89ee..1537bef392 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/components/message-content/components/chat-content/chat-content.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/components/message-content/components/chat-content/chat-content.tsx @@ -224,7 +224,7 @@ const MARKDOWN_COMPONENTS = { }, blockquote({ children }: { children?: React.ReactNode }) { return ( -
+
{children}
) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/note-block/note-block.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/note-block/note-block.tsx index 16a1291ca0..2c24056ca5 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/note-block/note-block.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/note-block/note-block.tsx @@ -430,7 +430,7 @@ const NOTE_COMPONENTS = { {children} ), blockquote: ({ children }: { children?: React.ReactNode }) => ( -
+
{children}
), diff --git a/apps/sim/lib/a2a/utils.ts b/apps/sim/lib/a2a/utils.ts index a1e8f79a65..35a9faa51d 100644 --- a/apps/sim/lib/a2a/utils.ts +++ b/apps/sim/lib/a2a/utils.ts @@ -14,11 +14,17 @@ import { type Client, ClientFactory, ClientFactoryOptions, + DefaultAgentCardResolver, + JsonRpcTransportFactory, + RestTransportFactory, } from '@a2a-js/sdk/client' import { createLogger } from '@sim/logger' import { toError } from '@sim/utils/errors' import { generateId } from '@sim/utils/id' -import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server' +import { + secureFetchWithPinnedIP, + validateUrlWithDNS, +} from '@/lib/core/security/input-validation.server' import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils' import { A2A_TERMINAL_STATES } from './constants' @@ -60,13 +66,55 @@ export async function createA2AClient(agentUrl: string, apiKey?: string): Promis throw new Error(validation.error || 'Agent URL validation failed') } + const resolvedIP = validation.resolvedIP! + + const pinnedFetch = ( + input: Parameters[0], + init?: Parameters[1] + ): Promise => + secureFetchWithPinnedIP(input.toString(), resolvedIP, { + method: init?.method, + headers: + init?.headers instanceof Headers + ? Object.fromEntries(init.headers.entries()) + : (init?.headers as Record | undefined), + body: + typeof init?.body === 'string' || Buffer.isBuffer(init?.body) + ? (init?.body as string | Buffer) + : init?.body instanceof Uint8Array + ? (init.body as Uint8Array) + : undefined, + signal: init?.signal instanceof AbortSignal ? 
init.signal : undefined, + }).then(async (res) => { + const headers = new Headers(res.headers.toRecord()) + const body = await res.text() + return new Response(body, { + status: res.status, + statusText: res.statusText, + headers, + }) + }) + + const pinnedTransports = [ + new JsonRpcTransportFactory({ fetchImpl: pinnedFetch }), + new RestTransportFactory({ fetchImpl: pinnedFetch }), + ] + + const pinnedCardResolver = new DefaultAgentCardResolver({ fetchImpl: pinnedFetch }) + + const baseOptions = ClientFactoryOptions.createFrom(ClientFactoryOptions.default, { + transports: pinnedTransports, + cardResolver: pinnedCardResolver, + }) + const factoryOptions = apiKey - ? ClientFactoryOptions.createFrom(ClientFactoryOptions.default, { + ? ClientFactoryOptions.createFrom(baseOptions, { clientConfig: { interceptors: [new ApiKeyInterceptor(apiKey)], }, }) - : ClientFactoryOptions.default + : baseOptions + const factory = new ClientFactory(factoryOptions) // Try standard A2A path first (/.well-known/agent.json) diff --git a/apps/sim/tools/supabase/delete.ts b/apps/sim/tools/supabase/delete.ts index a76a1781b1..96b337bd95 100644 --- a/apps/sim/tools/supabase/delete.ts +++ b/apps/sim/tools/supabase/delete.ts @@ -44,10 +44,12 @@ export const deleteTool: ToolConfig { - // Construct the URL for the Supabase REST API with select to return deleted data - let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${params.table}?select=*` + if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(params.table)) { + throw new Error('Invalid table name: must contain only letters, digits, and underscores') + } + + let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${encodeURIComponent(params.table)}?select=*` - // Add filters (required for delete) - using PostgREST syntax if (params.filter?.trim()) { url += `&${params.filter.trim()}` } else { diff --git a/apps/sim/tools/supabase/get_row.ts b/apps/sim/tools/supabase/get_row.ts index e21414dee2..8fd3909a9f 100644 --- a/apps/sim/tools/supabase/get_row.ts 
+++ b/apps/sim/tools/supabase/get_row.ts @@ -50,16 +50,17 @@ export const getRowTool: ToolConfig { - // Construct the URL for the Supabase REST API + if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(params.table)) { + throw new Error('Invalid table name: must contain only letters, digits, and underscores') + } + const selectColumns = params.select?.trim() || '*' - let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${params.table}?select=${encodeURIComponent(selectColumns)}` + let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${encodeURIComponent(params.table)}?select=${encodeURIComponent(selectColumns)}` - // Add filters (required for get_row) - using PostgREST syntax if (params.filter?.trim()) { url += `&${params.filter.trim()}` } - // Limit to 1 row since we want a single row url += `&limit=1` return url diff --git a/apps/sim/tools/supabase/update.ts b/apps/sim/tools/supabase/update.ts index f6aad13ed7..5877c17ca4 100644 --- a/apps/sim/tools/supabase/update.ts +++ b/apps/sim/tools/supabase/update.ts @@ -50,12 +50,18 @@ export const updateTool: ToolConfig { - // Construct the URL for the Supabase REST API with select to return updated data - let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${params.table}?select=*` + if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(params.table)) { + throw new Error('Invalid table name: must contain only letters, digits, and underscores') + } + + let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${encodeURIComponent(params.table)}?select=*` - // Add filters (required for update) - using PostgREST syntax if (params.filter?.trim()) { url += `&${params.filter.trim()}` + } else { + throw new Error( + 'Filter is required for update operations to prevent accidental update of all rows' + ) } return url From 7b84a79b60152fe56b8f5dbb521e58be7604d38e Mon Sep 17 00:00:00 2001 From: waleed Date: Tue, 12 May 2026 14:41:00 -0700 Subject: [PATCH 02/17] fix(security): eliminate workspace env lost-update race with atomic JSONB ops PUT: use 
`variables || excluded.variables` in onConflictDoUpdate so concurrent writes merge atomically in the DB instead of last-writer-wins at the application layer. DELETE: replace the read-modify-write upsert with a single UPDATE that removes keys via the JSONB `-` operator, preventing concurrent deletes from resurrecting previously-removed secrets. --- .../api/workspaces/[id]/environment/route.ts | 30 +++++++++---------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/apps/sim/app/api/workspaces/[id]/environment/route.ts b/apps/sim/app/api/workspaces/[id]/environment/route.ts index ec1e2b4112..b446041173 100644 --- a/apps/sim/app/api/workspaces/[id]/environment/route.ts +++ b/apps/sim/app/api/workspaces/[id]/environment/route.ts @@ -3,7 +3,7 @@ import { db } from '@sim/db' import { workspaceEnvironment } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { generateId } from '@sim/utils/id' -import { eq } from 'drizzle-orm' +import { eq, sql } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { removeWorkspaceEnvironmentContract, @@ -113,23 +113,24 @@ export const PUT = withRouteHandler( }) ).then((entries) => Object.fromEntries(entries)) - const merged = { ...existingEncrypted, ...encryptedIncoming } - - // Upsert by unique workspace_id await db .insert(workspaceEnvironment) .values({ id: generateId(), workspaceId, - variables: merged, + variables: encryptedIncoming, createdAt: new Date(), updatedAt: new Date(), }) .onConflictDoUpdate({ target: [workspaceEnvironment.workspaceId], - set: { variables: merged, updatedAt: new Date() }, + set: { + variables: sql`${workspaceEnvironment.variables} || excluded.variables`, + updatedAt: new Date(), + }, }) + const merged = { ...existingEncrypted, ...encryptedIncoming } const newKeys = Object.keys(variables).filter((k) => !(k in existingEncrypted)) await createWorkspaceEnvCredentials({ workspaceId, newKeys, actingUserId: userId }) @@ -203,18 +204,15 @@ export 
const DELETE = withRouteHandler( } await db - .insert(workspaceEnvironment) - .values({ - id: wsRows[0]?.id || generateId(), - workspaceId, - variables: current, - createdAt: wsRows[0]?.createdAt || new Date(), + .update(workspaceEnvironment) + .set({ + variables: sql`${workspaceEnvironment.variables} - ARRAY[${sql.join( + keys.map((k) => sql`${k}`), + sql`, ` + )}]::text[]`, updatedAt: new Date(), }) - .onConflictDoUpdate({ - target: [workspaceEnvironment.workspaceId], - set: { variables: current, updatedAt: new Date() }, - }) + .where(eq(workspaceEnvironment.workspaceId, workspaceId)) await deleteWorkspaceEnvCredentials({ workspaceId, removedKeys: keys }) From 7b6e43d205f13f7724cdd97032f4ab5cbd0adcc9 Mon Sep 17 00:00:00 2001 From: waleed Date: Tue, 12 May 2026 14:50:18 -0700 Subject: [PATCH 03/17] fix(security): address audit findings from security fix review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - SMTP send: restructure attachment loop from Promise.all to sequential for...of so verifyFileAccess denial returns 404 instead of propagating as a generic 500 via the SMTP error classifier - Supabase tools: extend table-name validation and encodeURIComponent to the five previously missed tools — insert, upsert, count, query, text_search — completing coverage across all eight Supabase tools - Credential routes: remove unnecessary `request as any` casts in Gmail, OneDrive, and Wealthbox routes; authorizeCredentialUse already accepts NextRequest directly - Form soft delete: also set isActive=false alongside archivedAt so that any future code paths querying by isActive see a consistent state - SSH utils: fix exit code fallback from 0 to -1 so an abnormally closed connection that supplies no exit code is not reported as success - Workspace env: capitalize EXCLUDED.variables in the onConflictDoUpdate set clause to make the pseudo-table reference unambiguous --- .../api/auth/oauth/wealthbox/items/route.ts | 2 +-
apps/sim/app/api/form/manage/[id]/route.ts | 2 +- apps/sim/app/api/tools/gmail/labels/route.ts | 2 +- .../app/api/tools/onedrive/folders/route.ts | 2 +- apps/sim/app/api/tools/smtp/send/route.ts | 71 +++++++++---------- apps/sim/app/api/tools/ssh/utils.ts | 2 +- .../app/api/tools/wealthbox/items/route.ts | 2 +- .../api/workspaces/[id]/environment/route.ts | 2 +- apps/sim/tools/supabase/count.ts | 6 +- apps/sim/tools/supabase/insert.ts | 7 +- apps/sim/tools/supabase/query.ts | 6 +- apps/sim/tools/supabase/text_search.ts | 6 +- apps/sim/tools/supabase/upsert.ts | 7 +- 13 files changed, 64 insertions(+), 53 deletions(-) diff --git a/apps/sim/app/api/auth/oauth/wealthbox/items/route.ts b/apps/sim/app/api/auth/oauth/wealthbox/items/route.ts index 252b1d4123..e56f46428b 100644 --- a/apps/sim/app/api/auth/oauth/wealthbox/items/route.ts +++ b/apps/sim/app/api/auth/oauth/wealthbox/items/route.ts @@ -32,7 +32,7 @@ export const GET = withRouteHandler(async (request: NextRequest) => { const { credentialId, type } = parsed.data.query const query = parsed.data.query.query ?? 
'' - const authz = await authorizeCredentialUse(request as any, { + const authz = await authorizeCredentialUse(request, { credentialId, requireWorkflowIdForInternal: false, }) diff --git a/apps/sim/app/api/form/manage/[id]/route.ts b/apps/sim/app/api/form/manage/[id]/route.ts index d898c6f4f5..d1c05bbe62 100644 --- a/apps/sim/app/api/form/manage/[id]/route.ts +++ b/apps/sim/app/api/form/manage/[id]/route.ts @@ -199,7 +199,7 @@ export const DELETE = withRouteHandler( await db .update(form) - .set({ archivedAt: new Date(), updatedAt: new Date() }) + .set({ archivedAt: new Date(), isActive: false, updatedAt: new Date() }) .where(eq(form.id, id)) logger.info(`Form ${id} soft deleted`) diff --git a/apps/sim/app/api/tools/gmail/labels/route.ts b/apps/sim/app/api/tools/gmail/labels/route.ts index 432050f9a2..6009125fe5 100644 --- a/apps/sim/app/api/tools/gmail/labels/route.ts +++ b/apps/sim/app/api/tools/gmail/labels/route.ts @@ -40,7 +40,7 @@ export const GET = withRouteHandler(async (request: NextRequest) => { return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 }) } - const authz = await authorizeCredentialUse(request as any, { + const authz = await authorizeCredentialUse(request, { credentialId, requireWorkflowIdForInternal: false, }) diff --git a/apps/sim/app/api/tools/onedrive/folders/route.ts b/apps/sim/app/api/tools/onedrive/folders/route.ts index a779ac9d71..3d28c1f2a4 100644 --- a/apps/sim/app/api/tools/onedrive/folders/route.ts +++ b/apps/sim/app/api/tools/onedrive/folders/route.ts @@ -43,7 +43,7 @@ export const GET = withRouteHandler(async (request: NextRequest) => { return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 }) } - const authz = await authorizeCredentialUse(request as any, { + const authz = await authorizeCredentialUse(request, { credentialId, requireWorkflowIdForInternal: false, }) diff --git a/apps/sim/app/api/tools/smtp/send/route.ts b/apps/sim/app/api/tools/smtp/send/route.ts index 
bdfca13c45..fe6f6b27f5 100644 --- a/apps/sim/app/api/tools/smtp/send/route.ts +++ b/apps/sim/app/api/tools/smtp/send/route.ts @@ -120,44 +120,39 @@ export const POST = withRouteHandler(async (request: NextRequest) => { ) } - const attachmentBuffers = await Promise.all( - attachments.map(async (file) => { - try { - if (typeof file.key !== 'string' || file.key.length === 0) { - logger.warn(`[${requestId}] File access check rejected: missing key`) - throw new Error('File not found') - } - if (!authResult.userId) { - logger.warn(`[${requestId}] File access check requires userId but none available`) - throw new Error('File not found') - } - const hasAccess = await verifyFileAccess(file.key, authResult.userId) - if (!hasAccess) { - logger.warn(`[${requestId}] File access denied for user`, { - userId: authResult.userId, - key: file.key, - }) - throw new Error('File not found') - } - logger.info( - `[${requestId}] Downloading attachment: ${file.name} (${file.size} bytes)` - ) - - const buffer = await downloadFileFromStorage(file, requestId, logger) - - return { - filename: file.name, - content: buffer, - contentType: file.type || 'application/octet-stream', - } - } catch (error) { - logger.error(`[${requestId}] Failed to download attachment ${file.name}:`, error) - throw new Error( - `Failed to download attachment "${file.name}": ${error instanceof Error ? 
error.message : 'Unknown error'}` - ) - } - }) - ) + const attachmentBuffers: { filename: string; content: Buffer; contentType: string }[] = [] + for (const file of attachments) { + if (typeof file.key !== 'string' || file.key.length === 0) { + logger.warn(`[${requestId}] File access check rejected: missing key`) + return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) + } + if (!authResult.userId) { + logger.warn(`[${requestId}] File access check requires userId but none available`) + return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) + } + const hasAccess = await verifyFileAccess(file.key, authResult.userId) + if (!hasAccess) { + logger.warn(`[${requestId}] File access denied for user`, { + userId: authResult.userId, + key: file.key, + }) + return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) + } + try { + logger.info(`[${requestId}] Downloading attachment: ${file.name} (${file.size} bytes)`) + const buffer = await downloadFileFromStorage(file, requestId, logger) + attachmentBuffers.push({ + filename: file.name, + content: buffer, + contentType: file.type || 'application/octet-stream', + }) + } catch (error) { + logger.error(`[${requestId}] Failed to download attachment ${file.name}:`, error) + throw new Error( + `Failed to download attachment "${file.name}": ${error instanceof Error ? 
error.message : 'Unknown error'}` + ) + } + } logger.info(`[${requestId}] Processed ${attachmentBuffers.length} attachment(s)`) mailOptions.attachments = attachmentBuffers diff --git a/apps/sim/app/api/tools/ssh/utils.ts b/apps/sim/app/api/tools/ssh/utils.ts index 3bc2ed6053..48a51eb76c 100644 --- a/apps/sim/app/api/tools/ssh/utils.ts +++ b/apps/sim/app/api/tools/ssh/utils.ts @@ -202,7 +202,7 @@ export function executeSSHCommand(client: Client, command: string): Promise { return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 }) } - const authz = await authorizeCredentialUse(request as any, { + const authz = await authorizeCredentialUse(request, { credentialId, requireWorkflowIdForInternal: false, }) diff --git a/apps/sim/app/api/workspaces/[id]/environment/route.ts b/apps/sim/app/api/workspaces/[id]/environment/route.ts index b446041173..a4b32707ac 100644 --- a/apps/sim/app/api/workspaces/[id]/environment/route.ts +++ b/apps/sim/app/api/workspaces/[id]/environment/route.ts @@ -125,7 +125,7 @@ export const PUT = withRouteHandler( .onConflictDoUpdate({ target: [workspaceEnvironment.workspaceId], set: { - variables: sql`${workspaceEnvironment.variables} || excluded.variables`, + variables: sql`${workspaceEnvironment.variables} || EXCLUDED.variables`, updatedAt: new Date(), }, }) diff --git a/apps/sim/tools/supabase/count.ts b/apps/sim/tools/supabase/count.ts index 88a7bc0cf1..dd348d313b 100644 --- a/apps/sim/tools/supabase/count.ts +++ b/apps/sim/tools/supabase/count.ts @@ -50,9 +50,11 @@ export const countTool: ToolConfig = request: { url: (params) => { - let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${params.table}?select=*` + if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(params.table)) { + throw new Error('Invalid table name: must contain only letters, digits, and underscores') + } + let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${encodeURIComponent(params.table)}?select=*` - // Add filters if provided if (params.filter?.trim()) 
{ url += `&${params.filter.trim()}` } diff --git a/apps/sim/tools/supabase/insert.ts b/apps/sim/tools/supabase/insert.ts index 9cd3369653..2d9f4bccc7 100644 --- a/apps/sim/tools/supabase/insert.ts +++ b/apps/sim/tools/supabase/insert.ts @@ -43,7 +43,12 @@ export const insertTool: ToolConfig `${supabaseBaseUrl(params.projectId)}/rest/v1/${params.table}?select=*`, + url: (params) => { + if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(params.table)) { + throw new Error('Invalid table name: must contain only letters, digits, and underscores') + } + return `${supabaseBaseUrl(params.projectId)}/rest/v1/${encodeURIComponent(params.table)}?select=*` + }, method: 'POST', headers: (params) => { const headers: Record = { diff --git a/apps/sim/tools/supabase/query.ts b/apps/sim/tools/supabase/query.ts index 46847c0714..6eb48b1a2b 100644 --- a/apps/sim/tools/supabase/query.ts +++ b/apps/sim/tools/supabase/query.ts @@ -68,9 +68,11 @@ export const queryTool: ToolConfig = request: { url: (params) => { - // Construct the URL for the Supabase REST API + if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(params.table)) { + throw new Error('Invalid table name: must contain only letters, digits, and underscores') + } const selectColumns = params.select?.trim() || '*' - let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${params.table}?select=${encodeURIComponent(selectColumns)}` + let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${encodeURIComponent(params.table)}?select=${encodeURIComponent(selectColumns)}` // Add filters if provided - using PostgREST syntax if (params.filter?.trim()) { diff --git a/apps/sim/tools/supabase/text_search.ts b/apps/sim/tools/supabase/text_search.ts index 5a813c0c7a..0f9fff547e 100644 --- a/apps/sim/tools/supabase/text_search.ts +++ b/apps/sim/tools/supabase/text_search.ts @@ -74,11 +74,13 @@ export const textSearchTool: ToolConfig { + if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(params.table)) { + throw new Error('Invalid table name: must contain only letters, digits, and 
underscores') + } const searchType = params.searchType || 'websearch' const language = params.language || 'english' - // Build the text search filter - let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${params.table}?select=*` + let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${encodeURIComponent(params.table)}?select=*` // Map search types to PostgREST operators // plfts = plainto_tsquery (natural language), phfts = phraseto_tsquery, wfts = websearch_to_tsquery diff --git a/apps/sim/tools/supabase/upsert.ts b/apps/sim/tools/supabase/upsert.ts index d7ebf41a7e..e5d82c348d 100644 --- a/apps/sim/tools/supabase/upsert.ts +++ b/apps/sim/tools/supabase/upsert.ts @@ -43,7 +43,12 @@ export const upsertTool: ToolConfig `${supabaseBaseUrl(params.projectId)}/rest/v1/${params.table}?select=*`, + url: (params) => { + if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(params.table)) { + throw new Error('Invalid table name: must contain only letters, digits, and underscores') + } + return `${supabaseBaseUrl(params.projectId)}/rest/v1/${encodeURIComponent(params.table)}?select=*` + }, method: 'POST', headers: (params) => { const headers: Record = { From 2d86b7b50ea00165f882f5afb0272562d2325c86 Mon Sep 17 00:00:00 2001 From: waleed Date: Tue, 12 May 2026 15:04:51 -0700 Subject: [PATCH 04/17] fix(security): address PR review comments and harden deepsec fixes - fix(env): replace jsonb operators with transaction+FOR UPDATE read-modify-write - PUT: uses db.transaction + SELECT FOR UPDATE + JS merge to avoid lost-update race - DELETE: same pattern; fixes variable scope bug where current was referenced outside tx - removes broken || and - jsonb operators that fail on json-typed column - fix(ssh): trim truncated output consistently with non-truncated path - fix(gmail): remove redundant resolveOAuthAccountId call - adds credentialType field to CredentialAccessResult - authorizeCredentialUse now returns credentialType in all success paths - gmail/labels route uses authz.credentialType and 
authz.resolvedCredentialId directly - fix(supabase): centralize table identifier validation - adds validateDatabaseIdentifier() to input-validation.ts - all 8 supabase tools use the shared util instead of inline regex --- apps/sim/app/api/tools/gmail/labels/route.ts | 12 +- apps/sim/app/api/tools/ssh/utils.ts | 4 +- .../api/workspaces/[id]/environment/route.ts | 117 ++++++++++-------- apps/sim/lib/auth/credential-access.ts | 5 + .../sim/lib/core/security/input-validation.ts | 27 ++++ apps/sim/tools/supabase/count.ts | 6 +- apps/sim/tools/supabase/delete.ts | 6 +- apps/sim/tools/supabase/get_row.ts | 6 +- apps/sim/tools/supabase/insert.ts | 6 +- apps/sim/tools/supabase/query.ts | 6 +- apps/sim/tools/supabase/text_search.ts | 6 +- apps/sim/tools/supabase/update.ts | 6 +- apps/sim/tools/supabase/upsert.ts | 6 +- 13 files changed, 126 insertions(+), 87 deletions(-) diff --git a/apps/sim/app/api/tools/gmail/labels/route.ts b/apps/sim/app/api/tools/gmail/labels/route.ts index 6009125fe5..3b05cf12a9 100644 --- a/apps/sim/app/api/tools/gmail/labels/route.ts +++ b/apps/sim/app/api/tools/gmail/labels/route.ts @@ -10,7 +10,6 @@ import { getScopesForService } from '@/lib/oauth/utils' import { getServiceAccountToken, refreshAccessTokenIfNeeded, - resolveOAuthAccountId, ServiceAccountTokenError, } from '@/app/api/auth/oauth/utils' export const dynamic = 'force-dynamic' @@ -44,20 +43,15 @@ export const GET = withRouteHandler(async (request: NextRequest) => { credentialId, requireWorkflowIdForInternal: false, }) - if (!authz.ok || !authz.credentialOwnerUserId) { + if (!authz.ok || !authz.credentialOwnerUserId || !authz.resolvedCredentialId) { return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 }) } - const resolved = await resolveOAuthAccountId(credentialId) - if (!resolved) { - return NextResponse.json({ error: 'Credential not found' }, { status: 404 }) - } - let accessToken: string | null = null - if (resolved.credentialType === 'service_account' && 
resolved.credentialId) { + if (authz.credentialType === 'service_account') { accessToken = await getServiceAccountToken( - resolved.credentialId, + authz.resolvedCredentialId, getScopesForService('gmail'), impersonateEmail ) diff --git a/apps/sim/app/api/tools/ssh/utils.ts b/apps/sim/app/api/tools/ssh/utils.ts index 48a51eb76c..3d64440e22 100644 --- a/apps/sim/app/api/tools/ssh/utils.ts +++ b/apps/sim/app/api/tools/ssh/utils.ts @@ -197,10 +197,10 @@ export function executeSSHCommand(client: Client, command: string): Promise { resolve({ stdout: stdoutTruncated - ? `${stdout}\n[output truncated: exceeded 16MB limit]` + ? `${stdout.trim()}\n[output truncated: exceeded 16MB limit]` : stdout.trim(), stderr: stderrTruncated - ? `${stderr}\n[stderr truncated: exceeded 16MB limit]` + ? `${stderr.trim()}\n[stderr truncated: exceeded 16MB limit]` : stderr.trim(), exitCode: code ?? -1, }) diff --git a/apps/sim/app/api/workspaces/[id]/environment/route.ts b/apps/sim/app/api/workspaces/[id]/environment/route.ts index a4b32707ac..36d79d849d 100644 --- a/apps/sim/app/api/workspaces/[id]/environment/route.ts +++ b/apps/sim/app/api/workspaces/[id]/environment/route.ts @@ -96,16 +96,6 @@ export const PUT = withRouteHandler( if (!parsed.success) return parsed.response const { variables } = parsed.data.body - // Read existing encrypted ws vars - const existingRows = await db - .select() - .from(workspaceEnvironment) - .where(eq(workspaceEnvironment.workspaceId, workspaceId)) - .limit(1) - - const existingEncrypted: Record = (existingRows[0]?.variables as any) || {} - - // Encrypt incoming const encryptedIncoming = await Promise.all( Object.entries(variables).map(async ([key, value]) => { const { encrypted } = await encryptSecret(value) @@ -113,24 +103,40 @@ export const PUT = withRouteHandler( }) ).then((entries) => Object.fromEntries(entries)) - await db - .insert(workspaceEnvironment) - .values({ - id: generateId(), - workspaceId, - variables: encryptedIncoming, - createdAt: new 
Date(), - updatedAt: new Date(), - }) - .onConflictDoUpdate({ - target: [workspaceEnvironment.workspaceId], - set: { - variables: sql`${workspaceEnvironment.variables} || EXCLUDED.variables`, + const { existingEncrypted, merged } = await db.transaction(async (tx) => { + await tx.execute( + sql`SELECT id FROM workspace_environment WHERE workspace_id = ${workspaceId} FOR UPDATE` + ) + + const [existingRow] = await tx + .select() + .from(workspaceEnvironment) + .where(eq(workspaceEnvironment.workspaceId, workspaceId)) + .limit(1) + + const existing = ((existingRow?.variables as Record) ?? {}) as Record< + string, + string + > + const mergedVars = { ...existing, ...encryptedIncoming } + + await tx + .insert(workspaceEnvironment) + .values({ + id: generateId(), + workspaceId, + variables: mergedVars, + createdAt: new Date(), updatedAt: new Date(), - }, - }) + }) + .onConflictDoUpdate({ + target: [workspaceEnvironment.workspaceId], + set: { variables: mergedVars, updatedAt: new Date() }, + }) + + return { existingEncrypted: existing, merged: mergedVars } + }) - const merged = { ...existingEncrypted, ...encryptedIncoming } const newKeys = Object.keys(variables).filter((k) => !(k in existingEncrypted)) await createWorkspaceEnvCredentials({ workspaceId, newKeys, actingUserId: userId }) @@ -184,36 +190,43 @@ export const DELETE = withRouteHandler( if (!parsed.success) return parsed.response const { keys } = parsed.data.body - const wsRows = await db - .select() - .from(workspaceEnvironment) - .where(eq(workspaceEnvironment.workspaceId, workspaceId)) - .limit(1) - - const current: Record = (wsRows[0]?.variables as any) || {} - let changed = false - for (const k of keys) { - if (k in current) { - delete current[k] - changed = true + const result = await db.transaction(async (tx) => { + await tx.execute( + sql`SELECT id FROM workspace_environment WHERE workspace_id = ${workspaceId} FOR UPDATE` + ) + + const [existingRow] = await tx + .select() + .from(workspaceEnvironment) + 
.where(eq(workspaceEnvironment.workspaceId, workspaceId)) + .limit(1) + + if (!existingRow) return null + + const current: Record = + (existingRow.variables as Record) ?? {} + let modified = false + for (const k of keys) { + if (k in current) { + delete current[k] + modified = true + } } - } - if (!changed) { + if (!modified) return null + + await tx + .update(workspaceEnvironment) + .set({ variables: current, updatedAt: new Date() }) + .where(eq(workspaceEnvironment.workspaceId, workspaceId)) + + return { remainingKeysCount: Object.keys(current).length } + }) + + if (!result) { return NextResponse.json({ success: true }) } - await db - .update(workspaceEnvironment) - .set({ - variables: sql`${workspaceEnvironment.variables} - ARRAY[${sql.join( - keys.map((k) => sql`${k}`), - sql`, ` - )}]::text[]`, - updatedAt: new Date(), - }) - .where(eq(workspaceEnvironment.workspaceId, workspaceId)) - await deleteWorkspaceEnvCredentials({ workspaceId, removedKeys: keys }) recordAudit({ @@ -227,7 +240,7 @@ export const DELETE = withRouteHandler( description: `Removed ${keys.length} workspace environment variable(s)`, metadata: { removedKeys: keys, - remainingKeysCount: Object.keys(current).length, + remainingKeysCount: result.remainingKeysCount, }, request, }) diff --git a/apps/sim/lib/auth/credential-access.ts b/apps/sim/lib/auth/credential-access.ts index 05e017c87a..e1ad1690bd 100644 --- a/apps/sim/lib/auth/credential-access.ts +++ b/apps/sim/lib/auth/credential-access.ts @@ -13,6 +13,7 @@ export interface CredentialAccessResult { credentialOwnerUserId?: string workspaceId?: string resolvedCredentialId?: string + credentialType?: string } /** @@ -114,6 +115,7 @@ export async function authorizeCredentialUse( credentialOwnerUserId: actingUserId, workspaceId: platformCredential.workspaceId, resolvedCredentialId: platformCredential.id, + credentialType: 'service_account', } } @@ -182,6 +184,7 @@ export async function authorizeCredentialUse( credentialOwnerUserId: 
accountRow.userId, workspaceId: platformCredential.workspaceId, resolvedCredentialId: platformCredential.accountId, + credentialType: 'oauth', } } @@ -252,6 +255,7 @@ export async function authorizeCredentialUse( credentialOwnerUserId: accountRow.userId, workspaceId: workflowContext.workspaceId, resolvedCredentialId: workspaceCredential.accountId, + credentialType: 'oauth', } } @@ -279,5 +283,6 @@ export async function authorizeCredentialUse( requesterUserId: auth.userId, credentialOwnerUserId: legacyAccount.userId, resolvedCredentialId: credentialId, + credentialType: 'oauth', } } diff --git a/apps/sim/lib/core/security/input-validation.ts b/apps/sim/lib/core/security/input-validation.ts index 12591aeb25..98ac9e1c98 100644 --- a/apps/sim/lib/core/security/input-validation.ts +++ b/apps/sim/lib/core/security/input-validation.ts @@ -1593,3 +1593,30 @@ export function validateWorkdayTenantUrl( return { isValid: true, sanitized: url as string } } + +/** + * Validates a database identifier (table or column name) to prevent SQL injection. + * + * Accepts only identifiers that start with a letter or underscore and contain + * only letters, digits, and underscores — the safe subset of SQL identifiers. + * + * @param value - The identifier to validate + * @param paramName - Name of the parameter for error messages (e.g. 
'table', 'column') + * @returns ValidationResult with isValid flag and optional error message + */ +export function validateDatabaseIdentifier( + value: unknown, + paramName = 'identifier' +): ValidationResult { + if (typeof value !== 'string' || value.length === 0) { + return { isValid: false, error: `${paramName} is required` } + } + if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(value)) { + logger.warn('Invalid database identifier', { paramName, value: value.substring(0, 100) }) + return { + isValid: false, + error: `Invalid ${paramName}: must start with a letter or underscore and contain only letters, digits, and underscores`, + } + } + return { isValid: true, sanitized: value } +} diff --git a/apps/sim/tools/supabase/count.ts b/apps/sim/tools/supabase/count.ts index dd348d313b..7e5d2c0f3a 100644 --- a/apps/sim/tools/supabase/count.ts +++ b/apps/sim/tools/supabase/count.ts @@ -1,3 +1,4 @@ +import { validateDatabaseIdentifier } from '@/lib/core/security/input-validation' import type { SupabaseCountParams, SupabaseCountResponse } from '@/tools/supabase/types' import { supabaseBaseUrl } from '@/tools/supabase/utils' import type { ToolConfig } from '@/tools/types' @@ -50,9 +51,8 @@ export const countTool: ToolConfig = request: { url: (params) => { - if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(params.table)) { - throw new Error('Invalid table name: must contain only letters, digits, and underscores') - } + const tableValidation = validateDatabaseIdentifier(params.table, 'table') + if (!tableValidation.isValid) throw new Error(tableValidation.error) let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${encodeURIComponent(params.table)}?select=*` if (params.filter?.trim()) { diff --git a/apps/sim/tools/supabase/delete.ts b/apps/sim/tools/supabase/delete.ts index 96b337bd95..967229868e 100644 --- a/apps/sim/tools/supabase/delete.ts +++ b/apps/sim/tools/supabase/delete.ts @@ -1,3 +1,4 @@ +import { validateDatabaseIdentifier } from '@/lib/core/security/input-validation' import type { 
SupabaseDeleteParams, SupabaseDeleteResponse } from '@/tools/supabase/types' import { supabaseBaseUrl } from '@/tools/supabase/utils' import type { ToolConfig } from '@/tools/types' @@ -44,9 +45,8 @@ export const deleteTool: ToolConfig { - if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(params.table)) { - throw new Error('Invalid table name: must contain only letters, digits, and underscores') - } + const tableValidation = validateDatabaseIdentifier(params.table, 'table') + if (!tableValidation.isValid) throw new Error(tableValidation.error) let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${encodeURIComponent(params.table)}?select=*` diff --git a/apps/sim/tools/supabase/get_row.ts b/apps/sim/tools/supabase/get_row.ts index 8fd3909a9f..dec54e3d58 100644 --- a/apps/sim/tools/supabase/get_row.ts +++ b/apps/sim/tools/supabase/get_row.ts @@ -1,3 +1,4 @@ +import { validateDatabaseIdentifier } from '@/lib/core/security/input-validation' import type { SupabaseGetRowParams, SupabaseGetRowResponse } from '@/tools/supabase/types' import { supabaseBaseUrl } from '@/tools/supabase/utils' import type { ToolConfig } from '@/tools/types' @@ -50,9 +51,8 @@ export const getRowTool: ToolConfig { - if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(params.table)) { - throw new Error('Invalid table name: must contain only letters, digits, and underscores') - } + const tableValidation = validateDatabaseIdentifier(params.table, 'table') + if (!tableValidation.isValid) throw new Error(tableValidation.error) const selectColumns = params.select?.trim() || '*' let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${encodeURIComponent(params.table)}?select=${encodeURIComponent(selectColumns)}` diff --git a/apps/sim/tools/supabase/insert.ts b/apps/sim/tools/supabase/insert.ts index 2d9f4bccc7..39dc4264e5 100644 --- a/apps/sim/tools/supabase/insert.ts +++ b/apps/sim/tools/supabase/insert.ts @@ -1,3 +1,4 @@ +import { validateDatabaseIdentifier } from '@/lib/core/security/input-validation' import type { 
SupabaseInsertParams, SupabaseInsertResponse } from '@/tools/supabase/types' import { supabaseBaseUrl } from '@/tools/supabase/utils' import type { ToolConfig } from '@/tools/types' @@ -44,9 +45,8 @@ export const insertTool: ToolConfig { - if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(params.table)) { - throw new Error('Invalid table name: must contain only letters, digits, and underscores') - } + const tableValidation = validateDatabaseIdentifier(params.table, 'table') + if (!tableValidation.isValid) throw new Error(tableValidation.error) return `${supabaseBaseUrl(params.projectId)}/rest/v1/${encodeURIComponent(params.table)}?select=*` }, method: 'POST', diff --git a/apps/sim/tools/supabase/query.ts b/apps/sim/tools/supabase/query.ts index 6eb48b1a2b..0fcf1b75dd 100644 --- a/apps/sim/tools/supabase/query.ts +++ b/apps/sim/tools/supabase/query.ts @@ -1,3 +1,4 @@ +import { validateDatabaseIdentifier } from '@/lib/core/security/input-validation' import type { SupabaseQueryParams, SupabaseQueryResponse } from '@/tools/supabase/types' import { supabaseBaseUrl } from '@/tools/supabase/utils' import type { ToolConfig } from '@/tools/types' @@ -68,9 +69,8 @@ export const queryTool: ToolConfig = request: { url: (params) => { - if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(params.table)) { - throw new Error('Invalid table name: must contain only letters, digits, and underscores') - } + const tableValidation = validateDatabaseIdentifier(params.table, 'table') + if (!tableValidation.isValid) throw new Error(tableValidation.error) const selectColumns = params.select?.trim() || '*' let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${encodeURIComponent(params.table)}?select=${encodeURIComponent(selectColumns)}` diff --git a/apps/sim/tools/supabase/text_search.ts b/apps/sim/tools/supabase/text_search.ts index 0f9fff547e..aa7e7402b6 100644 --- a/apps/sim/tools/supabase/text_search.ts +++ b/apps/sim/tools/supabase/text_search.ts @@ -1,3 +1,4 @@ +import { validateDatabaseIdentifier } from 
'@/lib/core/security/input-validation' import type { SupabaseTextSearchParams, SupabaseTextSearchResponse } from '@/tools/supabase/types' import { supabaseBaseUrl } from '@/tools/supabase/utils' import type { ToolConfig } from '@/tools/types' @@ -74,9 +75,8 @@ export const textSearchTool: ToolConfig { - if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(params.table)) { - throw new Error('Invalid table name: must contain only letters, digits, and underscores') - } + const tableValidation = validateDatabaseIdentifier(params.table, 'table') + if (!tableValidation.isValid) throw new Error(tableValidation.error) const searchType = params.searchType || 'websearch' const language = params.language || 'english' diff --git a/apps/sim/tools/supabase/update.ts b/apps/sim/tools/supabase/update.ts index 5877c17ca4..28c26e53d8 100644 --- a/apps/sim/tools/supabase/update.ts +++ b/apps/sim/tools/supabase/update.ts @@ -1,3 +1,4 @@ +import { validateDatabaseIdentifier } from '@/lib/core/security/input-validation' import type { SupabaseUpdateParams, SupabaseUpdateResponse } from '@/tools/supabase/types' import { supabaseBaseUrl } from '@/tools/supabase/utils' import type { ToolConfig } from '@/tools/types' @@ -50,9 +51,8 @@ export const updateTool: ToolConfig { - if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(params.table)) { - throw new Error('Invalid table name: must contain only letters, digits, and underscores') - } + const tableValidation = validateDatabaseIdentifier(params.table, 'table') + if (!tableValidation.isValid) throw new Error(tableValidation.error) let url = `${supabaseBaseUrl(params.projectId)}/rest/v1/${encodeURIComponent(params.table)}?select=*` diff --git a/apps/sim/tools/supabase/upsert.ts b/apps/sim/tools/supabase/upsert.ts index e5d82c348d..8b0fe7213d 100644 --- a/apps/sim/tools/supabase/upsert.ts +++ b/apps/sim/tools/supabase/upsert.ts @@ -1,3 +1,4 @@ +import { validateDatabaseIdentifier } from '@/lib/core/security/input-validation' import type { SupabaseUpsertParams, 
SupabaseUpsertResponse } from '@/tools/supabase/types' import { supabaseBaseUrl } from '@/tools/supabase/utils' import type { ToolConfig } from '@/tools/types' @@ -44,9 +45,8 @@ export const upsertTool: ToolConfig { - if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(params.table)) { - throw new Error('Invalid table name: must contain only letters, digits, and underscores') - } + const tableValidation = validateDatabaseIdentifier(params.table, 'table') + if (!tableValidation.isValid) throw new Error(tableValidation.error) return `${supabaseBaseUrl(params.projectId)}/rest/v1/${encodeURIComponent(params.table)}?select=*` }, method: 'POST', From ba4738b1332bee517a73c2d2fe5cc3f5cba34df9 Mon Sep 17 00:00:00 2001 From: waleed Date: Tue, 12 May 2026 15:16:35 -0700 Subject: [PATCH 05/17] fix(workflows): fix VariableType assignment in admin workflow import route MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The intermediate Record cast used 'string' for the type field which TypeScript correctly rejected — WorkflowVariable.type is 'VariableType', not string. Changed the cast to use VariableType so both branches typecheck correctly. 
--- apps/sim/app/api/v1/admin/workflows/import/route.ts | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/apps/sim/app/api/v1/admin/workflows/import/route.ts b/apps/sim/app/api/v1/admin/workflows/import/route.ts index b3ad741416..5089e12115 100644 --- a/apps/sim/app/api/v1/admin/workflows/import/route.ts +++ b/apps/sim/app/api/v1/admin/workflows/import/route.ts @@ -34,6 +34,7 @@ import { } from '@/app/api/v1/admin/responses' import { extractWorkflowMetadata, + type VariableType, type WorkflowImportRequest, type WorkflowVariable, } from '@/app/api/v1/admin/types' @@ -126,14 +127,14 @@ export const POST = withRouteHandler( const variablesRecord: Record = {} const vars = workflowData.variables as Record< string, - { id?: string; name: string; type?: string; value: unknown } + { id?: string; name: string; type?: VariableType; value: unknown } > Object.entries(vars).forEach(([key, v]) => { const varId = v.id || key variablesRecord[varId] = { id: varId, name: v.name, - type: v.type || 'string', + type: v.type ?? 'string', value: v.value, } }) @@ -149,7 +150,7 @@ export const POST = withRouteHandler( variablesRecord[varId] = { id: varId, name: v.name, - type: v.type || 'string', + type: (v.type as VariableType) ?? 'string', value: v.value, } }) From 7e1104ee6ad3d06c03cef4c2efef8a5d99c23ad3 Mon Sep 17 00:00:00 2001 From: waleed Date: Tue, 12 May 2026 15:30:35 -0700 Subject: [PATCH 06/17] fix(a2a): handle Request objects in pinnedFetch URL extraction --- apps/sim/lib/a2a/utils.ts | 60 +++++++++++++++++++++++++-------------- 1 file changed, 38 insertions(+), 22 deletions(-) diff --git a/apps/sim/lib/a2a/utils.ts b/apps/sim/lib/a2a/utils.ts index 35a9faa51d..b30cb38576 100644 --- a/apps/sim/lib/a2a/utils.ts +++ b/apps/sim/lib/a2a/utils.ts @@ -68,32 +68,48 @@ export async function createA2AClient(agentUrl: string, apiKey?: string): Promis const resolvedIP = validation.resolvedIP! 
- const pinnedFetch = ( + const pinnedFetch = async ( input: Parameters[0], init?: Parameters[1] - ): Promise => - secureFetchWithPinnedIP(input.toString(), resolvedIP, { - method: init?.method, - headers: - init?.headers instanceof Headers - ? Object.fromEntries(init.headers.entries()) - : (init?.headers as Record | undefined), - body: - typeof init?.body === 'string' || Buffer.isBuffer(init?.body) - ? (init?.body as string | Buffer) - : init?.body instanceof Uint8Array + ): Promise => { + const url = input instanceof Request ? input.url : input.toString() + const method = init?.method ?? (input instanceof Request ? input.method : undefined) + + const rawHeaders = init?.headers ?? (input instanceof Request ? input.headers : undefined) + const headers = + rawHeaders instanceof Headers + ? Object.fromEntries(rawHeaders.entries()) + : (rawHeaders as Record | undefined) + + let body: string | Buffer | Uint8Array | undefined + if (init?.body !== undefined && init.body !== null) { + body = + typeof init.body === 'string' || Buffer.isBuffer(init.body) + ? (init.body as string | Buffer) + : init.body instanceof Uint8Array ? (init.body as Uint8Array) - : undefined, - signal: init?.signal instanceof AbortSignal ? init.signal : undefined, - }).then(async (res) => { - const headers = new Headers(res.headers.toRecord()) - const body = await res.text() - return new Response(body, { - status: res.status, - statusText: res.statusText, - headers, - }) + : undefined + } else if (input instanceof Request && !input.bodyUsed) { + const text = await input.text() + if (text) body = text + } + + const signal = + init?.signal instanceof AbortSignal + ? init.signal + : input instanceof Request && input.signal instanceof AbortSignal + ? 
input.signal + : undefined + + const res = await secureFetchWithPinnedIP(url, resolvedIP, { method, headers, body, signal }) + const resHeaders = new Headers(res.headers.toRecord()) + const resBody = await res.text() + return new Response(resBody, { + status: res.status, + statusText: res.statusText, + headers: resHeaders, }) + } const pinnedTransports = [ new JsonRpcTransportFactory({ fetchImpl: pinnedFetch }), From b7d57afa3a795ec1d709fa6f7aa444335814e856 Mon Sep 17 00:00:00 2001 From: waleed Date: Tue, 12 May 2026 15:46:48 -0700 Subject: [PATCH 07/17] fix(security): extract shared file-access guard; merge workspace/mothership branch --- apps/sim/app/api/files/authorization.ts | 31 +++++++++++++++++++ apps/sim/app/api/files/multipart/route.ts | 24 +------------- apps/sim/app/api/tools/sftp/upload/route.ts | 25 +++++---------- .../app/api/tools/sharepoint/upload/route.ts | 20 ++---------- apps/sim/app/api/tools/smtp/send/route.ts | 20 ++---------- 5 files changed, 46 insertions(+), 74 deletions(-) diff --git a/apps/sim/app/api/files/authorization.ts b/apps/sim/app/api/files/authorization.ts index a6fffe1d41..ef5183ae0d 100644 --- a/apps/sim/app/api/files/authorization.ts +++ b/apps/sim/app/api/files/authorization.ts @@ -2,6 +2,7 @@ import { db } from '@sim/db' import { document, knowledgeBase, workspaceFile } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq, isNull, like, or } from 'drizzle-orm' +import { NextResponse } from 'next/server' import { getFileMetadata } from '@/lib/uploads' import type { StorageContext } from '@/lib/uploads/config' import { BLOB_CHAT_CONFIG, S3_CHAT_CONFIG } from '@/lib/uploads/config' @@ -587,6 +588,36 @@ async function authorizeFileAccess( } } +/** + * Guard helper for tool routes that download user files from storage. + * + * Validates that `key` is a non-empty string, that `userId` is present, and + * that the authenticated user owns the file. 
Returns a 404 `NextResponse` on + * any failure so callers can `return` it immediately; returns `null` when + * access is granted. + */ +export async function assertToolFileAccess( + key: unknown, + userId: string | undefined, + requestId: string, + routeLogger: ReturnType +): Promise { + if (typeof key !== 'string' || key.length === 0) { + routeLogger.warn(`[${requestId}] File access check rejected: missing key`) + return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) + } + if (!userId) { + routeLogger.warn(`[${requestId}] File access check requires userId but none available`) + return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) + } + const hasAccess = await verifyFileAccess(key, userId) + if (!hasAccess) { + routeLogger.warn(`[${requestId}] File access denied for user`, { userId, key }) + return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) + } + return null +} + /** * Get chat storage configuration based on current storage provider */ diff --git a/apps/sim/app/api/files/multipart/route.ts b/apps/sim/app/api/files/multipart/route.ts index 84420450b6..836fc40ad6 100644 --- a/apps/sim/app/api/files/multipart/route.ts +++ b/apps/sim/app/api/files/multipart/route.ts @@ -136,29 +136,7 @@ export const POST = withRouteHandler(async (request: NextRequest) => { const config = getStorageConfig(storageContext) let customKey: string | undefined - if (context === 'workspace') { - const { MAX_WORKSPACE_FILE_SIZE } = await import('@/lib/uploads/shared/types') - if (typeof fileSize === 'number' && fileSize > MAX_WORKSPACE_FILE_SIZE) { - return NextResponse.json( - { error: `File size exceeds maximum of ${MAX_WORKSPACE_FILE_SIZE} bytes` }, - { status: 413 } - ) - } - - const { generateWorkspaceFileKey } = await import( - '@/lib/uploads/contexts/workspace/workspace-file-manager' - ) - customKey = generateWorkspaceFileKey(workspaceId, fileName) - - const { checkStorageQuota } = 
await import('@/lib/billing/storage') - const quotaCheck = await checkStorageQuota(userId, fileSize) - if (!quotaCheck.allowed) { - return NextResponse.json( - { error: quotaCheck.error || 'Storage limit exceeded' }, - { status: 413 } - ) - } - } else if (context === 'mothership') { + if (context === 'workspace' || context === 'mothership') { const { MAX_WORKSPACE_FILE_SIZE } = await import('@/lib/uploads/shared/types') if (typeof fileSize === 'number' && fileSize > MAX_WORKSPACE_FILE_SIZE) { return NextResponse.json( diff --git a/apps/sim/app/api/tools/sftp/upload/route.ts b/apps/sim/app/api/tools/sftp/upload/route.ts index 01242b2bfa..8acf93ca58 100644 --- a/apps/sim/app/api/tools/sftp/upload/route.ts +++ b/apps/sim/app/api/tools/sftp/upload/route.ts @@ -7,7 +7,7 @@ import { generateRequestId } from '@/lib/core/utils/request' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils' import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server' -import { verifyFileAccess } from '@/app/api/files/authorization' +import { assertToolFileAccess } from '@/app/api/files/authorization' import { createSftpConnection, getSftp, @@ -96,22 +96,13 @@ export const POST = withRouteHandler(async (request: NextRequest) => { for (const file of userFiles) { try { - if (typeof file.key !== 'string' || file.key.length === 0) { - logger.warn(`[${requestId}] File access check rejected: missing key`) - return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) - } - if (!authResult.userId) { - logger.warn(`[${requestId}] File access check requires userId but none available`) - return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) - } - const hasAccess = await verifyFileAccess(file.key, authResult.userId) - if (!hasAccess) { - logger.warn(`[${requestId}] File access denied for user`, { - userId: authResult.userId, - key: 
file.key, - }) - return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) - } + const denied = await assertToolFileAccess( + file.key, + authResult.userId, + requestId, + logger + ) + if (denied) return denied logger.info( `[${requestId}] Downloading file for upload: ${file.name} (${file.size} bytes)` ) diff --git a/apps/sim/app/api/tools/sharepoint/upload/route.ts b/apps/sim/app/api/tools/sharepoint/upload/route.ts index d8dc22f0c2..2229d1ecc6 100644 --- a/apps/sim/app/api/tools/sharepoint/upload/route.ts +++ b/apps/sim/app/api/tools/sharepoint/upload/route.ts @@ -9,7 +9,7 @@ import { generateRequestId } from '@/lib/core/utils/request' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils' import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server' -import { verifyFileAccess } from '@/app/api/files/authorization' +import { assertToolFileAccess } from '@/app/api/files/authorization' import type { MicrosoftGraphDriveItem } from '@/tools/onedrive/types' import type { SharepointSkippedFile, SharepointUploadError } from '@/tools/sharepoint/types' @@ -83,22 +83,8 @@ export const POST = withRouteHandler(async (request: NextRequest) => { const errors: SharepointUploadError[] = [] for (const userFile of userFiles) { - if (typeof userFile.key !== 'string' || userFile.key.length === 0) { - logger.warn(`[${requestId}] File access check rejected: missing key`) - return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) - } - if (!authResult.userId) { - logger.warn(`[${requestId}] File access check requires userId but none available`) - return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) - } - const hasAccess = await verifyFileAccess(userFile.key, authResult.userId) - if (!hasAccess) { - logger.warn(`[${requestId}] File access denied for user`, { - userId: authResult.userId, - 
key: userFile.key, - }) - return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) - } + const denied = await assertToolFileAccess(userFile.key, authResult.userId, requestId, logger) + if (denied) return denied logger.info(`[${requestId}] Uploading file: ${userFile.name}`) const buffer = await downloadFileFromStorage(userFile, requestId, logger) diff --git a/apps/sim/app/api/tools/smtp/send/route.ts b/apps/sim/app/api/tools/smtp/send/route.ts index fe6f6b27f5..ea1f5e16d5 100644 --- a/apps/sim/app/api/tools/smtp/send/route.ts +++ b/apps/sim/app/api/tools/smtp/send/route.ts @@ -10,7 +10,7 @@ import { generateRequestId } from '@/lib/core/utils/request' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils' import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server' -import { verifyFileAccess } from '@/app/api/files/authorization' +import { assertToolFileAccess } from '@/app/api/files/authorization' export const dynamic = 'force-dynamic' @@ -122,22 +122,8 @@ export const POST = withRouteHandler(async (request: NextRequest) => { const attachmentBuffers: { filename: string; content: Buffer; contentType: string }[] = [] for (const file of attachments) { - if (typeof file.key !== 'string' || file.key.length === 0) { - logger.warn(`[${requestId}] File access check rejected: missing key`) - return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) - } - if (!authResult.userId) { - logger.warn(`[${requestId}] File access check requires userId but none available`) - return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) - } - const hasAccess = await verifyFileAccess(file.key, authResult.userId) - if (!hasAccess) { - logger.warn(`[${requestId}] File access denied for user`, { - userId: authResult.userId, - key: file.key, - }) - return NextResponse.json({ success: false, error: 
'File not found' }, { status: 404 }) - } + const denied = await assertToolFileAccess(file.key, authResult.userId, requestId, logger) + if (denied) return denied try { logger.info(`[${requestId}] Downloading attachment: ${file.name} (${file.size} bytes)`) const buffer = await downloadFileFromStorage(file, requestId, logger) From 1fdb0df7ae6a3f5b82bcce80112ca6efa8ef5b2b Mon Sep 17 00:00:00 2001 From: waleed Date: Tue, 12 May 2026 16:08:02 -0700 Subject: [PATCH 08/17] fix(security): advisory lock for env first-insert race; handle all BodyInit types in pinnedFetch --- .../api/workspaces/[id]/environment/route.ts | 10 ++++------ apps/sim/lib/a2a/utils.ts | 17 +++++++++++------ 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/apps/sim/app/api/workspaces/[id]/environment/route.ts b/apps/sim/app/api/workspaces/[id]/environment/route.ts index 36d79d849d..017c5920c5 100644 --- a/apps/sim/app/api/workspaces/[id]/environment/route.ts +++ b/apps/sim/app/api/workspaces/[id]/environment/route.ts @@ -104,9 +104,9 @@ export const PUT = withRouteHandler( ).then((entries) => Object.fromEntries(entries)) const { existingEncrypted, merged } = await db.transaction(async (tx) => { - await tx.execute( - sql`SELECT id FROM workspace_environment WHERE workspace_id = ${workspaceId} FOR UPDATE` - ) + // Advisory lock serialises all writes for this workspaceId, including concurrent + // first-inserts where no row exists yet and FOR UPDATE would acquire nothing. 
+ await tx.execute(sql`SELECT pg_advisory_xact_lock(hashtext(${workspaceId}))`) const [existingRow] = await tx .select() @@ -191,9 +191,7 @@ export const DELETE = withRouteHandler( const { keys } = parsed.data.body const result = await db.transaction(async (tx) => { - await tx.execute( - sql`SELECT id FROM workspace_environment WHERE workspace_id = ${workspaceId} FOR UPDATE` - ) + await tx.execute(sql`SELECT pg_advisory_xact_lock(hashtext(${workspaceId}))`) const [existingRow] = await tx .select() diff --git a/apps/sim/lib/a2a/utils.ts b/apps/sim/lib/a2a/utils.ts index b30cb38576..288477071a 100644 --- a/apps/sim/lib/a2a/utils.ts +++ b/apps/sim/lib/a2a/utils.ts @@ -83,12 +83,17 @@ export async function createA2AClient(agentUrl: string, apiKey?: string): Promis let body: string | Buffer | Uint8Array | undefined if (init?.body !== undefined && init.body !== null) { - body = - typeof init.body === 'string' || Buffer.isBuffer(init.body) - ? (init.body as string | Buffer) - : init.body instanceof Uint8Array - ? 
(init.body as Uint8Array) - : undefined + if (typeof init.body === 'string' || Buffer.isBuffer(init.body)) { + body = init.body as string | Buffer + } else if (init.body instanceof Uint8Array) { + body = init.body + } else if (init.body instanceof ArrayBuffer) { + body = new Uint8Array(init.body) + } else { + // URLSearchParams, Blob, ReadableStream, FormData — read as text via Response + const text = await new Response(init.body as BodyInit).text() + if (text) body = text + } } else if (input instanceof Request && !input.bodyUsed) { const text = await input.text() if (text) body = text From feea2e355a6b3dfc7394c4a59b343f97c42d20a3 Mon Sep 17 00:00:00 2001 From: waleed Date: Tue, 12 May 2026 16:09:26 -0700 Subject: [PATCH 09/17] chore: remove inline comment from advisory lock --- apps/sim/app/api/workspaces/[id]/environment/route.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/apps/sim/app/api/workspaces/[id]/environment/route.ts b/apps/sim/app/api/workspaces/[id]/environment/route.ts index 017c5920c5..23bd4bd18f 100644 --- a/apps/sim/app/api/workspaces/[id]/environment/route.ts +++ b/apps/sim/app/api/workspaces/[id]/environment/route.ts @@ -104,8 +104,6 @@ export const PUT = withRouteHandler( ).then((entries) => Object.fromEntries(entries)) const { existingEncrypted, merged } = await db.transaction(async (tx) => { - // Advisory lock serialises all writes for this workspaceId, including concurrent - // first-inserts where no row exists yet and FOR UPDATE would acquire nothing. 
await tx.execute(sql`SELECT pg_advisory_xact_lock(hashtext(${workspaceId}))`) const [existingRow] = await tx From e1a37d7d6b99fcf3af801cec1850d00b9c4b1eb3 Mon Sep 17 00:00:00 2001 From: waleed Date: Tue, 12 May 2026 16:12:06 -0700 Subject: [PATCH 10/17] fix(security): remove stray comment; narrow credentialType to literal union --- apps/sim/lib/a2a/utils.ts | 1 - apps/sim/lib/auth/credential-access.ts | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/apps/sim/lib/a2a/utils.ts b/apps/sim/lib/a2a/utils.ts index 288477071a..7f27fad907 100644 --- a/apps/sim/lib/a2a/utils.ts +++ b/apps/sim/lib/a2a/utils.ts @@ -90,7 +90,6 @@ export async function createA2AClient(agentUrl: string, apiKey?: string): Promis } else if (init.body instanceof ArrayBuffer) { body = new Uint8Array(init.body) } else { - // URLSearchParams, Blob, ReadableStream, FormData — read as text via Response const text = await new Response(init.body as BodyInit).text() if (text) body = text } diff --git a/apps/sim/lib/auth/credential-access.ts b/apps/sim/lib/auth/credential-access.ts index e1ad1690bd..97a9bf50b1 100644 --- a/apps/sim/lib/auth/credential-access.ts +++ b/apps/sim/lib/auth/credential-access.ts @@ -13,7 +13,7 @@ export interface CredentialAccessResult { credentialOwnerUserId?: string workspaceId?: string resolvedCredentialId?: string - credentialType?: string + credentialType?: 'oauth' | 'service_account' } /** From 5719fd65bd7da3b318862c09f4810663e57c370c Mon Sep 17 00:00:00 2001 From: waleed Date: Tue, 12 May 2026 16:23:45 -0700 Subject: [PATCH 11/17] fix(security): add credentialId validation to wealthbox oauth route; fix null body override in pinnedFetch --- .../sim/app/api/auth/oauth/wealthbox/items/route.ts | 13 +++++++++++++ apps/sim/lib/a2a/utils.ts | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/apps/sim/app/api/auth/oauth/wealthbox/items/route.ts b/apps/sim/app/api/auth/oauth/wealthbox/items/route.ts index e56f46428b..6a31bcf3b9 100644 --- 
a/apps/sim/app/api/auth/oauth/wealthbox/items/route.ts +++ b/apps/sim/app/api/auth/oauth/wealthbox/items/route.ts @@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server' import { wealthboxOAuthItemsContract } from '@/lib/api/contracts/selectors/wealthbox' import { parseRequest } from '@/lib/api/server' import { authorizeCredentialUse } from '@/lib/auth/credential-access' +import { validatePathSegment } from '@/lib/core/security/input-validation' import { generateRequestId } from '@/lib/core/utils/request' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils' @@ -32,6 +33,18 @@ export const GET = withRouteHandler(async (request: NextRequest) => { const { credentialId, type } = parsed.data.query const query = parsed.data.query.query ?? '' + const credentialIdValidation = validatePathSegment(credentialId, { + paramName: 'credentialId', + maxLength: 100, + allowHyphens: true, + allowUnderscores: true, + allowDots: false, + }) + if (!credentialIdValidation.isValid) { + logger.warn(`[${requestId}] Invalid credentialId format: ${credentialId}`) + return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 }) + } + const authz = await authorizeCredentialUse(request, { credentialId, requireWorkflowIdForInternal: false, diff --git a/apps/sim/lib/a2a/utils.ts b/apps/sim/lib/a2a/utils.ts index 7f27fad907..38198d33fe 100644 --- a/apps/sim/lib/a2a/utils.ts +++ b/apps/sim/lib/a2a/utils.ts @@ -93,7 +93,7 @@ export async function createA2AClient(agentUrl: string, apiKey?: string): Promis const text = await new Response(init.body as BodyInit).text() if (text) body = text } - } else if (input instanceof Request && !input.bodyUsed) { + } else if (init?.body === undefined && input instanceof Request && !input.bodyUsed) { const text = await input.text() if (text) body = text } From 988ce3335d5b4624f8a142c7d0432beacce48a30 Mon Sep 17 00:00:00 2001 From: waleed 
Date: Tue, 12 May 2026 16:36:47 -0700 Subject: [PATCH 12/17] fix(security): stream A2A response body to unblock SSE; keep text/json/arrayBuffer for non-streaming callers --- apps/sim/lib/a2a/utils.ts | 3 +- .../core/security/input-validation.server.ts | 120 ++++++++++-------- 2 files changed, 66 insertions(+), 57 deletions(-) diff --git a/apps/sim/lib/a2a/utils.ts b/apps/sim/lib/a2a/utils.ts index 38198d33fe..366e0b6c2a 100644 --- a/apps/sim/lib/a2a/utils.ts +++ b/apps/sim/lib/a2a/utils.ts @@ -107,8 +107,7 @@ export async function createA2AClient(agentUrl: string, apiKey?: string): Promis const res = await secureFetchWithPinnedIP(url, resolvedIP, { method, headers, body, signal }) const resHeaders = new Headers(res.headers.toRecord()) - const resBody = await res.text() - return new Response(resBody, { + return new Response(res.body, { status: res.status, statusText: res.statusText, headers: resHeaders, diff --git a/apps/sim/lib/core/security/input-validation.server.ts b/apps/sim/lib/core/security/input-validation.server.ts index 90c65eca62..e72038993f 100644 --- a/apps/sim/lib/core/security/input-validation.server.ts +++ b/apps/sim/lib/core/security/input-validation.server.ts @@ -251,6 +251,7 @@ export interface SecureFetchResponse { status: number statusText: string headers: SecureFetchHeaders + body: ReadableStream | null text: () => Promise json: () => Promise arrayBuffer: () => Promise @@ -361,67 +362,76 @@ export async function secureFetchWithPinnedIP( return } - const chunks: Buffer[] = [] - let totalBytes = 0 - let responseTerminated = false - - res.on('data', (chunk: Buffer) => { - if (responseTerminated) return - - totalBytes += chunk.length - if ( - typeof maxResponseBytes === 'number' && - maxResponseBytes > 0 && - totalBytes > maxResponseBytes - ) { - responseTerminated = true - res.destroy(new Error(`Response exceeded maximum size of ${maxResponseBytes} bytes`)) - return + // Parse headers immediately — they're available before any body data arrives + 
const headersRecord: Record = {} + let setCookieArray: string[] = [] + for (const [key, value] of Object.entries(res.headers)) { + const lowerKey = key.toLowerCase() + if (lowerKey === 'set-cookie') { + if (Array.isArray(value)) { + setCookieArray = value + headersRecord[lowerKey] = value.join(', ') + } else if (typeof value === 'string') { + setCookieArray = [value] + headersRecord[lowerKey] = value + } + } else if (typeof value === 'string') { + headersRecord[lowerKey] = value + } else if (Array.isArray(value)) { + headersRecord[lowerKey] = value.join(', ') } + } - chunks.push(chunk) - }) - - res.on('error', (error) => { - settledReject(error) + let totalBytes = 0 + const nodeRes = res + const body = new ReadableStream({ + start(controller) { + nodeRes.on('data', (chunk: Buffer) => { + totalBytes += chunk.length + if ( + typeof maxResponseBytes === 'number' && + maxResponseBytes > 0 && + totalBytes > maxResponseBytes + ) { + controller.error( + new Error(`Response exceeded maximum size of ${maxResponseBytes} bytes`) + ) + nodeRes.destroy() + return + } + controller.enqueue(new Uint8Array(chunk)) + }) + nodeRes.on('end', () => controller.close()) + nodeRes.on('error', (err) => controller.error(err)) + }, + cancel() { + nodeRes.destroy() + }, }) - res.on('end', () => { - if (responseTerminated) return - const bodyBuffer = Buffer.concat(chunks) - const body = bodyBuffer.toString('utf-8') - const headersRecord: Record = {} - let setCookieArray: string[] = [] - for (const [key, value] of Object.entries(res.headers)) { - const lowerKey = key.toLowerCase() - if (lowerKey === 'set-cookie') { - if (Array.isArray(value)) { - setCookieArray = value - headersRecord[lowerKey] = value.join(', ') - } else if (typeof value === 'string') { - setCookieArray = [value] - headersRecord[lowerKey] = value - } - } else if (typeof value === 'string') { - headersRecord[lowerKey] = value - } else if (Array.isArray(value)) { - headersRecord[lowerKey] = value.join(', ') - } + async function 
readBodyAsBuffer(): Promise { + const reader = body.getReader() + const buffers: Uint8Array[] = [] + while (true) { + const { done, value } = await reader.read() + if (done) break + if (value) buffers.push(value) } + return Buffer.concat(buffers.map((b) => Buffer.from(b))) + } - settledResolve({ - ok: statusCode >= 200 && statusCode < 300, - status: statusCode, - statusText: res.statusMessage || '', - headers: new SecureFetchHeaders(headersRecord, setCookieArray), - text: async () => body, - json: async () => JSON.parse(body), - arrayBuffer: async () => - bodyBuffer.buffer.slice( - bodyBuffer.byteOffset, - bodyBuffer.byteOffset + bodyBuffer.byteLength - ), - }) + settledResolve({ + ok: statusCode >= 200 && statusCode < 300, + status: statusCode, + statusText: res.statusMessage || '', + headers: new SecureFetchHeaders(headersRecord, setCookieArray), + body, + text: async () => (await readBodyAsBuffer()).toString('utf-8'), + json: async () => JSON.parse((await readBodyAsBuffer()).toString('utf-8')), + arrayBuffer: async () => { + const buf = await readBodyAsBuffer() + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength) + }, }) }) From e56cb1c88109dd1c1fae5a09d0636e52d5a1e140 Mon Sep 17 00:00:00 2001 From: waleed Date: Tue, 12 May 2026 16:46:00 -0700 Subject: [PATCH 13/17] fix(security): resolve credentialId guard on OneDrive, use assertToolFileAccess in WordPress, memoize body buffer to prevent silent empty reads, fix ArrayBuffer type cast --- .../app/api/tools/onedrive/folders/route.ts | 2 +- .../app/api/tools/wordpress/upload/route.ts | 20 +++------------- .../core/security/input-validation.server.ts | 24 ++++++++++++------- 3 files changed, 19 insertions(+), 27 deletions(-) diff --git a/apps/sim/app/api/tools/onedrive/folders/route.ts b/apps/sim/app/api/tools/onedrive/folders/route.ts index 3d28c1f2a4..4c65c4190f 100644 --- a/apps/sim/app/api/tools/onedrive/folders/route.ts +++ b/apps/sim/app/api/tools/onedrive/folders/route.ts @@ -47,7 +47,7 
@@ export const GET = withRouteHandler(async (request: NextRequest) => { credentialId, requireWorkflowIdForInternal: false, }) - if (!authz.ok || !authz.credentialOwnerUserId) { + if (!authz.ok || !authz.credentialOwnerUserId || !authz.resolvedCredentialId) { return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 }) } diff --git a/apps/sim/app/api/tools/wordpress/upload/route.ts b/apps/sim/app/api/tools/wordpress/upload/route.ts index ed52dc6556..859aef52f5 100644 --- a/apps/sim/app/api/tools/wordpress/upload/route.ts +++ b/apps/sim/app/api/tools/wordpress/upload/route.ts @@ -11,7 +11,7 @@ import { processSingleFileToUserFile, } from '@/lib/uploads/utils/file-utils' import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server' -import { verifyFileAccess } from '@/app/api/files/authorization' +import { assertToolFileAccess } from '@/app/api/files/authorization' export const dynamic = 'force-dynamic' @@ -78,22 +78,8 @@ export const POST = withRouteHandler(async (request: NextRequest) => { ) } - if (typeof userFile.key !== 'string' || userFile.key.length === 0) { - logger.warn(`[${requestId}] File access check rejected: missing key`) - return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) - } - if (!authResult.userId) { - logger.warn(`[${requestId}] File access check requires userId but none available`) - return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) - } - const hasAccess = await verifyFileAccess(userFile.key, authResult.userId) - if (!hasAccess) { - logger.warn(`[${requestId}] File access denied for user`, { - userId: authResult.userId, - key: userFile.key, - }) - return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 }) - } + const denied = await assertToolFileAccess(userFile.key, authResult.userId, requestId, logger) + if (denied) return denied logger.info(`[${requestId}] Downloading file from storage`, { fileName: 
userFile.name, diff --git a/apps/sim/lib/core/security/input-validation.server.ts b/apps/sim/lib/core/security/input-validation.server.ts index e72038993f..b90ab6ead4 100644 --- a/apps/sim/lib/core/security/input-validation.server.ts +++ b/apps/sim/lib/core/security/input-validation.server.ts @@ -409,15 +409,21 @@ export async function secureFetchWithPinnedIP( }, }) - async function readBodyAsBuffer(): Promise { - const reader = body.getReader() - const buffers: Uint8Array[] = [] - while (true) { - const { done, value } = await reader.read() - if (done) break - if (value) buffers.push(value) + let bodyBufferPromise: Promise | null = null + function readBodyAsBuffer(): Promise { + if (!bodyBufferPromise) { + bodyBufferPromise = (async () => { + const reader = body.getReader() + const buffers: Uint8Array[] = [] + while (true) { + const { done, value } = await reader.read() + if (done) break + if (value) buffers.push(value) + } + return Buffer.concat(buffers.map((b) => Buffer.from(b))) + })() } - return Buffer.concat(buffers.map((b) => Buffer.from(b))) + return bodyBufferPromise } settledResolve({ @@ -430,7 +436,7 @@ export async function secureFetchWithPinnedIP( json: async () => JSON.parse((await readBodyAsBuffer()).toString('utf-8')), arrayBuffer: async () => { const buf = await readBodyAsBuffer() - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength) + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength) as ArrayBuffer }, }) }) From 7152789a17016100680d290cdd2633f2128d1f94 Mon Sep 17 00:00:00 2001 From: waleed Date: Tue, 12 May 2026 16:57:35 -0700 Subject: [PATCH 14/17] fix(security): handle string[][] HeadersInit format in pinnedFetch --- apps/sim/lib/a2a/utils.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/apps/sim/lib/a2a/utils.ts b/apps/sim/lib/a2a/utils.ts index 366e0b6c2a..d89a8cec04 100644 --- a/apps/sim/lib/a2a/utils.ts +++ b/apps/sim/lib/a2a/utils.ts @@ -79,7 +79,9 @@ export async function 
createA2AClient(agentUrl: string, apiKey?: string): Promis const headers = rawHeaders instanceof Headers ? Object.fromEntries(rawHeaders.entries()) - : (rawHeaders as Record | undefined) + : Array.isArray(rawHeaders) + ? Object.fromEntries(rawHeaders as string[][]) + : (rawHeaders as Record | undefined) let body: string | Buffer | Uint8Array | undefined if (init?.body !== undefined && init.body !== null) { From 5940ed2fe346ad60e3852878cef4c43a748c96ff Mon Sep 17 00:00:00 2001 From: waleed Date: Tue, 12 May 2026 17:10:51 -0700 Subject: [PATCH 15/17] fix(security): keep abort listener alive during body streaming; clean up in stream end/error/cancel --- .../sim/lib/core/security/input-validation.server.ts | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/apps/sim/lib/core/security/input-validation.server.ts b/apps/sim/lib/core/security/input-validation.server.ts index b90ab6ead4..fa9fa0134d 100644 --- a/apps/sim/lib/core/security/input-validation.server.ts +++ b/apps/sim/lib/core/security/input-validation.server.ts @@ -401,10 +401,17 @@ export async function secureFetchWithPinnedIP( } controller.enqueue(new Uint8Array(chunk)) }) - nodeRes.on('end', () => controller.close()) - nodeRes.on('error', (err) => controller.error(err)) + nodeRes.on('end', () => { + cleanupAbort() + controller.close() + }) + nodeRes.on('error', (err) => { + cleanupAbort() + controller.error(err) + }) }, cancel() { + cleanupAbort() nodeRes.destroy() }, }) @@ -449,7 +456,6 @@ export async function secureFetchWithPinnedIP( } } const settledResolve: typeof resolve = (value) => { - cleanupAbort() resolve(value) } const settledReject: typeof reject = (reason) => { From 0073513e0a0d8b300a1953baffe869fd38b72890 Mon Sep 17 00:00:00 2001 From: waleed Date: Tue, 12 May 2026 17:11:35 -0700 Subject: [PATCH 16/17] chore: remove extraneous inline comment --- apps/sim/lib/core/security/input-validation.server.ts | 1 - 1 file changed, 1 deletion(-) diff --git 
a/apps/sim/lib/core/security/input-validation.server.ts b/apps/sim/lib/core/security/input-validation.server.ts index fa9fa0134d..157b1f5523 100644 --- a/apps/sim/lib/core/security/input-validation.server.ts +++ b/apps/sim/lib/core/security/input-validation.server.ts @@ -362,7 +362,6 @@ export async function secureFetchWithPinnedIP( return } - // Parse headers immediately — they're available before any body data arrives const headersRecord: Record = {} let setCookieArray: string[] = [] for (const [key, value] of Object.entries(res.headers)) { From 31e97dfbaaa9f70c99f5ae7d26a1fa83f1359e4f Mon Sep 17 00:00:00 2001 From: waleed Date: Tue, 12 May 2026 17:24:50 -0700 Subject: [PATCH 17/17] fix(security): cleanup abort listener when maxResponseBytes limit is exceeded --- apps/sim/lib/core/security/input-validation.server.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/sim/lib/core/security/input-validation.server.ts b/apps/sim/lib/core/security/input-validation.server.ts index 157b1f5523..e16bda7c6e 100644 --- a/apps/sim/lib/core/security/input-validation.server.ts +++ b/apps/sim/lib/core/security/input-validation.server.ts @@ -392,6 +392,7 @@ export async function secureFetchWithPinnedIP( maxResponseBytes > 0 && totalBytes > maxResponseBytes ) { + cleanupAbort() controller.error( new Error(`Response exceeded maximum size of ${maxResponseBytes} bytes`) )