From 60b776ae82433dac9ac81e7145b5309e627dfeb4 Mon Sep 17 00:00:00 2001 From: Waleed Latif Date: Fri, 15 May 2026 13:19:16 -0700 Subject: [PATCH 1/3] improvement(providers): align attachment dispatch to vendor SDK types Post-merge audit of #4610 surfaced three follow-ups: 1. xAI Grok vision was blocked. Grok runs through the OpenAI-compatible chat-completions endpoint, so removing xAI from UNSUPPORTED_FILE_PROVIDERS and routing it through the image-only branch restores image attachments on vision models. 2. Azure OpenAI chat-completions deployments blocked any file attachment. Added a per-message image_url parts path; documents still require the Responses API endpoint and throw a clear, actionable error. 3. Wire shapes were loosely typed (`Record<string, unknown>` arrays). Replaced with `satisfies` clauses against each vendor SDK union at every push site: OpenAI Responses/Chat, Anthropic ContentBlockParam, Gemini Part, Bedrock ContentBlock members. AnthropicImageMediaType now derives from Base64ImageSource['media_type'] so it tracks SDK updates. Also collapsed the validation cascade into an exhaustive switch with `never` enforcement, and dropped the redundant per-provider formatMessagesForProvider call from xai/index.ts (providers/index.ts already runs the dispatcher centrally). 
--- apps/sim/providers/attachments.test.ts | 1 + apps/sim/providers/attachments.ts | 206 ++++++++++++++--------- apps/sim/providers/azure-openai/index.ts | 26 ++- apps/sim/providers/openai/utils.test.ts | 1 + apps/sim/providers/xai/index.ts | 6 +- 5 files changed, 150 insertions(+), 90 deletions(-) diff --git a/apps/sim/providers/attachments.test.ts b/apps/sim/providers/attachments.test.ts index 813a7c54b53..1b3541b10ba 100644 --- a/apps/sim/providers/attachments.test.ts +++ b/apps/sim/providers/attachments.test.ts @@ -66,6 +66,7 @@ describe('provider attachments', () => { { type: 'input_image', image_url: 'data:image/png;base64,iVBORw0KGgo=', + detail: 'auto', }, { type: 'input_file', diff --git a/apps/sim/providers/attachments.ts b/apps/sim/providers/attachments.ts index d165a1fc9e8..840d616efd0 100644 --- a/apps/sim/providers/attachments.ts +++ b/apps/sim/providers/attachments.ts @@ -1,3 +1,7 @@ +import type Anthropic from '@anthropic-ai/sdk' +import type { ContentBlock } from '@aws-sdk/client-bedrock-runtime' +import type { Part } from '@google/genai' +import type OpenAI from 'openai' import { getContentType, getExtensionFromMimeType, @@ -49,7 +53,7 @@ type ProviderFormattedMessage = { [key: string]: unknown } -const AGENT_ATTACHMENT_MAX_BYTES = 10 * 1024 * 1024 +export const AGENT_ATTACHMENT_MAX_BYTES = 10 * 1024 * 1024 const PDF_MIME_TYPE = 'application/pdf' const DOCUMENT_MIME_TYPES = new Set( @@ -76,15 +80,7 @@ const BEDROCK_DOCUMENT_FORMATS = new Set([ const BEDROCK_IMAGE_FORMATS = new Set(['png', 'jpeg', 'jpg', 'gif', 'webp']) const BEDROCK_VIDEO_FORMATS = new Set(['mp4', 'mov', 'mkv', 'webm']) -const IMAGE_ONLY_PROVIDERS = new Set([ - 'mistral', - 'groq', - 'fireworks', - 'ollama', - 'vllm', -]) - -const UNSUPPORTED_FILE_PROVIDERS = new Set(['xai', 'deepseek', 'cerebras']) +const UNSUPPORTED_FILE_PROVIDERS = new Set(['deepseek', 'cerebras']) const PROVIDER_SUPPORTED_LABELS: Record = { openai: 'images and documents through the Responses API 
input_image/input_file parts', @@ -97,7 +93,7 @@ const PROVIDER_SUPPORTED_LABELS: Record = { fireworks: 'images through image_url message parts on vision models', ollama: 'images through image_url message parts on vision models', vllm: 'images through image_url message parts on multimodal models', - xai: 'no file attachments in the current chat-completions adapter', + xai: 'images through image_url message parts on Grok vision models', deepseek: 'no file attachments in the current API adapter', cerebras: 'no file attachments in the current API adapter', } @@ -119,10 +115,6 @@ export function getAttachmentProvider(providerId: ProviderId | string): Attachme return null } -export function getProviderAttachmentMaxBytes(_providerId: ProviderId | string): number { - return AGENT_ATTACHMENT_MAX_BYTES -} - export function supportsFileAttachments(providerId: ProviderId | string): boolean { const provider = getAttachmentProvider(providerId) return Boolean(provider && !UNSUPPORTED_FILE_PROVIDERS.has(provider)) @@ -223,8 +215,44 @@ function toDataUrl(mimeType: string, base64: string): string { return `data:${mimeType};base64,${base64}` } -function getProviderSupportedLabel(provider: AttachmentProvider): string { - return PROVIDER_SUPPORTED_LABELS[provider] +function isMimeTypeSupportedByProvider( + provider: AttachmentProvider, + mimeType: string, + contentType: PreparedProviderAttachment['contentType'], + extension: string +): boolean { + switch (provider) { + case 'openai': + return isImageMimeType(mimeType) || isOpenAIDocumentMimeType(mimeType) + case 'anthropic': + return ( + isImageMimeType(mimeType) || mimeType === PDF_MIME_TYPE || isTextDocumentMimeType(mimeType) + ) + case 'google': + return GEMINI_INLINE_MIME_TYPES.has(mimeType) || isTextDocumentMimeType(mimeType) + case 'bedrock': + return ( + (contentType === 'image' && BEDROCK_IMAGE_FORMATS.has(extension)) || + (contentType === 'document' && BEDROCK_DOCUMENT_FORMATS.has(extension)) || + (contentType === 'video' && 
BEDROCK_VIDEO_FORMATS.has(extension)) + ) + case 'openrouter': + return isImageMimeType(mimeType) || mimeType === PDF_MIME_TYPE + case 'mistral': + case 'groq': + case 'fireworks': + case 'ollama': + case 'vllm': + case 'xai': + return isImageMimeType(mimeType) + case 'deepseek': + case 'cerebras': + return false + default: { + const _exhaustive: never = provider + return _exhaustive + } + } } function validateProviderSupport( @@ -233,26 +261,9 @@ function validateProviderSupport( providerId: ProviderId | string ) { const { filename, mimeType, contentType, extension } = attachment - const supportedLabel = getProviderSupportedLabel(provider) - - const supported = - provider === 'openai' - ? isImageMimeType(mimeType) || isOpenAIDocumentMimeType(mimeType) - : provider === 'anthropic' - ? isImageMimeType(mimeType) || - mimeType === PDF_MIME_TYPE || - isTextDocumentMimeType(mimeType) - : provider === 'google' - ? GEMINI_INLINE_MIME_TYPES.has(mimeType) || isTextDocumentMimeType(mimeType) - : provider === 'bedrock' - ? (contentType === 'image' && BEDROCK_IMAGE_FORMATS.has(extension)) || - (contentType === 'document' && BEDROCK_DOCUMENT_FORMATS.has(extension)) || - (contentType === 'video' && BEDROCK_VIDEO_FORMATS.has(extension)) - : provider === 'openrouter' - ? isImageMimeType(mimeType) || mimeType === PDF_MIME_TYPE - : IMAGE_ONLY_PROVIDERS.has(provider) - ? isImageMimeType(mimeType) - : !UNSUPPORTED_FILE_PROVIDERS.has(provider) + const supportedLabel = PROVIDER_SUPPORTED_LABELS[provider] + + const supported = isMimeTypeSupportedByProvider(provider, mimeType, contentType, extension) if (!supported) { throw new Error( @@ -274,25 +285,23 @@ export function prepareProviderAttachments( if (UNSUPPORTED_FILE_PROVIDERS.has(provider)) { throw new Error( - `File attachments are not supported for provider "${providerId}" in the current adapter. 
Supported attachments: ${getProviderSupportedLabel(provider)}.` + `File attachments are not supported for provider "${providerId}" in the current adapter. Supported attachments: ${PROVIDER_SUPPORTED_LABELS[provider]}.` ) } - const maxBytes = getProviderAttachmentMaxBytes(providerId) - return files.map((file) => { const declaredMimeType = inferAttachmentMimeType(file) const contentType = getAttachmentContentType(declaredMimeType) if (!contentType) { throw new Error( - `File "${file.name}" has MIME type "${declaredMimeType}", which is not supported by provider "${providerId}". Supported attachments: ${getProviderSupportedLabel(provider)}.` + `File "${file.name}" has MIME type "${declaredMimeType}", which is not supported by provider "${providerId}". Supported attachments: ${PROVIDER_SUPPORTED_LABELS[provider]}.` ) } - if (Number.isFinite(file.size) && file.size > maxBytes) { + if (Number.isFinite(file.size) && file.size > AGENT_ATTACHMENT_MAX_BYTES) { const sizeMB = (file.size / (1024 * 1024)).toFixed(2) - const maxMB = (maxBytes / (1024 * 1024)).toFixed(0) + const maxMB = (AGENT_ATTACHMENT_MAX_BYTES / (1024 * 1024)).toFixed(0) throw new Error( `File "${file.name}" (${sizeMB}MB) exceeds the ${maxMB}MB agent attachment limit for provider "${providerId}"` ) @@ -334,28 +343,36 @@ export function prepareProviderAttachments( }) } +type OpenAIResponsesInputContent = OpenAI.Responses.ResponseInputContent +type OpenAIChatContentPart = OpenAI.Chat.Completions.ChatCompletionContentPart +type AnthropicImageMediaType = Anthropic.Messages.Base64ImageSource['media_type'] + export function buildOpenAIMessageContent( content: string | null | undefined, files: UserFile[] | undefined, providerId: ProviderId | string -): string | Array> { +): string | OpenAIResponsesInputContent[] { const attachments = prepareProviderAttachments(files, providerId) if (attachments.length === 0) return content ?? 
'' - const parts: Array> = [] + const parts: OpenAIResponsesInputContent[] = [] if (content) { - parts.push({ type: 'input_text', text: content }) + parts.push({ type: 'input_text', text: content } satisfies OpenAI.Responses.ResponseInputText) } for (const attachment of attachments) { if (attachment.contentType === 'image') { - parts.push({ type: 'input_image', image_url: attachment.dataUrl }) + parts.push({ + type: 'input_image', + image_url: attachment.dataUrl, + detail: 'auto', + } satisfies OpenAI.Responses.ResponseInputImage) } else { parts.push({ type: 'input_file', filename: attachment.filename, file_data: attachment.dataUrl, - }) + } satisfies OpenAI.Responses.ResponseInputFile) } } @@ -366,10 +383,10 @@ export function buildAnthropicMessageContent( content: string | null | undefined, files: UserFile[] | undefined, providerId: ProviderId | string -): Array> { - const parts: Array> = [] +): Anthropic.Messages.ContentBlockParam[] { + const parts: Anthropic.Messages.ContentBlockParam[] = [] if (content) { - parts.push({ type: 'text', text: content }) + parts.push({ type: 'text', text: content } satisfies Anthropic.Messages.TextBlockParam) } for (const attachment of prepareProviderAttachments(files, providerId)) { @@ -378,10 +395,10 @@ export function buildAnthropicMessageContent( type: 'image', source: { type: 'base64', - media_type: attachment.providerMimeType, + media_type: attachment.providerMimeType as AnthropicImageMediaType, data: attachment.base64, }, - }) + } satisfies Anthropic.Messages.ImageBlockParam) } else if (attachment.text) { parts.push({ type: 'document', @@ -391,17 +408,17 @@ export function buildAnthropicMessageContent( data: attachment.text, }, title: attachment.filename, - }) + } satisfies Anthropic.Messages.DocumentBlockParam) } else { parts.push({ type: 'document', source: { type: 'base64', - media_type: attachment.providerMimeType, + media_type: 'application/pdf', data: attachment.base64, }, title: attachment.filename, - }) + } 
satisfies Anthropic.Messages.DocumentBlockParam) } } @@ -412,10 +429,10 @@ export function buildGeminiMessageParts( content: string | null | undefined, files: UserFile[] | undefined, providerId: ProviderId | string -): Array> { - const parts: Array> = [] +): Part[] { + const parts: Part[] = [] if (content) { - parts.push({ text: content }) + parts.push({ text: content } satisfies Part) } for (const attachment of prepareProviderAttachments(files, providerId)) { @@ -424,7 +441,7 @@ export function buildGeminiMessageParts( mimeType: attachment.providerMimeType, data: attachment.base64, }, - }) + } satisfies Part) } return parts @@ -434,13 +451,16 @@ export function buildOpenAICompatibleChatContent( content: string | null | undefined, files: UserFile[] | undefined, providerId: ProviderId | string -): string | Array> { +): string | OpenAIChatContentPart[] { const attachments = prepareProviderAttachments(files, providerId) if (attachments.length === 0) return content ?? '' - const parts: Array> = [] + const parts: OpenAIChatContentPart[] = [] if (content) { - parts.push({ type: 'text', text: content }) + parts.push({ + type: 'text', + text: content, + } satisfies OpenAI.Chat.Completions.ChatCompletionContentPartText) } for (const attachment of attachments) { @@ -449,7 +469,7 @@ export function buildOpenAICompatibleChatContent( image_url: { url: attachment.dataUrl, }, - }) + } satisfies OpenAI.Chat.Completions.ChatCompletionContentPartImage) } return parts @@ -459,13 +479,16 @@ export function buildOpenRouterMessageContent( content: string | null | undefined, files: UserFile[] | undefined, providerId: ProviderId | string -): string | Array> { +): string | OpenAIChatContentPart[] { const attachments = prepareProviderAttachments(files, providerId) if (attachments.length === 0) return content ?? 
'' - const parts: Array> = [] + const parts: OpenAIChatContentPart[] = [] if (content) { - parts.push({ type: 'text', text: content }) + parts.push({ + type: 'text', + text: content, + } satisfies OpenAI.Chat.Completions.ChatCompletionContentPartText) } for (const attachment of attachments) { @@ -473,7 +496,7 @@ export function buildOpenRouterMessageContent( parts.push({ type: 'image_url', image_url: { url: attachment.dataUrl }, - }) + } satisfies OpenAI.Chat.Completions.ChatCompletionContentPartImage) } else { parts.push({ type: 'file', @@ -481,7 +504,7 @@ export function buildOpenRouterMessageContent( filename: attachment.filename, file_data: attachment.dataUrl, }, - }) + } satisfies OpenAI.Chat.Completions.ChatCompletionContentPart.File) } } @@ -508,10 +531,10 @@ export function buildBedrockMessageContent( content: string | null | undefined, files: UserFile[] | undefined, providerId: ProviderId | string -): Array> { - const parts: Array> = [] +): ContentBlock[] { + const parts: ContentBlock[] = [] if (content) { - parts.push({ text: content }) + parts.push({ text: content } as ContentBlock.TextMember) } for (const attachment of prepareProviderAttachments(files, providerId)) { @@ -519,53 +542,70 @@ export function buildBedrockMessageContent( if (attachment.contentType === 'image') { parts.push({ image: { - format: getBedrockImageFormat(attachment), + format: getBedrockImageFormat(attachment) as ContentBlock.ImageMember['image']['format'], source: { bytes }, }, - }) + } as ContentBlock.ImageMember) } else if (attachment.contentType === 'video') { parts.push({ video: { - format: attachment.extension, + format: attachment.extension as ContentBlock.VideoMember['video']['format'], source: { bytes }, }, - }) + } as ContentBlock.VideoMember) } else { parts.push({ document: { - format: getBedrockDocumentFormat(attachment), + format: getBedrockDocumentFormat( + attachment + ) as ContentBlock.DocumentMember['document']['format'], name: 
sanitizeBedrockName(attachment.filename), source: { bytes }, }, - }) + } as ContentBlock.DocumentMember) } } return parts } +const SDK_NATIVE_ATTACHMENT_PROVIDERS = new Set([ + 'openai', + 'anthropic', + 'google', + 'bedrock', +]) + export function formatMessagesForProvider( messages: ProviderMessageInput[], providerId: ProviderId | string ): ProviderFormattedMessage[] { + const provider = getAttachmentProvider(providerId) + if (provider && SDK_NATIVE_ATTACHMENT_PROVIDERS.has(provider)) { + return messages as ProviderFormattedMessage[] + } + return messages.map((message) => { if (!message.files?.length || (message.role !== 'user' && message.role !== 'assistant')) { return message as ProviderFormattedMessage } - const provider = getAttachmentProvider(providerId) if (provider === 'openrouter') { - const { files: _files, ...rest } = message + const { files: _omit, ...rest } = message return { ...rest, - content: buildOpenRouterMessageContent(message.content, message.files, providerId), + content: buildOpenRouterMessageContent(message.content, message.files, providerId) as + | string + | Array>, } } - const { files: _files, ...rest } = message + const { files: _omit, ...rest } = message return { ...rest, - content: buildOpenAICompatibleChatContent(message.content, message.files, providerId), + content: buildOpenAICompatibleChatContent(message.content, message.files, providerId) as + | string + | Array>, } }) } diff --git a/apps/sim/providers/azure-openai/index.ts b/apps/sim/providers/azure-openai/index.ts index e6bb18655e4..5f78c227ade 100644 --- a/apps/sim/providers/azure-openai/index.ts +++ b/apps/sim/providers/azure-openai/index.ts @@ -3,6 +3,7 @@ import { toError } from '@sim/utils/errors' import { AzureOpenAI } from 'openai' import type { ChatCompletion, + ChatCompletionContentPart, ChatCompletionCreateParamsBase, ChatCompletionCreateParamsStreaming, ChatCompletionMessageParam, @@ -14,6 +15,7 @@ import { env } from '@/lib/core/config/env' import { 
validateUrlWithDNS } from '@/lib/core/security/input-validation.server' import type { StreamingExecution } from '@/executor/types' import { MAX_TOOL_ITERATIONS } from '@/providers' +import { prepareProviderAttachments } from '@/providers/attachments' import { checkForForcedToolUsage, createReadableStreamFromAzureOpenAIStream, @@ -90,10 +92,28 @@ async function executeChatCompletionsRequest( } if (request.messages) { - if (request.messages.some((message) => message.files?.length)) { - throw new Error('File attachments require an Azure OpenAI Responses API endpoint') + for (const message of request.messages) { + if (!message.files?.length || message.role !== 'user') { + allMessages.push(message as ChatCompletionMessageParam) + continue + } + + const attachments = prepareProviderAttachments(message.files, 'azure-openai') + const nonImage = attachments.find((a) => a.contentType !== 'image') + if (nonImage) { + throw new Error( + `File "${nonImage.filename}" (${nonImage.mimeType}) requires the Azure OpenAI Responses API endpoint; chat-completions deployments support images only` + ) + } + + const parts: ChatCompletionContentPart[] = [] + if (message.content) parts.push({ type: 'text', text: message.content }) + for (const a of attachments) { + parts.push({ type: 'image_url', image_url: { url: a.dataUrl } }) + } + const { files: _files, ...rest } = message + allMessages.push({ ...rest, content: parts } as ChatCompletionMessageParam) } - allMessages.push(...(request.messages as ChatCompletionMessageParam[])) } const tools: ChatCompletionTool[] | undefined = request.tools?.length diff --git a/apps/sim/providers/openai/utils.test.ts b/apps/sim/providers/openai/utils.test.ts index 20fcd484ad8..ba943f3f4aa 100644 --- a/apps/sim/providers/openai/utils.test.ts +++ b/apps/sim/providers/openai/utils.test.ts @@ -32,6 +32,7 @@ describe('buildResponsesInputFromMessages', () => { { type: 'input_image', image_url: 'data:image/png;base64,iVBORw0KGgo=', + detail: 'auto', }, ], }, diff 
--git a/apps/sim/providers/xai/index.ts b/apps/sim/providers/xai/index.ts index bd6303007df..309a9fd8f3b 100644 --- a/apps/sim/providers/xai/index.ts +++ b/apps/sim/providers/xai/index.ts @@ -4,7 +4,6 @@ import OpenAI from 'openai' import type { ChatCompletionCreateParamsStreaming } from 'openai/resources/chat/completions' import type { StreamingExecution } from '@/executor/types' import { MAX_TOOL_ITERATIONS } from '@/providers' -import { formatMessagesForProvider } from '@/providers/attachments' import { getProviderDefaultModel, getProviderModels } from '@/providers/models' import { enrichLastModelSegmentFromChatCompletions } from '@/providers/trace-enrichment' import type { @@ -77,7 +76,6 @@ export const xAIProvider: ProviderConfig = { if (request.messages) { allMessages.push(...request.messages) } - const formattedMessages = formatMessagesForProvider(allMessages, 'xai') as Message[] const tools = request.tools?.length ? request.tools.map((tool) => ({ type: 'function', @@ -95,7 +93,7 @@ export const xAIProvider: ProviderConfig = { } const basePayload: any = { model: request.model, - messages: formattedMessages, + messages: allMessages, } if (request.temperature !== undefined) basePayload.temperature = request.temperature @@ -221,7 +219,7 @@ export const xAIProvider: ProviderConfig = { } const toolCalls = [] const toolResults: Record[] = [] - const currentMessages = [...formattedMessages] + const currentMessages = [...allMessages] let iterationCount = 0 let hasUsedForcedTool = false From af6f0583553b910040b3a005219ce7936f777b0d Mon Sep 17 00:00:00 2001 From: Waleed Latif Date: Fri, 15 May 2026 13:29:15 -0700 Subject: [PATCH 2/3] fix(providers): restore getProviderAttachmentMaxBytes export and xAI message dispatch MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Restore `getProviderAttachmentMaxBytes` — still consumed by agent-handler.ts for per-provider attachment size limits in file hydration - Restore 
`formatMessagesForProvider(allMessages, 'xai')` — providers/index.ts does NOT dispatch centrally on this branch; each OpenAI-compat provider formats its own messages. Without it, xAI Grok vision drops image attachments --- apps/sim/providers/attachments.ts | 4 ++++ apps/sim/providers/xai/index.ts | 6 ++++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/apps/sim/providers/attachments.ts b/apps/sim/providers/attachments.ts index 840d616efd0..380b4d8c890 100644 --- a/apps/sim/providers/attachments.ts +++ b/apps/sim/providers/attachments.ts @@ -120,6 +120,10 @@ export function supportsFileAttachments(providerId: ProviderId | string): boolea return Boolean(provider && !UNSUPPORTED_FILE_PROVIDERS.has(provider)) } +export function getProviderAttachmentMaxBytes(_providerId: ProviderId | string): number { + return AGENT_ATTACHMENT_MAX_BYTES +} + export function inferAttachmentMimeType(file: UserFile): string { const explicitType = file.type?.trim().toLowerCase() if (explicitType && explicitType !== 'application/octet-stream') { diff --git a/apps/sim/providers/xai/index.ts b/apps/sim/providers/xai/index.ts index 309a9fd8f3b..bd6303007df 100644 --- a/apps/sim/providers/xai/index.ts +++ b/apps/sim/providers/xai/index.ts @@ -4,6 +4,7 @@ import OpenAI from 'openai' import type { ChatCompletionCreateParamsStreaming } from 'openai/resources/chat/completions' import type { StreamingExecution } from '@/executor/types' import { MAX_TOOL_ITERATIONS } from '@/providers' +import { formatMessagesForProvider } from '@/providers/attachments' import { getProviderDefaultModel, getProviderModels } from '@/providers/models' import { enrichLastModelSegmentFromChatCompletions } from '@/providers/trace-enrichment' import type { @@ -76,6 +77,7 @@ export const xAIProvider: ProviderConfig = { if (request.messages) { allMessages.push(...request.messages) } + const formattedMessages = formatMessagesForProvider(allMessages, 'xai') as Message[] const tools = request.tools?.length ? 
request.tools.map((tool) => ({ type: 'function', @@ -93,7 +95,7 @@ export const xAIProvider: ProviderConfig = { } const basePayload: any = { model: request.model, - messages: allMessages, + messages: formattedMessages, } if (request.temperature !== undefined) basePayload.temperature = request.temperature @@ -219,7 +221,7 @@ export const xAIProvider: ProviderConfig = { } const toolCalls = [] const toolResults: Record[] = [] - const currentMessages = [...allMessages] + const currentMessages = [...formattedMessages] let iterationCount = 0 let hasUsedForcedTool = false From a2fb1a89d7a664a498a53abdeb8eab9b0acbf1a6 Mon Sep 17 00:00:00 2001 From: Waleed Latif Date: Fri, 15 May 2026 15:21:34 -0700 Subject: [PATCH 3/3] fix(providers): tighten ResponsesInputItem content type to SDK ResponseInputContent Build fix: buildOpenAIMessageContent returns ResponseInputContent[] which isn't assignable to Record[] (ResponseInputText lacks an index signature). Align the type to the SDK shape. --- apps/sim/providers/openai/utils.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/sim/providers/openai/utils.ts b/apps/sim/providers/openai/utils.ts index ad6a5215c19..495a0eae05b 100644 --- a/apps/sim/providers/openai/utils.ts +++ b/apps/sim/providers/openai/utils.ts @@ -22,7 +22,7 @@ export interface ResponsesToolCall { export type ResponsesInputItem = | { role: 'system' | 'user' | 'assistant' - content: string | Array> + content: string | OpenAI.Responses.ResponseInputContent[] } | { type: 'function_call'