From db4fb9eeefae32ef0509a698f208d6263bc92ad4 Mon Sep 17 00:00:00 2001 From: Iss <74388823+isshaddad@users.noreply.github.com> Date: Wed, 4 Feb 2026 19:50:59 -0500 Subject: [PATCH 001/225] docs: Add Hookdeck example (#3005) Add documentation for integrating Hookdeck with Trigger.dev to receive webhooks and forward them to Trigger.dev tasks. --- Open with Devin --- docs/docs.json | 3 +- docs/guides/examples/hookdeck-webhook.mdx | 71 +++++++++++++++++++ .../frameworks/webhooks-guides-overview.mdx | 4 ++ docs/guides/introduction.mdx | 1 + 4 files changed, 78 insertions(+), 1 deletion(-) create mode 100644 docs/guides/examples/hookdeck-webhook.mdx diff --git a/docs/docs.json b/docs/docs.json index c1f5d273804..324052905ac 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -352,7 +352,8 @@ "guides/frameworks/webhooks-guides-overview", "guides/frameworks/nextjs-webhooks", "guides/frameworks/remix-webhooks", - "guides/examples/stripe-webhook" + "guides/examples/stripe-webhook", + "guides/examples/hookdeck-webhook" ] } ] diff --git a/docs/guides/examples/hookdeck-webhook.mdx b/docs/guides/examples/hookdeck-webhook.mdx new file mode 100644 index 00000000000..a28949fca94 --- /dev/null +++ b/docs/guides/examples/hookdeck-webhook.mdx @@ -0,0 +1,71 @@ +--- +title: "Trigger tasks from Hookdeck webhooks" +sidebarTitle: "Hookdeck webhooks" +description: "This example demonstrates how to use Hookdeck to receive webhooks and trigger Trigger.dev tasks." +--- + +## Overview + +This example shows how to use [Hookdeck](https://hookdeck.com) as your webhook infrastructure to trigger Trigger.dev tasks. Hookdeck receives webhooks from external services, and forwards them directly to the Trigger.dev API. This gives you the best of both worlds: Hookdeck's webhook management, logging, and replay capabilities, combined with Trigger.dev's reliable task execution. 
+ +## Key features + +- Use Hookdeck as your webhook endpoint for external services +- Hookdeck forwards webhooks directly to Trigger.dev tasks via the API +- All webhooks are logged and replayable in Hookdeck + +## Setting up Hookdeck + +You'll configure everything in the [Hookdeck dashboard](https://dashboard.hookdeck.com). No code changes needed in your app. + +### 1. Create a destination + +In Hookdeck, create a new [destination](https://hookdeck.com/docs/destinations) with the following settings: + +- **URL**: `https://api.trigger.dev/api/v1/tasks//trigger` (replace `` with your task ID) +- **Method**: POST +- **Authentication**: Bearer token (use your `TRIGGER_SECRET_KEY` from Trigger.dev) + +### 2. Add a transformation + +Create a [transformation](https://hookdeck.com/docs/transformations) to wrap the webhook body in the `payload` field that Trigger.dev expects: + +```javascript +addHandler("transform", (request, context) => { + request.body = { payload: { ...request.body } }; + return request; +}); +``` + +### 3. Create a connection + +Create a [connection](https://hookdeck.com/docs/connections) that links your source (where webhooks come from) to the destination and transformation you created above. + +## Task code + +This task will be triggered when Hookdeck forwards a webhook to the Trigger.dev API. + +```ts trigger/webhook-handler.ts +import { task } from "@trigger.dev/sdk"; + +export const webhookHandler = task({ + id: "webhook-handler", + run: async (payload: Record) => { + // The payload contains the original webhook data from the external service + console.log("Received webhook:", payload); + + // Add your custom logic here + }, +}); +``` + +## Testing your setup + +To test everything is working: + +1. Set up your destination, transformation, and connection in [Hookdeck](https://dashboard.hookdeck.com) +2. Send a test webhook to your Hookdeck source URL (use the Hookdeck Console or cURL) +3. 
Check the Hookdeck dashboard to verify the webhook was received and forwarded +4. Check the [Trigger.dev dashboard](https://cloud.trigger.dev) to see the successful run of your task + +For more information on setting up Hookdeck, refer to the [Hookdeck Documentation](https://hookdeck.com/docs). diff --git a/docs/guides/frameworks/webhooks-guides-overview.mdx b/docs/guides/frameworks/webhooks-guides-overview.mdx index 5e9703b53ef..4c0a4404276 100644 --- a/docs/guides/frameworks/webhooks-guides-overview.mdx +++ b/docs/guides/frameworks/webhooks-guides-overview.mdx @@ -31,6 +31,10 @@ A webhook handler is code that executes in response to an event. They can be end How to create a Stripe webhook handler and trigger a task when a 'checkout session completed' event is received. + + Use Hookdeck to receive webhooks and forward them to Trigger.dev tasks with logging and replay + capabilities. + Date: Wed, 4 Feb 2026 17:17:18 -0800 Subject: [PATCH 002/225] fix(webapp): ask ai button missing tooltip (#2964) Fixes - the tooltip not displaying on the AskAI button in the side menu - incorrect AskAI button heights - Small UI tweaks --- Open with Devin --------- Co-authored-by: Mihai Popescu --- apps/webapp/app/components/AskAI.tsx | 21 ++++++++++--------- apps/webapp/app/components/Shortcuts.tsx | 3 ++- .../app/components/navigation/SideMenu.tsx | 5 ++++- 3 files changed, 17 insertions(+), 12 deletions(-) diff --git a/apps/webapp/app/components/AskAI.tsx b/apps/webapp/app/components/AskAI.tsx index bc55469b84a..814d4649c8f 100644 --- a/apps/webapp/app/components/AskAI.tsx +++ b/apps/webapp/app/components/AskAI.tsx @@ -118,30 +118,31 @@ function AskAIProvider({ websiteId, isCollapsed = false }: AskAIProviderProps) { -
- + + - -
+ + Ask AI - +
diff --git a/apps/webapp/app/components/Shortcuts.tsx b/apps/webapp/app/components/Shortcuts.tsx index e3e4d6fe957..a3fcd074988 100644 --- a/apps/webapp/app/components/Shortcuts.tsx +++ b/apps/webapp/app/components/Shortcuts.tsx @@ -76,7 +76,8 @@ function ShortcutContent() { - + + diff --git a/apps/webapp/app/components/navigation/SideMenu.tsx b/apps/webapp/app/components/navigation/SideMenu.tsx index 1e7d48cd57c..95282f15f5c 100644 --- a/apps/webapp/app/components/navigation/SideMenu.tsx +++ b/apps/webapp/app/components/navigation/SideMenu.tsx @@ -993,7 +993,10 @@ function CollapseToggle({ return (
{/* Vertical line to mask the side menu border */} -
+
From 283f88b20382ee993d32f9fea48490898526b078 Mon Sep 17 00:00:00 2001 From: Saadi Myftija Date: Thu, 5 Feb 2026 15:24:23 +0100 Subject: [PATCH 003/225] feat(webapp): add triggered via field to deployment details page (#2850) Display the deployment trigger source (CLI, CI/CD, Dashboard, GitHub Integration) with appropriate icons on the deployment details page. The triggeredVia field was already in the database but not displayed. Co-authored-by: Claude --- .../v3/DeploymentPresenter.server.ts | 2 + .../route.tsx | 110 +++++++++++++++++- 2 files changed, 111 insertions(+), 1 deletion(-) diff --git a/apps/webapp/app/presenters/v3/DeploymentPresenter.server.ts b/apps/webapp/app/presenters/v3/DeploymentPresenter.server.ts index ea59c657228..bc494c118aa 100644 --- a/apps/webapp/app/presenters/v3/DeploymentPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/DeploymentPresenter.server.ts @@ -156,6 +156,7 @@ export class DeploymentPresenter { }, }, buildServerMetadata: true, + triggeredVia: true, }, }); @@ -225,6 +226,7 @@ export class DeploymentPresenter { isBuilt: !!deployment.builtAt, type: deployment.type, git: gitMetadata, + triggeredVia: deployment.triggeredVia, }, }; } diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.deployments.$deploymentParam/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.deployments.$deploymentParam/route.tsx index aebc934ba38..9d32d89fd56 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.deployments.$deploymentParam/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.deployments.$deploymentParam/route.tsx @@ -3,7 +3,16 @@ import { type LoaderFunctionArgs } from "@remix-run/server-runtime"; import { typedjson, useTypedLoaderData } from "remix-typedjson"; import { useEffect, useState, useRef, useCallback } from "react"; import { S2, 
S2Error } from "@s2-dev/streamstore"; -import { Clipboard, ClipboardCheck, ChevronDown, ChevronUp } from "lucide-react"; +import { + Clipboard, + ClipboardCheck, + ChevronDown, + ChevronUp, + TerminalSquareIcon, + LayoutDashboardIcon, + GitBranchIcon, + ServerIcon, +} from "lucide-react"; import { ExitIcon } from "~/assets/icons/ExitIcon"; import { GitMetadata } from "~/components/GitMetadata"; import { RuntimeIcon } from "~/components/RuntimeIcon"; @@ -73,6 +82,90 @@ type LogEntry = { level: "info" | "error" | "warn" | "debug"; }; +function getTriggeredViaDisplay(triggeredVia: string | null | undefined): { + icon: React.ReactNode; + label: string; +} | null { + if (!triggeredVia) return null; + + const iconClass = "size-4 text-text-dimmed"; + + switch (triggeredVia) { + case "cli:manual": + return { + icon: , + label: "CLI (Manual)", + }; + case "cli:github_actions": + return { + icon: , + label: "CLI (GitHub Actions)", + }; + case "cli:gitlab_ci": + return { + icon: , + label: "CLI (GitLab CI)", + }; + case "cli:circleci": + return { + icon: , + label: "CLI (CircleCI)", + }; + case "cli:jenkins": + return { + icon: , + label: "CLI (Jenkins)", + }; + case "cli:azure_pipelines": + return { + icon: , + label: "CLI (Azure Pipelines)", + }; + case "cli:bitbucket_pipelines": + return { + icon: , + label: "CLI (Bitbucket Pipelines)", + }; + case "cli:travis_ci": + return { + icon: , + label: "CLI (Travis CI)", + }; + case "cli:buildkite": + return { + icon: , + label: "CLI (Buildkite)", + }; + case "cli:ci_other": + return { + icon: , + label: "CLI (CI)", + }; + case "git_integration:github": + return { + icon: , + label: "GitHub Integration", + }; + case "dashboard": + return { + icon: , + label: "Dashboard", + }; + default: + // Handle any unknown values gracefully + if (triggeredVia.startsWith("cli:")) { + return { + icon: , + label: `CLI (${triggeredVia.replace("cli:", "")})`, + }; + } + return { + icon: , + label: triggeredVia, + }; + } +} + export default function 
Page() { const { deployment, eventStream } = useTypedLoaderData(); const organization = useOrganization(); @@ -408,6 +501,21 @@ export default function Page() { )} + + Triggered via + + {(() => { + const display = getTriggeredViaDisplay(deployment.triggeredVia); + if (!display) return "–"; + return ( + + {display.icon} + {display.label} + + ); + })()} + +
From 3bb9aac01405b259003f4a223f1bc87b4c63338e Mon Sep 17 00:00:00 2001 From: James Ritchie Date: Thu, 5 Feb 2026 07:45:40 -0800 Subject: [PATCH 004/225] Fix(webapp): Prevent big numbers on Queue page from jumping around when animating (#3007) --- .../route.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.queues/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.queues/route.tsx index 3a8a7544c5b..3ea70e1e18a 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.queues/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.queues/route.tsx @@ -364,14 +364,14 @@ export default function Page() { />
} - valueClassName={env.paused ? "text-warning" : undefined} + valueClassName={cn(env.paused ? "text-warning" : undefined, "tabular-nums")} compactThreshold={1000000} /> From b96a0b70d49b04d3a29b803b720a3dac6be7e8c1 Mon Sep 17 00:00:00 2001 From: DKP <8297864+D-K-P@users.noreply.github.com> Date: Thu, 5 Feb 2026 23:22:44 +0000 Subject: [PATCH 005/225] Docs: Clarify AI tool compatibility and expand context snippet (#3000) This pull request overhauls the "Building with AI" documentation section. It includes a comprehensive restructuring of the main building-with-ai page with new setup guides and troubleshooting sections, reorganizes the navigation hierarchy to elevate mcp-agent-rules as a top-level page, and updates multiple documentation pages to clarify the relationships between three AI tools: Skills, Agent Rules, and MCP Server. Changes also include formatting improvements, such as replacing italicized text with inline code formatting, and consistent additions of explanatory Note blocks and CardGroup components across related pages. --- docs/building-with-ai.mdx | 275 ++++++++++++++++++++++++++++++++++++-- docs/docs.json | 5 +- docs/mcp-agent-rules.mdx | 23 +++- docs/mcp-introduction.mdx | 12 +- docs/mcp-tools.mdx | 60 ++++----- docs/skills.mdx | 12 +- 6 files changed, 334 insertions(+), 53 deletions(-) diff --git a/docs/building-with-ai.mdx b/docs/building-with-ai.mdx index ba8cd5bb47c..e551324cadf 100644 --- a/docs/building-with-ai.mdx +++ b/docs/building-with-ai.mdx @@ -4,21 +4,272 @@ sidebarTitle: "Overview" description: "Tools and resources for building Trigger.dev projects with AI coding assistants." --- -We provide tools to help you build Trigger.dev projects with AI coding assistants. We recommend using them for the best developer experience. +## Quick setup - - - Give your AI assistant direct access to Trigger.dev tools - search docs, trigger tasks, deploy projects, and monitor runs. 
+We provide multiple tools to help AI coding assistants write correct Trigger.dev code. Use one or all of them for the best developer experience. + + + + + + Give your AI assistant direct access to Trigger.dev tools — search docs, trigger tasks, deploy projects, and monitor runs. Works with Claude Code, Cursor, Windsurf, VS Code (Copilot), and Zed. + + ```bash + npx trigger.dev@latest install-mcp + ``` + + [Learn more →](/mcp-introduction) + + + + Portable instruction sets that teach any AI coding assistant Trigger.dev best practices. Works with Claude Code, Cursor, Windsurf, VS Code (Copilot), and any tool that supports the [Agent Skills standard](https://agentskills.io). + + ```bash + npx skills add triggerdotdev/skills + ``` + + [Learn more →](/skills) + + + + Comprehensive rule sets installed directly into your AI client's config files. Works with Cursor, Claude Code, VS Code (Copilot), Windsurf, Gemini CLI, Cline, and more. Claude Code also gets a dedicated subagent for hands-on help. + + ```bash + npx trigger.dev@latest install-rules + ``` + + [Learn more →](/mcp-agent-rules) + + + + +## Skills vs Agent Rules vs MCP + +Not sure which tool to use? Here's how they compare: + +| | **Skills** | **Agent Rules** | **MCP Server** | +|:--|:-----------|:----------------|:---------------| +| **What it does** | Drops skill files into your project | Installs rule sets into client config | Runs a live server your AI connects to | +| **Installs to** | `.claude/skills/`, `.cursor/skills/`, etc. | `.cursor/rules/`, `CLAUDE.md`, `AGENTS.md`, etc. | `mcp.json`, `~/.claude.json`, etc. 
| +| **Updates** | Re-run `npx skills add` | Re-run `npx trigger.dev@latest install-rules` or auto-prompted on `trigger dev` | Always latest (uses `@latest`) | +| **Best for** | Teaching patterns and best practices | Comprehensive code generation guidance | Live project interaction (deploy, trigger, monitor) | +| **Works offline** | Yes | Yes | No (calls Trigger.dev API) | + +**Our recommendation:** Install all three. Skills and Agent Rules teach your AI *how* to write code. The MCP Server lets it *do things* in your project. + +## Project-level context snippet + +If you prefer a lightweight/passive approach, paste the snippet below into a context file at the root of your project. Different AI tools read different files: + +| File | Read by | +|:-----|:--------| +| `CLAUDE.md` | Claude Code | +| `AGENTS.md` | OpenAI Codex, Jules, OpenCode | +| `.cursor/rules/*.md` | Cursor | +| `.github/copilot-instructions.md` | GitHub Copilot | +| `CONVENTIONS.md` | Windsurf, Cline, and others | + +Create the file that matches your AI tool (or multiple files if your team uses different tools) and paste the snippet below. This gives the AI essential Trigger.dev context without installing anything. + + + +````markdown +# Trigger.dev rules + +## Imports + +Always import from `@trigger.dev/sdk` — never from `@trigger.dev/sdk/v3` or use the deprecated `client.defineJob` pattern. + +## Task pattern + +Every task must be exported. 
Use `task()` from `@trigger.dev/sdk`: + +```ts +import { task } from "@trigger.dev/sdk"; + +export const myTask = task({ + id: "my-task", + retry: { + maxAttempts: 3, + factor: 1.8, + minTimeoutInMs: 500, + maxTimeoutInMs: 30_000, + }, + run: async (payload: { url: string }) => { + // No timeouts — runs can take as long as needed + return { success: true }; + }, +}); +``` + +## Triggering tasks + +From your backend (Next.js route, Express handler, etc.): + +```ts +import type { myTask } from "./trigger/my-task"; +import { tasks } from "@trigger.dev/sdk"; + +// Fire and forget +const handle = await tasks.trigger("my-task", { url: "https://example.com" }); + +// Batch trigger (up to 1,000 items) +const batchHandle = await tasks.batchTrigger("my-task", [ + { payload: { url: "https://example.com/1" } }, + { payload: { url: "https://example.com/2" } }, +]); +``` + +### From inside other tasks + +```ts +export const parentTask = task({ + id: "parent-task", + run: async (payload) => { + // Fire and forget + await childTask.trigger({ data: "value" }); + + // Wait for result — returns a Result object, NOT the output directly + const result = await childTask.triggerAndWait({ data: "value" }); + if (result.ok) { + console.log(result.output); // The actual return value + } else { + console.error(result.error); + } - ```bash - npx trigger.dev@latest install-mcp - ``` + // Or use .unwrap() to get output directly (throws on failure) + const output = await childTask.triggerAndWait({ data: "value" }).unwrap(); + }, +}); +``` + +> Never wrap `triggerAndWait` or `batchTriggerAndWait` in `Promise.all` — this is not supported. 
+ +## Error handling + +```ts +import { task, retry, AbortTaskRunError } from "@trigger.dev/sdk"; + +export const resilientTask = task({ + id: "resilient-task", + retry: { maxAttempts: 5 }, + run: async (payload) => { + // Permanent error — skip retrying + if (!payload.isValid) { + throw new AbortTaskRunError("Invalid payload, will not retry"); + } + + // Retry a specific block (not the whole task) + const data = await retry.onThrow( + async () => await fetchExternalApi(payload), + { maxAttempts: 3 } + ); + + return data; + }, +}); +``` + +## Schema validation + +Use `schemaTask` with Zod for payload validation: + +```ts +import { schemaTask } from "@trigger.dev/sdk"; +import { z } from "zod"; + +export const processVideo = schemaTask({ + id: "process-video", + schema: z.object({ videoUrl: z.string().url() }), + run: async (payload) => { + // payload is typed and validated + }, +}); +``` + +## Waits + +Use `wait.for` for delays, `wait.until` for dates, and `wait.forToken` for external callbacks: + +```ts +import { wait } from "@trigger.dev/sdk"; +await wait.for({ seconds: 30 }); +await wait.until({ date: new Date("2025-01-01") }); +``` + +## Configuration + +`trigger.config.ts` lives at the project root: + +```ts +import { defineConfig } from "@trigger.dev/sdk/build"; + +export default defineConfig({ + project: "", + dirs: ["./trigger"], +}); +``` + +## Common mistakes + +1. **Forgetting to export tasks** — every task must be a named export +2. **Importing from `@trigger.dev/sdk/v3`** — this is the old v3 path; always use `@trigger.dev/sdk` +3. **Using `client.defineJob()`** — this is the deprecated v2 API +4. **Calling `task.trigger()` directly** — use `tasks.trigger("task-id", payload)` from your backend +5. **Using `triggerAndWait` result as output** — it returns a `Result` object; check `result.ok` then access `result.output`, or use `.unwrap()` +6. **Wrapping waits/triggerAndWait in `Promise.all`** — not supported in Trigger.dev tasks +7. 
**Adding timeouts to tasks** — tasks have no built-in timeout; use `maxDuration` in config if needed +```` + + + +## llms.txt + +We also publish machine-readable documentation for LLM consumption: + +- [trigger.dev/docs/llms.txt](https://trigger.dev/docs/llms.txt) — concise overview +- [trigger.dev/docs/llms-full.txt](https://trigger.dev/docs/llms-full.txt) — full documentation + +These follow the [llms.txt standard](https://llmstxt.org) and can be fed directly into any LLM context window. + + +## Troubleshooting + + + + + Install [Agent Rules](/mcp-agent-rules) or [Skills](/skills) — they override the outdated patterns in the AI's training data. The [context snippet](#project-level-context-snippet) above is a quick alternative. + + + + 1. Make sure you've restarted your AI client after adding the config + 2. Run `npx trigger.dev@latest install-mcp` again — it will detect and fix common issues + 3. Check that `npx trigger.dev@latest mcp` runs without errors in your terminal + 4. See the [MCP introduction](/mcp-introduction) for client-specific config details + + + + All three if possible. If you can only pick one: + - **Agent Rules** if you want the broadest code generation improvement + - **Skills** if you use multiple AI tools and want a single install + - **MCP Server** if you need to trigger tasks, deploy, and search docs from your AI + + + + +## Next steps + + + + Install and configure the MCP Server for live project interaction. - Portable instruction sets that teach any AI coding assistant Trigger.dev best practices for writing tasks, configs, and more. - - ```bash - npx skills add triggerdotdev/skills - ``` + Portable instruction sets for any AI coding assistant. + + + Comprehensive rule sets installed into your AI client. + + + Learn the task patterns your AI assistant will follow. 
diff --git a/docs/docs.json b/docs/docs.json index 324052905ac..4ec2fafc0eb 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -49,9 +49,10 @@ "building-with-ai", { "group": "MCP Server", - "pages": ["mcp-introduction", "mcp-tools", "mcp-agent-rules"] + "pages": ["mcp-introduction", "mcp-tools"] }, - "skills" + "skills", + "mcp-agent-rules" ] }, { diff --git a/docs/mcp-agent-rules.mdx b/docs/mcp-agent-rules.mdx index 321f312a842..d9ba021b891 100644 --- a/docs/mcp-agent-rules.mdx +++ b/docs/mcp-agent-rules.mdx @@ -1,13 +1,17 @@ --- title: "Agent rules" sidebarTitle: "Agent rules" -description: "Learn how to use the Trigger.dev agent rules with the MCP server" +description: "Install Trigger.dev agent rules to guide AI assistants toward correct, up-to-date code patterns." --- ## What are Trigger.dev agent rules? Trigger.dev agent rules are comprehensive instruction sets that guide AI assistants to write optimal Trigger.dev code. These rules ensure your AI assistant understands best practices, current APIs, and recommended patterns when working with Trigger.dev projects. + + Agent Rules are one of three AI tools we provide. You can also install [Skills](/skills) for portable cross-editor instruction sets or the [MCP Server](/mcp-introduction) for live project interaction. See the [comparison table](/building-with-ai#skills-vs-agent-rules-vs-mcp) for details. + + ## Installation Install the agent rules with the following command: @@ -112,6 +116,17 @@ npx trigger.dev@latest install-rules ## Next steps -- [Install the MCP server](/mcp-introduction) for complete Trigger.dev integration -- [Explore MCP tools](/mcp-tools) for project management and task execution - + + + Portable instruction sets that work across all AI coding assistants. + + + Give your AI assistant direct access to Trigger.dev tools and APIs. + + + See all AI tools and how they compare. + + + Learn the task patterns that agent rules teach your AI assistant. 
+ + diff --git a/docs/mcp-introduction.mdx b/docs/mcp-introduction.mdx index 257522d5792..a00f3dda896 100644 --- a/docs/mcp-introduction.mdx +++ b/docs/mcp-introduction.mdx @@ -361,4 +361,14 @@ Once installed, you can start using the MCP server by asking your AI assistant q ## Next Steps -- [Explore available MCP tools](/mcp-tools) + + + Explore all available MCP tools for managing your projects. + + + Portable instruction sets that teach AI assistants Trigger.dev patterns. + + + Install comprehensive rule sets directly into your AI client. + + diff --git a/docs/mcp-tools.mdx b/docs/mcp-tools.mdx index 058a3671ab7..fdcdb56f3d6 100644 --- a/docs/mcp-tools.mdx +++ b/docs/mcp-tools.mdx @@ -11,9 +11,9 @@ description: "Learn about how to use the tools available in the Trigger.dev MCP Search the Trigger.dev documentation for guides, examples, and API references. **Example usage:** -- _"How do I create a scheduled task?"_ -- _"Show me webhook examples"_ -- _"What are the deployment options?"_ +- `"How do I create a scheduled task?"` +- `"Show me webhook examples"` +- `"What are the deployment options?"` ## Project Management Tools @@ -22,32 +22,32 @@ Search the Trigger.dev documentation for guides, examples, and API references. List all organizations you have access to. **Example usage:** -- _"What organizations do I have?"_ -- _"Show me my orgs"_ +- `"What organizations do I have?"` +- `"Show me my orgs"` ### list_projects List all projects in your Trigger.dev account. **Example usage:** -- _"What projects do I have?"_ -- _"List my Trigger.dev projects"_ +- `"What projects do I have?"` +- `"List my Trigger.dev projects"` ### create_project_in_org Create a new project in an organization. **Example usage:** -- _"Create a new project called 'my-app'"_ -- _"Set up a new Trigger.dev project"_ +- `"Create a new project called 'my-app'"` +- `"Set up a new Trigger.dev project"` ### initialize_project Initialize Trigger.dev in your project with automatic setup and configuration. 
**Example usage:** -- _"Set up Trigger.dev in this project"_ -- _"Add Trigger.dev to my app"_ +- `"Set up Trigger.dev in this project"` +- `"Add Trigger.dev to my app"` ## Task Management Tools @@ -56,17 +56,17 @@ Initialize Trigger.dev in your project with automatic setup and configuration. Get the current worker for a project, including the worker version, SDK version, and registered tasks with their payload schemas. **Example usage:** -- _"What tasks are available?"_ -- _"Show me the tasks in dev"_ +- `"What tasks are available?"` +- `"Show me the tasks in dev"` ### trigger_task Trigger a task to run with a specific payload. You can add a delay, set tags, configure retries, choose a machine size, set a TTL, or use an idempotency key. **Example usage:** -- _"Run the email-notification task"_ -- _"Trigger my-task with userId 123"_ -- _"Execute the sync task in production"_ +- `"Run the email-notification task"` +- `"Trigger my-task with userId 123"` +- `"Execute the sync task in production"` ## Run Monitoring Tools @@ -75,32 +75,32 @@ Trigger a task to run with a specific payload. You can add a delay, set tags, co Get detailed information about a specific task run, including logs and status. Enable debug mode to get the full trace with all logs and spans. **Example usage:** -- _"Show me details for run run_abc123"_ -- _"Why did this run fail?"_ +- `"Show me details for run run_abc123"` +- `"Why did this run fail?"` ### list_runs List runs for a project. Filter by status, task, tags, version, machine size, or time period. **Example usage:** -- _"Show me recent runs"_ -- _"List failed runs from the last 7 days"_ -- _"What runs are currently executing?"_ +- `"Show me recent runs"` +- `"List failed runs from the last 7 days"` +- `"What runs are currently executing?"` ### wait_for_run_to_complete Wait for a specific run to finish and return the result. 
**Example usage:** -- _"Wait for run run_abc123 to complete"_ +- `"Wait for run run_abc123 to complete"` ### cancel_run Cancel a running or queued run. **Example usage:** -- _"Cancel run run_abc123"_ -- _"Stop that task"_ +- `"Cancel run run_abc123"` +- `"Stop that task"` ## Deployment Tools @@ -109,24 +109,24 @@ Cancel a running or queued run. Deploy your project to staging or production. **Example usage:** -- _"Deploy to production"_ -- _"Deploy to staging"_ +- `"Deploy to production"` +- `"Deploy to staging"` ### list_deploys List deployments for a project. Filter by status or time period. **Example usage:** -- _"Show me recent deployments"_ -- _"What's deployed to production?"_ +- `"Show me recent deployments"` +- `"What's deployed to production?"` ### list_preview_branches List all preview branches in the project. **Example usage:** -- _"What preview branches exist?"_ -- _"Show me preview deployments"_ +- `"What preview branches exist?"` +- `"Show me preview deployments"` The deploy and list_preview_branches tools are not available when the MCP server is running with the `--dev-only` flag. diff --git a/docs/skills.mdx b/docs/skills.mdx index eb4add47952..f12c5c36eb6 100644 --- a/docs/skills.mdx +++ b/docs/skills.mdx @@ -7,7 +7,11 @@ tag: "new" ## What are agent skills? -Skills are portable instruction sets that teach AI coding assistants how to use Trigger.dev effectively. Unlike vendor-specific config files (`.cursor/rules`, `CLAUDE.md`), skills use an open standard that works across all major AI assistants. For example, Cursor users and Claude Code users can get the same knowledge from a single install. +Skills are portable instruction sets that teach AI coding assistants how to use Trigger.dev effectively. Unlike vendor-specific config files (`.cursor/rules`, `CLAUDE.md`), skills use an open standard that works across all major AI assistants. For example, Cursor users and Claude Code users can get the same knowledge from a single install. 
+ + + Skills are one of three AI tools we provide. You can also install [Agent Rules](/mcp-agent-rules) for client-specific rule sets or the [MCP Server](/mcp-introduction) for live project interaction. See the [comparison table](/building-with-ai#skills-vs-agent-rules-vs-mcp) for details. + Skills are installed as directories containing a `SKILL.md` file. Each `SKILL.md` includes YAML frontmatter (name, description) and markdown instructions with patterns, examples, and best practices that AI assistants automatically discover and follow. @@ -68,15 +72,15 @@ Skills work with any AI coding assistant that supports the [Agent Skills standar ## Next steps + + Install comprehensive rule sets directly into your AI client. + Give your AI assistant direct access to Trigger.dev tools and APIs. Learn the task patterns that skills teach your AI assistant. - - Build durable AI workflows with prompt chaining and human-in-the-loop. - Browse the full Agent Skills ecosystem. From e536d35b1717033a638b4245a8e7c17c5e5ba6ab Mon Sep 17 00:00:00 2001 From: nicktrn <55853254+nicktrn@users.noreply.github.com> Date: Fri, 6 Feb 2026 13:56:39 +0000 Subject: [PATCH 006/225] fix(ci): fix docker image publishing and worker builds (#3013) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary - **Fix Docker publish automation**: The `v.docker.*` tags pushed by the release workflow using `GITHUB_TOKEN` don't trigger the publish workflow (GitHub Actions limitation to prevent infinite loops). Added a `workflow_call` to `publish.yml` directly from the release job so Docker images are built automatically after npm publish. Tags are still pushed for reference. - **Fix worker Containerfiles**: The coordinator, docker-provider, and kubernetes-provider builds have been failing since the superjson vendoring change in `@trigger.dev/core` (#2949). The Containerfiles now run `bundle-vendor` before `build:bundle` to generate the vendor files that esbuild needs. 
### Context - Docker images on GHCR have been stuck at v4.3.0 — v4.3.1, v4.3.2, v4.3.3 tags existed on GitHub but never triggered publish runs - The worker builds (publish-worker) have been failing on every push to main since Jan 30 ## Test plan - [x] Verified kubernetes-provider Containerfile builds locally with the fix - [x] Manually dispatched publish workflow for v4.3.1 — all jobs succeeded --- Open with Devin --- .github/workflows/release.yml | 12 +++++++++++- apps/coordinator/Containerfile | 2 +- apps/docker-provider/Containerfile | 2 +- apps/kubernetes-provider/Containerfile | 2 +- 4 files changed, 14 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ca0f0ebf16b..3b4135ec099 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -122,7 +122,6 @@ jobs: package_version=$(echo '${{ steps.changesets.outputs.publishedPackages }}' | jq -r '.[0].version') echo "package_version=${package_version}" >> "$GITHUB_OUTPUT" - # this triggers the publish workflow for the docker images - name: Create and push Docker tag if: steps.changesets.outputs.published == 'true' run: | @@ -130,6 +129,17 @@ jobs: git tag "v.docker.${{ steps.get_version.outputs.package_version }}" git push origin "v.docker.${{ steps.get_version.outputs.package_version }}" + # Trigger Docker builds directly via workflow_call since tags pushed with + # GITHUB_TOKEN don't trigger other workflows (GitHub Actions limitation). + publish-docker: + name: 🐳 Publish Docker images + needs: release + if: needs.release.outputs.published == 'true' + uses: ./.github/workflows/publish.yml + secrets: inherit + with: + image_tag: v${{ needs.release.outputs.published_package_version }} + # The prerelease job needs to be on the same workflow file due to a limitation related to how npm verifies OIDC claims. 
prerelease: name: 🧪 Prerelease diff --git a/apps/coordinator/Containerfile b/apps/coordinator/Containerfile index 4e7b89e0af1..9e973675ab9 100644 --- a/apps/coordinator/Containerfile +++ b/apps/coordinator/Containerfile @@ -35,7 +35,7 @@ COPY --from=pruner --chown=node:node /app/out/full/ . COPY --from=dev-deps --chown=node:node /app/ . COPY --chown=node:node turbo.json turbo.json -RUN pnpm run -r --filter coordinator build:bundle +RUN pnpm run -r --filter @trigger.dev/core bundle-vendor && pnpm run -r --filter coordinator build:bundle FROM alpine AS cri-tools diff --git a/apps/docker-provider/Containerfile b/apps/docker-provider/Containerfile index bea730bda80..42a7ac23092 100644 --- a/apps/docker-provider/Containerfile +++ b/apps/docker-provider/Containerfile @@ -31,7 +31,7 @@ COPY --from=pruner --chown=node:node /app/out/full/ . COPY --from=dev-deps --chown=node:node /app/ . COPY --chown=node:node turbo.json turbo.json -RUN pnpm run -r --filter docker-provider build:bundle +RUN pnpm run -r --filter @trigger.dev/core bundle-vendor && pnpm run -r --filter docker-provider build:bundle FROM base AS runner diff --git a/apps/kubernetes-provider/Containerfile b/apps/kubernetes-provider/Containerfile index fb96304c26b..b46b9943275 100644 --- a/apps/kubernetes-provider/Containerfile +++ b/apps/kubernetes-provider/Containerfile @@ -31,7 +31,7 @@ COPY --from=pruner --chown=node:node /app/out/full/ . COPY --from=dev-deps --chown=node:node /app/ . 
COPY --chown=node:node turbo.json turbo.json -RUN pnpm run -r --filter kubernetes-provider build:bundle +RUN pnpm run -r --filter @trigger.dev/core bundle-vendor && pnpm run -r --filter kubernetes-provider build:bundle FROM base AS runner From 9b21f8d322c5d802ddd8cd848002c4d0b9afbb33 Mon Sep 17 00:00:00 2001 From: Oskar Otwinowski Date: Tue, 10 Feb 2026 10:37:09 +0100 Subject: [PATCH 007/225] feat(webapp): Vercel integration (#2994) Vercel integration Desc + Vid coming soon For human reviewer: - check the db schema - check if posthog user attribution call is correct (telemetry.server.ts & `referralSource`) --- Open with Devin --- .changeset/vercel-integration.md | 5 + .vscode/settings.json | 3 +- .../app/components/GitHubLoginButton.tsx | 2 - .../environments/RegenerateApiKeyModal.tsx | 30 +- .../integrations/VercelBuildSettings.tsx | 176 ++ .../components/integrations/VercelLogo.tsx | 12 + .../integrations/VercelOnboardingModal.tsx | 1085 +++++++++++ .../OrganizationSettingsSideMenu.tsx | 9 + apps/webapp/app/env.server.ts | 5 + .../app/models/orgIntegration.server.ts | 24 + .../app/models/vercelIntegration.server.ts | 1659 +++++++++++++++++ .../presenters/v3/ApiKeysPresenter.server.ts | 16 + .../v3/DeploymentListPresenter.server.ts | 59 +- .../EnvironmentVariablesPresenter.server.ts | 92 +- .../v3/VercelSettingsPresenter.server.ts | 585 ++++++ .../route.tsx | 7 +- .../route.tsx | 28 +- .../route.tsx | 8 +- .../route.tsx | 230 ++- .../route.tsx | 162 +- ...ationSlug.settings.integrations.vercel.tsx | 375 ++++ .../route.tsx | 45 + .../webapp/app/routes/_app.orgs.new/route.tsx | 21 + .../api.v1.deployments.$deploymentId.ts | 12 + ....projects.$projectParam.vercel.projects.ts | 147 ++ ...ojects.$projectRef.envvars.$slug.import.ts | 4 + .../app/routes/auth.github.callback.tsx | 5 +- .../app/routes/auth.google.callback.tsx | 5 +- .../app/routes/confirm-basic-details.tsx | 20 +- apps/webapp/app/routes/login._index/route.tsx | 2 +- 
apps/webapp/app/routes/login.magic/route.tsx | 15 +- apps/webapp/app/routes/login.mfa/route.tsx | 13 +- apps/webapp/app/routes/magic.tsx | 3 + ...ents.$environmentId.regenerate-api-key.tsx | 38 + ...cts.$projectParam.env.$envParam.github.tsx | 36 +- ...cts.$projectParam.env.$envParam.vercel.tsx | 926 +++++++++ apps/webapp/app/routes/vercel.callback.ts | 78 + apps/webapp/app/routes/vercel.configure.tsx | 52 + apps/webapp/app/routes/vercel.connect.tsx | 170 ++ apps/webapp/app/routes/vercel.install.tsx | 73 + apps/webapp/app/routes/vercel.onboarding.tsx | 465 +++++ apps/webapp/app/services/org.server.ts | 20 + apps/webapp/app/services/postAuth.server.ts | 5 +- .../app/services/referralSource.server.ts | 53 + apps/webapp/app/services/telemetry.server.ts | 32 +- .../app/services/vercelIntegration.server.ts | 656 +++++++ apps/webapp/app/utils/pathBuilder.ts | 24 + .../environmentVariablesRepository.server.ts | 147 +- .../app/v3/environmentVariables/repository.ts | 18 + .../v3/services/alerts/deliverAlert.server.ts | 3 +- .../services/initializeDeployment.server.ts | 1 + apps/webapp/app/v3/vercel/index.ts | 17 + .../app/v3/vercel/vercelOAuthState.server.ts | 40 + .../vercel/vercelProjectIntegrationSchema.ts | 225 +++ .../webapp/app/v3/vercel/vercelUrls.server.ts | 26 + apps/webapp/package.json | 1 + apps/webapp/test/vercelUrls.test.ts | 56 + .../migration.sql | 3 + .../migration.sql | 29 + .../migration.sql | 22 + .../migration.sql | 9 + .../migration.sql | 3 + .../migration.sql | 3 + .../database/prisma/schema.prisma | 113 +- packages/core/src/v3/schemas/api.ts | 18 + pnpm-lock.yaml | 335 +++- 66 files changed, 8388 insertions(+), 173 deletions(-) create mode 100644 .changeset/vercel-integration.md create mode 100644 apps/webapp/app/components/integrations/VercelBuildSettings.tsx create mode 100644 apps/webapp/app/components/integrations/VercelLogo.tsx create mode 100644 apps/webapp/app/components/integrations/VercelOnboardingModal.tsx create mode 100644 
apps/webapp/app/models/vercelIntegration.server.ts create mode 100644 apps/webapp/app/presenters/v3/VercelSettingsPresenter.server.ts create mode 100644 apps/webapp/app/routes/_app.orgs.$organizationSlug.settings.integrations.vercel.tsx create mode 100644 apps/webapp/app/routes/api.v1.orgs.$organizationSlug.projects.$projectParam.vercel.projects.ts create mode 100644 apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.vercel.tsx create mode 100644 apps/webapp/app/routes/vercel.callback.ts create mode 100644 apps/webapp/app/routes/vercel.configure.tsx create mode 100644 apps/webapp/app/routes/vercel.connect.tsx create mode 100644 apps/webapp/app/routes/vercel.install.tsx create mode 100644 apps/webapp/app/routes/vercel.onboarding.tsx create mode 100644 apps/webapp/app/services/org.server.ts create mode 100644 apps/webapp/app/services/referralSource.server.ts create mode 100644 apps/webapp/app/services/vercelIntegration.server.ts create mode 100644 apps/webapp/app/v3/vercel/index.ts create mode 100644 apps/webapp/app/v3/vercel/vercelOAuthState.server.ts create mode 100644 apps/webapp/app/v3/vercel/vercelProjectIntegrationSchema.ts create mode 100644 apps/webapp/app/v3/vercel/vercelUrls.server.ts create mode 100644 apps/webapp/test/vercelUrls.test.ts create mode 100644 internal-packages/database/prisma/migrations/20260126175159_add_environment_variable_versioning/migration.sql create mode 100644 internal-packages/database/prisma/migrations/20260129162621_add_organization_project_integration/migration.sql create mode 100644 internal-packages/database/prisma/migrations/20260129162810_add_integration_deployment/migration.sql create mode 100644 internal-packages/database/prisma/migrations/20260129162946_alter_tables_for_integrations_data/migration.sql create mode 100644 internal-packages/database/prisma/migrations/20260129165555_add_organization_integration_idx/migration.sql create mode 100644 
internal-packages/database/prisma/migrations/20260129165809_add_worker_deployment_idx/migration.sql diff --git a/.changeset/vercel-integration.md b/.changeset/vercel-integration.md new file mode 100644 index 00000000000..8b638e36431 --- /dev/null +++ b/.changeset/vercel-integration.md @@ -0,0 +1,5 @@ +--- +"@trigger.dev/core": patch +--- + +Add Vercel integration support to API schemas: `commitSHA` and `integrationDeployments` on deployment responses, and `source` field for environment variable imports. diff --git a/.vscode/settings.json b/.vscode/settings.json index 12aefeb358f..382a5ae6201 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -7,5 +7,6 @@ "packages/cli-v3/e2e": true }, "vitest.disableWorkspaceWarning": true, - "typescript.experimental.useTsgo": false + "typescript.experimental.useTsgo": true, + "chat.agent.maxRequests": 10000 } diff --git a/apps/webapp/app/components/GitHubLoginButton.tsx b/apps/webapp/app/components/GitHubLoginButton.tsx index 87238db087e..76a494927cd 100644 --- a/apps/webapp/app/components/GitHubLoginButton.tsx +++ b/apps/webapp/app/components/GitHubLoginButton.tsx @@ -32,8 +32,6 @@ export function OctoKitty({ className }: { className?: string }) { baseProfile="tiny" id="Layer_1" xmlns="http://www.w3.org/2000/svg" - x="0px" - y="0px" viewBox="0 0 2350 2314.8" xmlSpace="preserve" fill="currentColor" diff --git a/apps/webapp/app/components/environments/RegenerateApiKeyModal.tsx b/apps/webapp/app/components/environments/RegenerateApiKeyModal.tsx index fb0c77ca7c7..439fd892f91 100644 --- a/apps/webapp/app/components/environments/RegenerateApiKeyModal.tsx +++ b/apps/webapp/app/components/environments/RegenerateApiKeyModal.tsx @@ -10,11 +10,14 @@ import { FormButtons } from "../primitives/FormButtons"; import { Input } from "../primitives/Input"; import { InputGroup } from "../primitives/InputGroup"; import { Paragraph } from "../primitives/Paragraph"; +import { CheckboxWithLabel } from "../primitives/Checkbox"; import { 
Spinner } from "../primitives/Spinner"; type ModalProps = { id: string; title: string; + hasVercelIntegration: boolean; + isDevelopment: boolean; }; type ModalContentProps = ModalProps & { @@ -22,7 +25,12 @@ type ModalContentProps = ModalProps & { closeModal: () => void; }; -export function RegenerateApiKeyModal({ id, title }: ModalProps) { +export function RegenerateApiKeyModal({ + id, + title, + hasVercelIntegration, + isDevelopment, +}: ModalProps) { const randomWord = generateTwoRandomWords(); const [open, setOpen] = useState(false); return ( @@ -37,6 +45,8 @@ export function RegenerateApiKeyModal({ id, title }: ModalProps) { setOpen(false)} /> @@ -45,7 +55,14 @@ export function RegenerateApiKeyModal({ id, title }: ModalProps) { ); } -const RegenerateApiKeyModalContent = ({ id, randomWord, title, closeModal }: ModalContentProps) => { +const RegenerateApiKeyModalContent = ({ + id, + randomWord, + title, + hasVercelIntegration, + isDevelopment, + closeModal, +}: ModalContentProps) => { const [confirmationText, setConfirmationText] = useState(""); const fetcher = useFetcher(); const isSubmitting = fetcher.state === "submitting"; @@ -83,6 +100,15 @@ const RegenerateApiKeyModalContent = ({ id, randomWord, title, closeModal }: Mod onChange={(e) => setConfirmationText(e.target.value)} /> + {hasVercelIntegration && !isDevelopment && ( + + )} void; + discoverEnvVars: EnvSlug[]; + onDiscoverEnvVarsChange: (slugs: EnvSlug[]) => void; + atomicBuilds: EnvSlug[]; + onAtomicBuildsChange: (slugs: EnvSlug[]) => void; + envVarsConfigLink?: string; +}; + +export function BuildSettingsFields({ + availableEnvSlugs, + pullEnvVarsBeforeBuild, + onPullEnvVarsChange, + discoverEnvVars, + onDiscoverEnvVarsChange, + atomicBuilds, + onAtomicBuildsChange, + envVarsConfigLink, +}: BuildSettingsFieldsProps) { + return ( + <> + {/* Pull env vars before build */} +
+
+
+ + + Select which environments should pull environment variables from Vercel before each + build.{" "} + {envVarsConfigLink && ( + <> + Configure which variables to pull. + + )} + +
+ {availableEnvSlugs.length > 1 && ( + 0 && + availableEnvSlugs.every((s) => pullEnvVarsBeforeBuild.includes(s)) + } + onCheckedChange={(checked) => { + onPullEnvVarsChange(checked ? [...availableEnvSlugs] : []); + }} + /> + )} +
+
+ {availableEnvSlugs.map((slug) => { + const envType = envSlugToType(slug); + return ( +
+
+ + + {environmentFullTitle({ type: envType })} + +
+ { + onPullEnvVarsChange( + checked + ? [...pullEnvVarsBeforeBuild, slug] + : pullEnvVarsBeforeBuild.filter((s) => s !== slug) + ); + }} + /> +
+ ); + })} +
+
+ + {/* Discover new env vars */} +
+
+
+ + + Select which environments should automatically discover and create new environment + variables from Vercel during builds. + +
+ {availableEnvSlugs.length > 1 && ( + 0 && + availableEnvSlugs.every( + (s) => discoverEnvVars.includes(s) || !pullEnvVarsBeforeBuild.includes(s) + ) && + availableEnvSlugs.some((s) => discoverEnvVars.includes(s)) + } + disabled={!availableEnvSlugs.some((s) => pullEnvVarsBeforeBuild.includes(s))} + onCheckedChange={(checked) => { + onDiscoverEnvVarsChange( + checked + ? availableEnvSlugs.filter((s) => pullEnvVarsBeforeBuild.includes(s)) + : [] + ); + }} + /> + )} +
+
+ {availableEnvSlugs.map((slug) => { + const envType = envSlugToType(slug); + const isPullDisabled = !pullEnvVarsBeforeBuild.includes(slug); + return ( +
+
+ + + {environmentFullTitle({ type: envType })} + +
+ { + onDiscoverEnvVarsChange( + checked + ? [...discoverEnvVars, slug] + : discoverEnvVars.filter((s) => s !== slug) + ); + }} + /> +
+ ); + })} +
+
+ + {/* Atomic deployments */} +
+
+
+ + + When enabled, production deployments wait for Vercel deployment to complete before + promoting the Trigger.dev deployment. + +
+ { + onAtomicBuildsChange(checked ? ["prod"] : []); + }} + /> +
+
+ + ); +} diff --git a/apps/webapp/app/components/integrations/VercelLogo.tsx b/apps/webapp/app/components/integrations/VercelLogo.tsx new file mode 100644 index 00000000000..7ddf039abfd --- /dev/null +++ b/apps/webapp/app/components/integrations/VercelLogo.tsx @@ -0,0 +1,12 @@ +export function VercelLogo({ className }: { className?: string }) { + return ( + + + + ); +} diff --git a/apps/webapp/app/components/integrations/VercelOnboardingModal.tsx b/apps/webapp/app/components/integrations/VercelOnboardingModal.tsx new file mode 100644 index 00000000000..c2a5bfec43a --- /dev/null +++ b/apps/webapp/app/components/integrations/VercelOnboardingModal.tsx @@ -0,0 +1,1085 @@ +import { + CheckCircleIcon, + ExclamationTriangleIcon, + ChevronDownIcon, + ChevronUpIcon, +} from "@heroicons/react/20/solid"; +import { + useFetcher, + useNavigation, + useSearchParams, +} from "@remix-run/react"; +import { useTypedFetcher } from "remix-typedjson"; +import { Dialog, DialogContent, DialogHeader } from "~/components/primitives/Dialog"; +import { Button, LinkButton } from "~/components/primitives/Buttons"; +import { Callout } from "~/components/primitives/Callout"; +import { FormButtons } from "~/components/primitives/FormButtons"; +import { FormError } from "~/components/primitives/FormError"; +import { Header3 } from "~/components/primitives/Headers"; +import { Hint } from "~/components/primitives/Hint"; +import { Label } from "~/components/primitives/Label"; +import { Paragraph } from "~/components/primitives/Paragraph"; +import { Select, SelectItem } from "~/components/primitives/Select"; +import { SpinnerWhite } from "~/components/primitives/Spinner"; +import { Switch } from "~/components/primitives/Switch"; +import { + Tooltip, + TooltipContent, + TooltipTrigger, + TooltipProvider, +} from "~/components/primitives/Tooltip"; +import { VercelLogo } from "~/components/integrations/VercelLogo"; +import { BuildSettingsFields } from "~/components/integrations/VercelBuildSettings"; 
+import { OctoKitty } from "~/components/GitHubLoginButton"; +import { + ConnectGitHubRepoModal, +} from "~/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.github"; +import { + type SyncEnvVarsMapping, + type EnvSlug, + ALL_ENV_SLUGS, + shouldSyncEnvVarForAnyEnvironment, + getAvailableEnvSlugs, + getAvailableEnvSlugsForBuildSettings, +} from "~/v3/vercel/vercelProjectIntegrationSchema"; +import { type VercelCustomEnvironment } from "~/models/vercelIntegration.server"; +import { type VercelOnboardingData } from "~/presenters/v3/VercelSettingsPresenter.server"; +import { vercelAppInstallPath, v3ProjectSettingsPath, githubAppInstallPath, vercelResourcePath } from "~/utils/pathBuilder"; +import type { loader } from "~/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.vercel"; +import { useEffect, useState, useCallback, useRef } from "react"; + +function safeRedirectUrl(url: string): string | null { + try { + const parsed = new URL(url, window.location.origin); + if (parsed.origin === window.location.origin) { + return parsed.toString(); + } + if (parsed.protocol === "https:" && /^([a-z0-9-]+\.)*vercel\.com$/i.test(parsed.hostname)) { + return parsed.toString(); + } + } catch { + // Invalid URL + } + return null; +} + +function formatVercelTargets(targets: string[]): string { + const targetLabels: Record = { + production: "Production", + preview: "Preview", + development: "Development", + staging: "Staging", + }; + + return targets + .map((t) => targetLabels[t.toLowerCase()] || t) + .join(", "); +} + +type OnboardingState = + | "idle" + | "installing" + | "loading-projects" + | "project-selection" + | "loading-env-mapping" + | "env-mapping" + | "loading-env-vars" + | "env-var-sync" + | "build-settings" + | "github-connection" + | "completed"; + +export function VercelOnboardingModal({ + isOpen, + onClose, + onboardingData, + organizationSlug, + projectSlug, + environmentSlug, + hasStagingEnvironment, + 
hasPreviewEnvironment, + hasOrgIntegration, + nextUrl, + onDataReload, +}: { + isOpen: boolean; + onClose: () => void; + onboardingData: VercelOnboardingData | null; + organizationSlug: string; + projectSlug: string; + environmentSlug: string; + hasStagingEnvironment: boolean; + hasPreviewEnvironment: boolean; + hasOrgIntegration: boolean; + nextUrl?: string; + onDataReload?: (vercelStagingEnvironment?: string) => void; +}) { + const navigation = useNavigation(); + const fetcher = useTypedFetcher(); + const envMappingFetcher = useFetcher(); + const completeOnboardingFetcher = useFetcher(); + const { Form: CompleteOnboardingForm } = completeOnboardingFetcher; + const [searchParams] = useSearchParams(); + const fromMarketplaceContext = searchParams.get("origin") === "marketplace"; + + const availableProjects = onboardingData?.availableProjects || []; + const hasProjectSelected = onboardingData?.hasProjectSelected ?? false; + const customEnvironments = onboardingData?.customEnvironments || []; + const envVars = onboardingData?.environmentVariables || []; + const existingVars = onboardingData?.existingVariables || {}; + const hasCustomEnvs = customEnvironments.length > 0 && hasStagingEnvironment; + + const computeInitialState = useCallback((): OnboardingState => { + if (!hasOrgIntegration || onboardingData?.authInvalid) { + return "idle"; + } + const projectSelected = onboardingData?.hasProjectSelected ?? false; + if (!projectSelected) { + if (!onboardingData?.availableProjects || onboardingData.availableProjects.length === 0) { + return "loading-projects"; + } + return "project-selection"; + } + // For marketplace origin, skip env-mapping step and go directly to env-var-sync + if (!fromMarketplaceContext) { + const customEnvs = (onboardingData?.customEnvironments?.length ?? 
0) > 0 && hasStagingEnvironment; + if (customEnvs) { + return "env-mapping"; + } + } + if (!onboardingData?.environmentVariables || onboardingData.environmentVariables.length === 0) { + return "loading-env-vars"; + } + return "env-var-sync"; + }, [hasOrgIntegration, onboardingData, hasStagingEnvironment, fromMarketplaceContext]); + + const [state, setState] = useState(() => { + if (!isOpen) return "idle"; + return computeInitialState(); + }); + + const prevIsOpenRef = useRef(isOpen); + const hasSyncedStagingRef = useRef(false); + const hasSyncedPreviewRef = useRef(false); + useEffect(() => { + if (isOpen && !prevIsOpenRef.current) { + setState(computeInitialState()); + hasSyncedStagingRef.current = false; + hasSyncedPreviewRef.current = false; + } else if (isOpen && state === "idle") { + setState(computeInitialState()); + } + prevIsOpenRef.current = isOpen; + }, [isOpen, state, computeInitialState]); + + const [selectedVercelProject, setSelectedVercelProject] = useState<{ + id: string; + name: string; + } | null>(null); + const [vercelStagingEnvironment, setVercelStagingEnvironment] = useState<{ + environmentId: string; + displayName: string; + } | null>(null); + const availableEnvSlugsForOnboarding = getAvailableEnvSlugs(hasStagingEnvironment, hasPreviewEnvironment); + const availableEnvSlugsForOnboardingBuildSettings = getAvailableEnvSlugsForBuildSettings(hasStagingEnvironment, hasPreviewEnvironment); + const [pullEnvVarsBeforeBuild, setPullEnvVarsBeforeBuild] = useState( + () => availableEnvSlugsForOnboardingBuildSettings + ); + const [atomicBuilds, setAtomicBuilds] = useState( + () => ["prod"] + ); + const [discoverEnvVars, setDiscoverEnvVars] = useState( + () => availableEnvSlugsForOnboardingBuildSettings + ); + + // Sync pullEnvVarsBeforeBuild and discoverEnvVars when hasStagingEnvironment becomes true (once) + useEffect(() => { + if (hasStagingEnvironment && !hasSyncedStagingRef.current) { + hasSyncedStagingRef.current = true; + 
setPullEnvVarsBeforeBuild((prev) => { + if (!prev.includes("stg")) { + return [...prev, "stg"]; + } + return prev; + }); + setDiscoverEnvVars((prev) => { + if (!prev.includes("stg")) { + return [...prev, "stg"]; + } + return prev; + }); + } + }, [hasStagingEnvironment]); + + // Sync pullEnvVarsBeforeBuild and discoverEnvVars when hasPreviewEnvironment becomes true (once) + useEffect(() => { + if (hasPreviewEnvironment && !hasSyncedPreviewRef.current) { + hasSyncedPreviewRef.current = true; + setPullEnvVarsBeforeBuild((prev) => { + if (!prev.includes("preview")) { + return [...prev, "preview"]; + } + return prev; + }); + setDiscoverEnvVars((prev) => { + if (!prev.includes("preview")) { + return [...prev, "preview"]; + } + return prev; + }); + } + }, [hasPreviewEnvironment]); + const [syncEnvVarsMapping, setSyncEnvVarsMapping] = useState({}); + const [expandedEnvVars, setExpandedEnvVars] = useState(false); + const [expandedSecretEnvVars, setExpandedSecretEnvVars] = useState(false); + const [projectSelectionError, setProjectSelectionError] = useState(null); + const [isRedirecting, setIsRedirecting] = useState(false); + + const gitHubAppInstallations = onboardingData?.gitHubAppInstallations ?? []; + const isGitHubConnectedForOnboarding = onboardingData?.isGitHubConnected ?? false; + const isOnboardingComplete = onboardingData?.isOnboardingComplete ?? 
false; + + const hasTriggeredMarketplaceRedirectRef = useRef(false); + + // Auto-redirect for marketplace flow when returning from GitHub with everything complete + useEffect(() => { + if (hasTriggeredMarketplaceRedirectRef.current) { + return; + } + + if ( + isOpen && + fromMarketplaceContext && + nextUrl && + isOnboardingComplete && + isGitHubConnectedForOnboarding + ) { + hasTriggeredMarketplaceRedirectRef.current = true; + const validUrl = safeRedirectUrl(nextUrl); + if (validUrl) { + setTimeout(() => { + window.location.href = validUrl; + }, 100); + } + } + }, [isOpen, fromMarketplaceContext, nextUrl, isOnboardingComplete, isGitHubConnectedForOnboarding]); + + useEffect(() => { + if (!isOpen) { + hasTriggeredMarketplaceRedirectRef.current = false; + setIsRedirecting(false); + } + }, [isOpen]); + + const loadingStateRef = useRef(null); + + useEffect(() => { + if (!isOpen || state === "idle") { + loadingStateRef.current = null; + return; + } + + if (onboardingData?.authInvalid) { + onClose(); + return; + } + + if (loadingStateRef.current === state) { + return; + } + + switch (state) { + + case "loading-projects": + loadingStateRef.current = state; + if (onDataReload) { + onDataReload(); + } + break; + + case "loading-env-mapping": + loadingStateRef.current = state; + if (onDataReload) { + onDataReload(); + } + break; + + case "loading-env-vars": + loadingStateRef.current = state; + if (onDataReload) { + onDataReload(vercelStagingEnvironment?.environmentId || undefined); + } + break; + + case "installing": + case "project-selection": + case "env-mapping": + case "env-var-sync": + case "completed": + case "build-settings": + case "github-connection": + loadingStateRef.current = null; + break; + } + }, [isOpen, state, onboardingData?.authInvalid, vercelStagingEnvironment, onDataReload, onClose]); + + useEffect(() => { + if (!onboardingData?.authInvalid && state === "loading-projects" && onboardingData?.availableProjects !== undefined) { + 
setState("project-selection"); + } + }, [state, onboardingData?.availableProjects, onboardingData?.authInvalid]); + + useEffect(() => { + if (!onboardingData?.authInvalid && state === "loading-env-vars" && onboardingData?.environmentVariables) { + setState("env-var-sync"); + } + }, [state, onboardingData?.environmentVariables, onboardingData?.authInvalid]); + + useEffect(() => { + if (state === "project-selection" && fetcher.data && "success" in fetcher.data && fetcher.data.success && fetcher.state === "idle") { + setState("loading-env-mapping"); + if (onDataReload) { + onDataReload(); + } + } else if (fetcher.data && "error" in fetcher.data && typeof fetcher.data.error === "string") { + setProjectSelectionError(fetcher.data.error); + } + }, [state, fetcher.data, fetcher.state, onDataReload]); + + // For marketplace origin, skip env-mapping step + useEffect(() => { + if (state === "loading-env-mapping" && onboardingData) { + const hasCustomEnvs = (onboardingData.customEnvironments?.length ?? 
0) > 0 && hasStagingEnvironment; + if (hasCustomEnvs && !fromMarketplaceContext) { + setState("env-mapping"); + } else { + setState("loading-env-vars"); + } + } + }, [state, onboardingData, hasStagingEnvironment, fromMarketplaceContext]); + + const secretEnvVars = envVars.filter((v) => v.isSecret); + const syncableEnvVars = envVars.filter((v) => !v.isSecret); + const enabledEnvVars = syncableEnvVars.filter( + (v) => shouldSyncEnvVarForAnyEnvironment(syncEnvVarsMapping, v.key) + ); + + const overlappingEnvVarsCount = enabledEnvVars.filter((v) => existingVars[v.key]).length; + + const isSubmitting = + navigation.state === "submitting" || navigation.state === "loading"; + + const actionUrl = vercelResourcePath(organizationSlug, projectSlug, environmentSlug); + + const handleToggleEnvVar = useCallback((key: string, enabled: boolean) => { + setSyncEnvVarsMapping((prev) => { + const newMapping = { ...prev }; + + if (enabled) { + for (const envSlug of ALL_ENV_SLUGS) { + if (newMapping[envSlug]) { + const { [key]: _, ...rest } = newMapping[envSlug]; + if (Object.keys(rest).length === 0) { + delete newMapping[envSlug]; + } else { + newMapping[envSlug] = rest; + } + } + } + } else { + for (const envSlug of ALL_ENV_SLUGS) { + newMapping[envSlug] = { + ...(newMapping[envSlug] || {}), + [key]: false, + }; + } + } + + return newMapping; + }); + }, []); + + const handleToggleAllEnvVars = useCallback( + (enabled: boolean, syncableVars: Array<{ key: string }>) => { + if (enabled) { + setSyncEnvVarsMapping({}); + } else { + const newMapping: SyncEnvVarsMapping = {}; + for (const envSlug of ALL_ENV_SLUGS) { + newMapping[envSlug] = {}; + for (const v of syncableVars) { + newMapping[envSlug][v.key] = false; + } + } + setSyncEnvVarsMapping(newMapping); + } + }, + [] + ); + + const handleProjectSelection = useCallback(async () => { + if (!selectedVercelProject) { + setProjectSelectionError("Please select a Vercel project"); + return; + } + + setProjectSelectionError(null); + + const 
formData = new FormData(); + formData.append("action", "select-vercel-project"); + formData.append("vercelProjectId", selectedVercelProject.id); + formData.append("vercelProjectName", selectedVercelProject.name); + + fetcher.submit(formData, { + method: "post", + action: actionUrl, + }); + }, [selectedVercelProject, fetcher, actionUrl]); + + const handleSkipOnboarding = useCallback(() => { + onClose(); + + if (fromMarketplaceContext) { + return window.close(); + } + + const formData = new FormData(); + formData.append("action", "skip-onboarding"); + fetcher.submit(formData, { + method: "post", + action: actionUrl, + }); + }, [actionUrl, fetcher, onClose, nextUrl, fromMarketplaceContext]); + + const handleSkipEnvMapping = useCallback(() => { + setVercelStagingEnvironment(null); + setState("loading-env-vars"); + }, []); + + const handleUpdateEnvMapping = useCallback(() => { + if (!vercelStagingEnvironment) { + setState("loading-env-vars"); + return; + } + + const formData = new FormData(); + formData.append("action", "update-env-mapping"); + formData.append("vercelStagingEnvironment", JSON.stringify(vercelStagingEnvironment)); + + envMappingFetcher.submit(formData, { + method: "post", + action: actionUrl, + }); + + }, [vercelStagingEnvironment, envMappingFetcher, actionUrl]); + + const handleBuildSettingsNext = useCallback(() => { + if (nextUrl && fromMarketplaceContext && isGitHubConnectedForOnboarding) { + setIsRedirecting(true); + } + + const formData = new FormData(); + formData.append("action", "complete-onboarding"); + formData.append("vercelStagingEnvironment", vercelStagingEnvironment ? 
JSON.stringify(vercelStagingEnvironment) : ""); + formData.append("pullEnvVarsBeforeBuild", JSON.stringify(pullEnvVarsBeforeBuild)); + formData.append("atomicBuilds", JSON.stringify(atomicBuilds)); + formData.append("discoverEnvVars", JSON.stringify(discoverEnvVars)); + formData.append("syncEnvVarsMapping", JSON.stringify(syncEnvVarsMapping)); + if (nextUrl && fromMarketplaceContext && isGitHubConnectedForOnboarding) { + formData.append("next", nextUrl); + } + + if (!isGitHubConnectedForOnboarding) { + formData.append("skipRedirect", "true"); + } + + completeOnboardingFetcher.submit(formData, { + method: "post", + action: actionUrl, + }); + + if (!isGitHubConnectedForOnboarding) { + setState("github-connection"); + } + }, [vercelStagingEnvironment, pullEnvVarsBeforeBuild, atomicBuilds, discoverEnvVars, syncEnvVarsMapping, nextUrl, fromMarketplaceContext, isGitHubConnectedForOnboarding, completeOnboardingFetcher, actionUrl]); + + const handleFinishOnboarding = useCallback((e: React.FormEvent) => { + e.preventDefault(); + const form = e.currentTarget; + const formData = new FormData(form); + completeOnboardingFetcher.submit(formData, { + method: "post", + action: actionUrl, + }); + }, [completeOnboardingFetcher, actionUrl]); + + useEffect(() => { + if (completeOnboardingFetcher.data && typeof completeOnboardingFetcher.data === "object" && "success" in completeOnboardingFetcher.data && completeOnboardingFetcher.data.success && completeOnboardingFetcher.state === "idle") { + if (state === "github-connection") { + return; + } + if ("redirectTo" in completeOnboardingFetcher.data && typeof completeOnboardingFetcher.data.redirectTo === "string") { + const validRedirect = safeRedirectUrl(completeOnboardingFetcher.data.redirectTo); + if (validRedirect) { + window.location.href = validRedirect; + } + return; + } + setState("completed"); + } + }, [completeOnboardingFetcher.data, completeOnboardingFetcher.state, state]); + + useEffect(() => { + if (state === "completed") { + 
onClose(); + } + }, [state, onClose]); + + useEffect(() => { + if (state === "installing") { + const installUrl = vercelAppInstallPath(organizationSlug, projectSlug); + window.location.href = installUrl; + } + }, [state, organizationSlug, projectSlug]); + + useEffect(() => { + if (envMappingFetcher.data && typeof envMappingFetcher.data === "object" && "success" in envMappingFetcher.data && envMappingFetcher.data.success && envMappingFetcher.state === "idle") { + setState("loading-env-vars"); + } + }, [envMappingFetcher.data, envMappingFetcher.state]); + + useEffect(() => { + if (state === "env-mapping" && customEnvironments.length > 0 && !vercelStagingEnvironment) { + let selectedEnv: VercelCustomEnvironment; + + if (customEnvironments.length === 1) { + selectedEnv = customEnvironments[0]; + } else { + const stagingEnv = customEnvironments.find( + (env) => env.slug.toLowerCase() === "staging" + ); + selectedEnv = stagingEnv ?? customEnvironments[0]; + } + + setVercelStagingEnvironment({ environmentId: selectedEnv.id, displayName: selectedEnv.slug }); + } + }, [state, customEnvironments, vercelStagingEnvironment]); + + if (!isOpen || onboardingData?.authInvalid) { + return null; + } + + const isLoadingState = + state === "loading-projects" || + state === "loading-env-mapping" || + state === "loading-env-vars" || + state === "installing" || + (state === "idle" && !onboardingData); + + if (isLoadingState) { + return ( + !open && !fromMarketplaceContext && onClose()}> + + +
+ + Set up Vercel Integration +
+
+
+ +
+
+
+ ); + } + + const showProjectSelection = state === "project-selection"; + const showEnvMapping = state === "env-mapping"; + const showEnvVarSync = state === "env-var-sync"; + const showBuildSettings = state === "build-settings"; + const showGitHubConnection = state === "github-connection"; + + return ( + !open && !fromMarketplaceContext && onClose()}> + + +
+ + Set up Vercel Integration +
+
+ +
+ {showProjectSelection && ( +
+ Select Vercel Project + + Choose which Vercel project to connect with this Trigger.dev project. + Your API keys will be automatically synced to Vercel. + + + {availableProjects.length === 0 ? ( + + No Vercel projects found. Please create a project in Vercel first. + + ) : ( + + )} + + {projectSelectionError && ( + {projectSelectionError} + )} + + + Once connected, your TRIGGER_SECRET_KEY will be + automatically synced to Vercel for each environment. + + + + {fetcher.state !== "idle" ? "Connecting..." : "Connect Project"} + + } + cancelButton={ + + } + /> +
+ )} + + {showEnvMapping && ( +
+ Map Vercel Environment to Staging + + Select which custom Vercel environment should map to Trigger.dev's Staging + environment. Production and Preview environments are mapped automatically. + + + + +
+ +
+ {!fromMarketplaceContext && ( + + )} + +
+
+
+ )} + + {showEnvVarSync && ( +
+ Pull Environment Variables + + Select which environment variables to pull from Vercel now. This is a one-time pull. + + +
+
+ {syncableEnvVars.length} + can be pulled +
+ {secretEnvVars.length > 0 && ( +
+ {secretEnvVars.length} + secret (cannot pull) +
+ )} +
+ +
+
+ + Select all variables to pull from Vercel. +
+ handleToggleAllEnvVars(checked, syncableEnvVars)} + /> +
+ + {syncableEnvVars.length > 0 && ( +
+ + + {expandedEnvVars && ( +
+ {syncableEnvVars.map((envVar) => ( +
+
+ {existingVars[envVar.key] ? ( + + + +
+ {envVar.key} +
+
+ + {`This variable is going to be replaced in: ${existingVars[ + envVar.key + ].environments.join(", ")}`} + +
+
+ ) : ( + {envVar.key} + )} + {envVar.target && envVar.target.length > 0 && ( + + {formatVercelTargets(envVar.target)} + {envVar.isShared && " · Shared"} + + )} +
+ + handleToggleEnvVar(envVar.key, checked) + } + /> +
+ ))} +
+ )} +
+ )} + + {secretEnvVars.length > 0 && ( +
+ + + {expandedSecretEnvVars && ( +
+ {secretEnvVars.map((envVar) => ( +
+
+ {envVar.key} + {envVar.target && envVar.target.length > 0 && ( + + {formatVercelTargets(envVar.target)} + {envVar.isShared && " · Shared"} + + )} +
+ Secret +
+ ))} +
+ )} +
+ )} + + {overlappingEnvVarsCount > 0 && enabledEnvVars.length > 0 && ( +
+ + + {overlappingEnvVarsCount} env vars are going to be updated (marked with{" "} + + underline + + ) + +
+ )} + + { + if (fromMarketplaceContext) { + handleBuildSettingsNext(); + } else { + setState("build-settings"); + } + }} + disabled={fromMarketplaceContext && (completeOnboardingFetcher.state !== "idle" || isRedirecting)} + LeadingIcon={fromMarketplaceContext && (completeOnboardingFetcher.state !== "idle" || isRedirecting) ? SpinnerWhite : undefined} + > + {fromMarketplaceContext ? (isGitHubConnectedForOnboarding ? "Finish" : "Next") : "Next"} + + } + cancelButton={ + hasCustomEnvs && !fromMarketplaceContext ? ( + + ) : ( + + ) + } + /> +
+ )} + + {showBuildSettings && ( +
+ Build Settings + + Configure how environment variables are pulled during builds and atomic deployments. + + + + + + {isGitHubConnectedForOnboarding ? "Finish" : "Next"} + + } + cancelButton={ + + } + /> +
+ )} + + {showGitHubConnection && ( +
+ Connect GitHub Repository + + To fully integrate with Vercel, Trigger.dev needs access to your source code. + This allows automatic deployments and build synchronization. + + + +

+ Connecting your GitHub repository enables Trigger.dev to read your source code + and automatically create deployments when you push changes to Vercel. +

+
+ + {(() => { + const baseSettingsPath = v3ProjectSettingsPath( + { slug: organizationSlug }, + { slug: projectSlug }, + { slug: environmentSlug } + ); + const redirectParams = new URLSearchParams(); + redirectParams.set("vercelOnboarding", "true"); + if (fromMarketplaceContext) { + redirectParams.set("origin", "marketplace"); + } + if (nextUrl) { + redirectParams.set("next", nextUrl); + } + const redirectUrlWithContext = `${baseSettingsPath}?${redirectParams.toString()}`; + + return gitHubAppInstallations.length === 0 ? ( +
+ + Install GitHub app + +
+ ) : ( +
+
+ + + GitHub app is installed + +
+
+ ); + })()} + + { + setState("completed"); + const validUrl = safeRedirectUrl(nextUrl); + if (validUrl) { + window.location.href = validUrl; + } + }} + > + Complete + + ) : ( + + ) + } + cancelButton={ + isGitHubConnectedForOnboarding && fromMarketplaceContext && nextUrl ? ( + + ) : undefined + } + /> +
+ )} +
+
+
+ ); +} diff --git a/apps/webapp/app/components/navigation/OrganizationSettingsSideMenu.tsx b/apps/webapp/app/components/navigation/OrganizationSettingsSideMenu.tsx index e42928cdd6a..8758e181ff8 100644 --- a/apps/webapp/app/components/navigation/OrganizationSettingsSideMenu.tsx +++ b/apps/webapp/app/components/navigation/OrganizationSettingsSideMenu.tsx @@ -3,6 +3,7 @@ import { ChartBarIcon, Cog8ToothIcon, CreditCardIcon, + PuzzlePieceIcon, UserGroupIcon, } from "@heroicons/react/20/solid"; import { ArrowLeftIcon } from "@heroicons/react/24/solid"; @@ -12,6 +13,7 @@ import { cn } from "~/utils/cn"; import { organizationSettingsPath, organizationTeamPath, + organizationVercelIntegrationPath, rootPath, v3BillingAlertsPath, v3BillingPath, @@ -113,6 +115,13 @@ export function OrganizationSettingsSideMenu({ to={organizationSettingsPath(organization)} data-action="settings" /> +
diff --git a/apps/webapp/app/env.server.ts b/apps/webapp/app/env.server.ts index dcbcac079a0..6733af0addb 100644 --- a/apps/webapp/app/env.server.ts +++ b/apps/webapp/app/env.server.ts @@ -425,6 +425,11 @@ const EnvironmentSchema = z ORG_SLACK_INTEGRATION_CLIENT_ID: z.string().optional(), ORG_SLACK_INTEGRATION_CLIENT_SECRET: z.string().optional(), + /** Vercel integration OAuth credentials */ + VERCEL_INTEGRATION_CLIENT_ID: z.string().optional(), + VERCEL_INTEGRATION_CLIENT_SECRET: z.string().optional(), + VERCEL_INTEGRATION_APP_SLUG: z.string().optional(), + /** These enable the alerts feature in v3 */ ALERT_EMAIL_TRANSPORT: z.enum(["resend", "smtp", "aws-ses"]).optional(), ALERT_FROM_EMAIL: z.string().optional(), diff --git a/apps/webapp/app/models/orgIntegration.server.ts b/apps/webapp/app/models/orgIntegration.server.ts index 343da2701d9..a69aa917671 100644 --- a/apps/webapp/app/models/orgIntegration.server.ts +++ b/apps/webapp/app/models/orgIntegration.server.ts @@ -47,6 +47,13 @@ export type AuthenticatableIntegration = OrganizationIntegration & { tokenReference: SecretReference; }; +export function isIntegrationForService( + integration: AuthenticatableIntegration, + service: TService +): integration is OrganizationIntegrationForService { + return (integration.service satisfies IntegrationService) === service; +} + export class OrgIntegrationRepository { static async getAuthenticatedClientForIntegration( integration: OrganizationIntegrationForService, @@ -89,6 +96,23 @@ export class OrgIntegrationRepository { static isSlackSupported = !!env.ORG_SLACK_INTEGRATION_CLIENT_ID && !!env.ORG_SLACK_INTEGRATION_CLIENT_SECRET; + static isVercelSupported = + !!env.VERCEL_INTEGRATION_CLIENT_ID && !!env.VERCEL_INTEGRATION_CLIENT_SECRET && !!env.VERCEL_INTEGRATION_APP_SLUG; + + /** + * Generate the URL to install the Vercel integration. + * Users are redirected to Vercel's marketplace to complete the installation. 
+ * + * @param state - Base64-encoded state containing org/project info for the callback + */ + static vercelInstallUrl(state: string): string { + // The user goes to Vercel's marketplace to install the integration + // After installation, Vercel redirects to our callback with the authorization code + const redirectUri = encodeURIComponent(`${env.APP_ORIGIN}/vercel/callback`); + const encodedState = encodeURIComponent(state); + return `https://vercel.com/integrations/${env.VERCEL_INTEGRATION_APP_SLUG}/new?state=${encodedState}&redirect_uri=${redirectUri}`; + } + static slackAuthorizationUrl( state: string, scopes: string[] = [ diff --git a/apps/webapp/app/models/vercelIntegration.server.ts b/apps/webapp/app/models/vercelIntegration.server.ts new file mode 100644 index 00000000000..c31b8bde27a --- /dev/null +++ b/apps/webapp/app/models/vercelIntegration.server.ts @@ -0,0 +1,1659 @@ +import pLimit from "p-limit"; +import { Vercel } from "@vercel/sdk"; +import type { + ResponseBodyEnvs, + FilterProjectEnvsResponseBody, +} from "@vercel/sdk/models/filterprojectenvsop"; +import type { + GetV9ProjectsIdOrNameCustomEnvironmentsEnvironments, +} from "@vercel/sdk/models/getv9projectsidornamecustomenvironmentsop"; +import type { ResponseBodyProjects } from "@vercel/sdk/models/getprojectsop"; +import { + Organization, + OrganizationIntegration, + SecretReference, +} from "@trigger.dev/database"; +import { z } from "zod"; +import { ResultAsync, errAsync, okAsync } from "neverthrow"; +import { $transaction, prisma } from "~/db.server"; +import { env } from "~/env.server"; +import { logger } from "~/services/logger.server"; +import { getSecretStore } from "~/services/secrets/secretStore.server"; +import { generateFriendlyId } from "~/v3/friendlyIdentifiers"; +import { + SyncEnvVarsMapping, + shouldSyncEnvVar, + TriggerEnvironmentType, + envTypeToVercelTarget, +} from "~/v3/vercel/vercelProjectIntegrationSchema"; +import { EnvironmentVariablesRepository } from 
"~/v3/environmentVariables/environmentVariablesRepository.server"; + +// --------------------------------------------------------------------------- +// Pure helpers +// --------------------------------------------------------------------------- + +function normalizeTarget(target: string[] | string | undefined): string[] { + if (Array.isArray(target)) return target.filter(Boolean); + if (typeof target === 'string') return [target]; + return []; +} + +function extractVercelEnvs( + response: FilterProjectEnvsResponseBody +): ResponseBodyEnvs[] { + if ("envs" in response && Array.isArray(response.envs)) { + return response.envs; + } + return []; +} + +function isVercelSecretType(type: string): boolean { + return type === "secret" || type === "sensitive"; +} + +// --------------------------------------------------------------------------- +// Error handling +// --------------------------------------------------------------------------- + +export type VercelApiError = { + message: string; + authInvalid: boolean; +}; + +const VercelErrorSchema = z.union([ + z.object({ status: z.number() }), + z.object({ response: z.object({ status: z.number() }) }), + z.object({ statusCode: z.number() }), +]); + +function extractVercelErrorStatus(error: unknown): number | null { + if (error && typeof error === 'object' && 'status' in error) { + const parsed = VercelErrorSchema.safeParse(error); + if (parsed.success && 'status' in parsed.data) { + return parsed.data.status; + } + } + + if (error && typeof error === 'object' && 'response' in error) { + const parsed = VercelErrorSchema.safeParse(error); + if (parsed.success && 'response' in parsed.data) { + return parsed.data.response.status; + } + } + + if (error && typeof error === 'object' && 'statusCode' in error) { + const parsed = VercelErrorSchema.safeParse(error); + if (parsed.success && 'statusCode' in parsed.data) { + return parsed.data.statusCode; + } + } + + if (typeof error === 'string') { + if (error.includes('401')) return 
401; + if (error.includes('403')) return 403; + } + + return null; +} + +function isVercelAuthError(error: unknown): boolean { + const status = extractVercelErrorStatus(error); + return status === 401 || status === 403; +} + +function toVercelApiError(error: unknown): VercelApiError { + if (isVercelApiErrorShape(error)) return error; + return { + message: error instanceof Error ? error.message : "Unknown error", + authInvalid: isVercelAuthError(error), + }; +} + +function isVercelApiErrorShape(error: unknown): error is VercelApiError { + return ( + error !== null && + typeof error === "object" && + "message" in error && + "authInvalid" in error && + typeof (error as VercelApiError).message === "string" && + typeof (error as VercelApiError).authInvalid === "boolean" + ); +} + +/** + * Wrap a Vercel SDK call in ResultAsync with structured error logging. + */ +function wrapVercelCall( + promise: Promise, + message: string, + context: Record +): ResultAsync { + return ResultAsync.fromPromise(promise, (error) => { + const apiError = toVercelApiError(error); + logger.error(message, { ...context, error, authInvalid: apiError.authInvalid }); + return apiError; + }); +} + +// --------------------------------------------------------------------------- +// Schemas & token types +// --------------------------------------------------------------------------- + +export const VercelSecretSchema = z.object({ + accessToken: z.string(), + tokenType: z.string().optional(), + teamId: z.string().nullable().optional(), + userId: z.string().optional(), + installationId: z.string().optional(), + raw: z.record(z.any()).optional(), +}); + +export type VercelSecret = z.infer; + +export type TokenResponse = { + accessToken: string; + tokenType: string; + teamId?: string; + userId?: string; + raw: Record; +}; + +// --------------------------------------------------------------------------- +// Domain types narrowed from Vercel SDK response types. 
+// +// Using Pick and indexed-access types ties these definitions to the SDK so +// that any upstream type change surfaces as a compile error here rather than +// silently breaking at runtime. +// --------------------------------------------------------------------------- + +/** Narrowed env-var type from the SDK's FilterProjectEnvs response. */ +export type VercelEnvironmentVariable = { + id: string; // narrowed from ResponseBodyEnvs["id"] (string | undefined) + key: ResponseBodyEnvs["key"]; + type: ResponseBodyEnvs["type"]; + isSecret: boolean; + target: string[]; + isShared?: boolean; + customEnvironmentIds: string[]; +}; + +/** Narrowed custom-environment type – only the fields we consume. */ +export type VercelCustomEnvironment = Pick< + GetV9ProjectsIdOrNameCustomEnvironmentsEnvironments, + "id" | "slug" | "description" | "branchMatcher" +>; + +/** Narrowed env-var-with-value type from the SDK's FilterProjectEnvs response. */ +export type VercelEnvironmentVariableValue = { + key: ResponseBodyEnvs["key"]; + value: string; // narrowed from ResponseBodyEnvs["value"] – only present after null-check + target: string[]; + type: ResponseBodyEnvs["type"]; + isSecret: boolean; +}; + +/** Narrowed Vercel project type – only id and name. */ +export type VercelProject = Pick; + +// --------------------------------------------------------------------------- +// Mapper functions – narrow wide SDK responses into our domain types. +// --------------------------------------------------------------------------- + +function toVercelEnvironmentVariable( + env: ResponseBodyEnvs +): VercelEnvironmentVariable { + return { + id: env.id ?? "", + key: env.key, + type: env.type, + isSecret: isVercelSecretType(env.type), + target: normalizeTarget(env.target), + customEnvironmentIds: env.customEnvironmentIds ?? 
[], + }; +} + +function toVercelCustomEnvironment({ + id, + slug, + description, + branchMatcher, +}: GetV9ProjectsIdOrNameCustomEnvironmentsEnvironments): VercelCustomEnvironment { + return { id, slug, description, branchMatcher }; +} + +function toVercelEnvironmentVariableValue( + env: ResponseBodyEnvs +): VercelEnvironmentVariableValue | null { + if (!env.value) return null; + return { + key: env.key, + value: env.value, + target: normalizeTarget(env.target), + type: env.type, + isSecret: isVercelSecretType(env.type), + }; +} + +// --------------------------------------------------------------------------- +// Repository +// --------------------------------------------------------------------------- + +export class VercelIntegrationRepository { + static exchangeCodeForToken(code: string): ResultAsync { + const clientId = env.VERCEL_INTEGRATION_CLIENT_ID; + const clientSecret = env.VERCEL_INTEGRATION_CLIENT_SECRET; + const redirectUri = `${env.APP_ORIGIN}/vercel/callback`; + + if (!clientId || !clientSecret) { + logger.error("Vercel integration not configured"); + return errAsync({ message: "Vercel integration not configured", authInvalid: false }); + } + + return ResultAsync.fromPromise( + fetch("https://api.vercel.com/v2/oauth/access_token", { + method: "POST", + headers: { + "Content-Type": "application/x-www-form-urlencoded", + }, + body: new URLSearchParams({ + client_id: clientId, + client_secret: clientSecret, + code, + redirect_uri: redirectUri, + }), + }).then(async (response) => { + if (!response.ok) { + const errorText = await response.text(); + logger.error("Failed to exchange Vercel OAuth code", { + status: response.status, + error: errorText, + }); + throw new Error(`HTTP ${response.status}: ${errorText}`); + } + return response.json() as Promise<{ + access_token: string; + token_type: string; + team_id?: string; + user_id?: string; + }>; + }), + (error) => { + logger.error("Error exchanging Vercel OAuth code", { error }); + return 
toVercelApiError(error); + } + ).map((data): TokenResponse => ({ + accessToken: data.access_token, + tokenType: data.token_type, + teamId: data.team_id, + userId: data.user_id, + raw: data as Record, + })); + } + + static getVercelClient( + integration: OrganizationIntegration & { tokenReference: SecretReference } + ): ResultAsync { + return ResultAsync.fromPromise( + (async () => { + const secretStore = getSecretStore(integration.tokenReference.provider); + const secret = await secretStore.getSecret( + VercelSecretSchema, + integration.tokenReference.key + ); + if (!secret) { + throw new Error("Failed to get Vercel access token"); + } + return new Vercel({ bearerToken: secret.accessToken }); + })(), + (error) => toVercelApiError(error) + ); + } + + static getTeamSlug( + client: Vercel, + teamId: string | null + ): ResultAsync { + if (teamId) { + return wrapVercelCall( + client.teams.getTeam({ teamId }), + "Failed to fetch Vercel team", + { teamId } + ).map((response) => response.slug); + } + + return wrapVercelCall( + client.user.getAuthUser(), + "Failed to fetch Vercel user", + {} + ).map((response) => response?.user.username ?? "unknown"); + } + + static validateVercelToken( + integration: OrganizationIntegration & { tokenReference: SecretReference } + ): ResultAsync<{ isValid: boolean }, VercelApiError> { + return this.getVercelClient(integration) + .andThen((client) => + ResultAsync.fromPromise( + client.user.getAuthUser(), + toVercelApiError + ) + ) + .map(() => ({ isValid: true })) + .orElse((error) => + error.authInvalid + ? okAsync({ isValid: false }) + : errAsync(error) + ); + } + + static async getTeamIdFromIntegration( + integration: OrganizationIntegration & { tokenReference: SecretReference } + ): Promise { + const secretStore = getSecretStore(integration.tokenReference.provider); + + const secret = await secretStore.getSecret( + VercelSecretSchema, + integration.tokenReference.key + ); + + if (!secret) { + return null; + } + + return secret.teamId ?? 
null; + } + + static getVercelIntegrationConfiguration( + accessToken: string, + configurationId: string, + teamId?: string | null + ): ResultAsync<{ + id: string; + teamId: string | null; + projects: string[]; + }, VercelApiError> { + return ResultAsync.fromPromise( + fetch( + `https://api.vercel.com/v1/integrations/configuration/${configurationId}${teamId ? `?teamId=${teamId}` : ""}`, + { + method: "GET", + headers: { + Authorization: `Bearer ${accessToken}`, + "Content-Type": "application/json", + }, + } + ).then(async (response) => { + if (!response.ok) { + const errorText = await response.text(); + logger.error("Failed to fetch Vercel integration configuration", { + status: response.status, + error: errorText, + configurationId, + teamId, + }); + throw new Error(`HTTP ${response.status}: ${errorText}`); + } + return response.json() as Promise<{ + id: string; + teamId?: string | null; + projects?: string[]; + [key: string]: any; + }>; + }), + (error) => { + logger.error("Error fetching Vercel integration configuration", { + configurationId, + teamId, + error, + }); + return toVercelApiError(error); + } + ).map((data) => ({ + id: data.id, + teamId: data.teamId ?? 
null, + projects: data.projects || [], + })); + } + + static getVercelCustomEnvironments( + client: Vercel, + projectId: string, + teamId?: string | null + ): ResultAsync { + return wrapVercelCall( + client.environment.getV9ProjectsIdOrNameCustomEnvironments({ + idOrName: projectId, + ...(teamId && { teamId }), + }), + "Failed to fetch Vercel custom environments", + { projectId, teamId } + ).map((response) => (response.environments || []).map(toVercelCustomEnvironment)); + } + + static getVercelEnvironmentVariables( + client: Vercel, + projectId: string, + teamId?: string | null, + ): ResultAsync { + return wrapVercelCall( + client.projects.filterProjectEnvs({ + idOrName: projectId, + ...(teamId && { teamId }), + }), + "Failed to fetch Vercel environment variables", + { projectId, teamId } + ).map((response) => { + // Warn if response is paginated (more data exists that we're not fetching) + if ( + "pagination" in response && + response.pagination && + "next" in response.pagination && + response.pagination.next !== null + ) { + logger.warn( + "Vercel filterProjectEnvs returned paginated response - some env vars may be missing", + { projectId, count: response.pagination.count } + ); + } + return extractVercelEnvs(response).map(toVercelEnvironmentVariable); + }); + } + + static getVercelEnvironmentVariableValues( + client: Vercel, + projectId: string, + teamId?: string | null, + target?: string, + /** If provided, only include keys that pass this filter */ + shouldIncludeKey?: (key: string) => boolean + ): ResultAsync { + return wrapVercelCall( + client.projects.filterProjectEnvs({ + idOrName: projectId, + ...(teamId && { teamId }), + }), + "Failed to fetch Vercel environment variable values", + { projectId, teamId, target } + ).andThen((response) => { + // Apply all filters BEFORE decryption to avoid unnecessary API calls + const filteredEnvs = extractVercelEnvs(response).filter((env) => { + if (target && !normalizeTarget(env.target).includes(target)) return false; 
+ if (shouldIncludeKey && !shouldIncludeKey(env.key)) return false; + if (isVercelSecretType(env.type)) return false; + return true; + }); + + // Fetch decrypted values for encrypted vars, use list values for others + const concurrencyLimit = pLimit(5); + return ResultAsync.fromPromise( + Promise.all( + filteredEnvs.map((env) => + concurrencyLimit(() => this.#resolveEnvVarValue(client, projectId, teamId, env)) + ) + ), + (error) => toVercelApiError(error) + ).map((results) => results.filter((v): v is VercelEnvironmentVariableValue => v !== null)); + }); + } + + static async #resolveEnvVarValue( + client: Vercel, + projectId: string, + teamId: string | null | undefined, + env: ResponseBodyEnvs + ): Promise { + // Non-encrypted vars: use value from list response if present + if (env.type !== "encrypted" || !env.id) { + if (env.value === undefined || env.value === null) return null; + return toVercelEnvironmentVariableValue(env); + } + + // Encrypted vars: fetch decrypted value via individual endpoint + // (list endpoint's decrypt param is deprecated) + const result = await ResultAsync.fromPromise( + client.projects.getProjectEnv({ + idOrName: projectId, + id: env.id, + ...(teamId && { teamId }), + }), + (error) => error + ); + + if (result.isErr()) { + logger.warn("Failed to decrypt Vercel env var", { + projectId, + envVarKey: env.key, + error: result.error instanceof Error ? 
result.error.message : String(result.error), + }); + return null; + } + + // API returns union: ResponseBody1 has no value, ResponseBody2/3 have value + const decryptedValue = (result.value as { value?: string }).value; + if (typeof decryptedValue !== "string") return null; + + return { + key: env.key, + value: decryptedValue, + target: normalizeTarget(env.target), + type: env.type, + isSecret: false, + }; + } + + static getVercelSharedEnvironmentVariables( + client: Vercel, + teamId: string, + projectId?: string // Optional: filter by project + ): ResultAsync, VercelApiError> { + return wrapVercelCall( + client.environment.listSharedEnvVariable({ + teamId, + ...(projectId && { projectId }), + }), + "Failed to fetch Vercel shared environment variables", + { teamId, projectId } + ).map((response) => { + const envVars = response.data || []; + return envVars + .filter((env): env is typeof env & { id: string; key: string } => + typeof env.id === "string" && typeof env.key === "string" + ) + .map((env) => { + const type = env.type || "plain"; + return { + id: env.id, + key: env.key, + type, + isSecret: isVercelSecretType(type), + target: normalizeTarget(env.target), + }; + }); + }); + } + + static getVercelSharedEnvironmentVariableValues( + client: Vercel, + teamId: string, + projectId?: string // Optional: filter by project + ): ResultAsync< + Array<{ + key: string; + value: string; + target: string[]; + type: string; + isSecret: boolean; + applyToAllCustomEnvironments?: boolean; + }>, + VercelApiError + > { + return wrapVercelCall( + client.environment.listSharedEnvVariable({ + teamId, + ...(projectId && { projectId }), + }), + "Failed to fetch Vercel shared environment variable values", + { teamId, projectId } + ).andThen((listResponse) => { + const envVars = listResponse.data || []; + if (envVars.length === 0) { + return okAsync([]); + } + + const concurrencyLimit = pLimit(5); + return ResultAsync.fromPromise( + Promise.all( + envVars.map((env) => + 
concurrencyLimit(async () => { + if (!env.id || !env.key) return null; + + const envId = env.id; + const envKey = env.key; + const type = env.type || "plain"; + const isSecret = isVercelSecretType(type); + + if (isSecret) return null; + + const listValue = (env as any).value as string | undefined; + const applyToAllCustomEnvs = (env as any).applyToAllCustomEnvironments as boolean | undefined; + + if (listValue) { + return { + key: envKey, + value: listValue, + target: normalizeTarget(env.target), + type, + isSecret, + applyToAllCustomEnvironments: applyToAllCustomEnvs, + }; + } + + // Try to get the decrypted value for this shared env var + const getResult = await ResultAsync.fromPromise( + client.environment.getSharedEnvVar({ + id: envId, + teamId, + }), + (error) => error + ); + + if (getResult.isOk()) { + if (!getResult.value.value) return null; + return { + key: envKey, + value: getResult.value.value, + target: normalizeTarget(env.target), + type, + isSecret, + applyToAllCustomEnvironments: applyToAllCustomEnvs, + }; + } + + // Workaround: Vercel SDK may throw ResponseValidationError even when the API response + // is valid (e.g., deletedAt: null vs expected number). Extract value from rawValue. + const error = getResult.error; + let errorValue: string | undefined; + if (error && typeof error === "object" && "rawValue" in error) { + const rawValue = (error as any).rawValue; + if (rawValue && typeof rawValue === "object" && "value" in rawValue) { + errorValue = rawValue.value as string | undefined; + } + } + + const fallbackValue = errorValue || listValue; + + if (fallbackValue) { + logger.warn("getSharedEnvVar failed validation, using value from error.rawValue or list response", { + teamId, + envId, + envKey, + error: error instanceof Error ? 
error.message : String(error), + hasErrorRawValue: !!errorValue, + hasListValue: !!listValue, + valueLength: fallbackValue.length, + }); + return { + key: envKey, + value: fallbackValue, + target: normalizeTarget(env.target), + type, + isSecret, + applyToAllCustomEnvironments: applyToAllCustomEnvs, + }; + } + + logger.warn("Failed to get decrypted value for shared env var, no fallback available", { + teamId, + projectId, + envId, + envKey, + error: error instanceof Error ? error.message : String(error), + errorStack: error instanceof Error ? error.stack : undefined, + hasRawValue: error && typeof error === "object" && "rawValue" in error, + }); + return null; + }) + ) + ), + (error) => { + logger.error("Failed to process shared environment variable values", { + teamId, + projectId, + error: error instanceof Error ? error.message : String(error), + }); + return toVercelApiError(error); + } + ).map((results) => results.filter((r): r is NonNullable => r !== null)); + }); + } + + static getVercelProjects( + client: Vercel, + teamId?: string | null + ): ResultAsync { + return ResultAsync.fromPromise( + (async () => { + const allProjects: VercelProject[] = []; + let from: string | undefined; + + do { + const response = await client.projects.getProjects({ + ...(teamId && { teamId }), + limit: "100", + ...(from && { from }), + }); + + const projects = Array.isArray(response) + ? response + : "projects" in response + ? response.projects + : []; + allProjects.push(...projects.map(({ id, name }): VercelProject => ({ id, name }))); + + // Get pagination token for next page + const pagination = + !Array.isArray(response) && "pagination" in response + ? response.pagination + : undefined; + from = + pagination && "next" in pagination && pagination.next !== null + ? 
String(pagination.next) + : undefined; + } while (from); + + return allProjects; + })(), + (error) => { + logger.error("Failed to fetch Vercel projects", { teamId, error }); + return toVercelApiError(error); + } + ); + } + + static async updateVercelOrgIntegrationToken(params: { + integrationId: string; + accessToken: string; + tokenType?: string; + teamId: string | null; + userId?: string; + installationId?: string; + raw?: Record; + }): Promise { + await $transaction(prisma, async (tx) => { + const integration = await tx.organizationIntegration.findUnique({ + where: { id: params.integrationId }, + include: { tokenReference: true }, + }); + + if (!integration) { + throw new Error("Vercel integration not found"); + } + + const secretStore = getSecretStore(integration.tokenReference.provider, { + prismaClient: tx, + }); + + const secretValue: VercelSecret = { + accessToken: params.accessToken, + tokenType: params.tokenType, + teamId: params.teamId, + userId: params.userId, + installationId: params.installationId, + raw: params.raw, + }; + + await secretStore.setSecret(integration.tokenReference.key, secretValue); + + await tx.organizationIntegration.update({ + where: { id: params.integrationId }, + data: { + integrationData: { + teamId: params.teamId, + userId: params.userId, + installationId: params.installationId, + } as any, + }, + }); + }); + } + + static async createVercelOrgIntegration(params: { + accessToken: string; + tokenType?: string; + teamId: string | null; + userId?: string; + installationId?: string; + organization: Pick; + raw?: Record; + origin: 'marketplace' | 'dashboard'; + }): Promise { + const result = await $transaction(prisma, async (tx) => { + const secretStore = getSecretStore("DATABASE", { + prismaClient: tx, + }); + + const integrationFriendlyId = generateFriendlyId("org_integration"); + + const secretValue: VercelSecret = { + accessToken: params.accessToken, + tokenType: params.tokenType, + teamId: params.teamId, + userId: params.userId, 
+ installationId: params.installationId, + raw: params.raw, + }; + + await secretStore.setSecret(integrationFriendlyId, secretValue); + + const reference = await tx.secretReference.create({ + data: { + provider: "DATABASE", + key: integrationFriendlyId, + }, + }); + + return await tx.organizationIntegration.create({ + data: { + friendlyId: integrationFriendlyId, + organizationId: params.organization.id, + service: "VERCEL", + externalOrganizationId: params.teamId, + tokenReferenceId: reference.id, + integrationData: { + teamId: params.teamId, + userId: params.userId, + installationId: params.installationId, + origin: params.origin, + } as any, + }, + }); + }); + + if (!result) { + throw new Error("Failed to create Vercel organization integration"); + } + + return result; + } + + static async findVercelOrgIntegrationByTeamId( + organizationId: string, + teamId: string | null + ): Promise<(OrganizationIntegration & { tokenReference: SecretReference }) | null> { + return prisma.organizationIntegration.findFirst({ + where: { + organizationId, + service: "VERCEL", + externalOrganizationId: teamId, + deletedAt: null, + }, + include: { + tokenReference: true, + }, + }); + } + + static async findVercelOrgIntegrationForProject( + projectId: string + ): Promise<(OrganizationIntegration & { tokenReference: SecretReference }) | null> { + const projectIntegration = await prisma.organizationProjectIntegration.findFirst({ + where: { + projectId, + deletedAt: null, + organizationIntegration: { + service: "VERCEL", + deletedAt: null, + }, + }, + include: { + organizationIntegration: { + include: { + tokenReference: true, + }, + }, + }, + }); + + return projectIntegration?.organizationIntegration ?? 
null; + } + + static async findVercelOrgIntegrationByOrganization( + organizationId: string + ): Promise<(OrganizationIntegration & { tokenReference: SecretReference }) | null> { + return prisma.organizationIntegration.findFirst({ + where: { + organizationId, + service: "VERCEL", + deletedAt: null, + }, + include: { + tokenReference: true, + }, + }); + } + + static syncApiKeysToVercel(params: { + projectId: string; + vercelProjectId: string; + teamId: string | null; + vercelStagingEnvironment?: { environmentId: string; displayName: string } | null; + orgIntegration: OrganizationIntegration & { tokenReference: SecretReference }; + }): ResultAsync<{ created: number; updated: number; errors: string[] }, VercelApiError> { + return this.getVercelClient(params.orgIntegration).andThen((client) => + ResultAsync.fromPromise( + (async () => { + // Get all environments for the project (exclude DEVELOPMENT — we don't push keys to Vercel's development target) + const environments = await prisma.runtimeEnvironment.findMany({ + where: { + projectId: params.projectId, + type: { + in: ["PRODUCTION", "STAGING", "PREVIEW"], + }, + }, + select: { + id: true, + type: true, + apiKey: true, + }, + }); + + // Build the list of env vars to sync + const envVarsToSync: Array<{ + key: string; + value: string; + target: string[]; + type: "sensitive" | "encrypted" | "plain"; + environmentType: string; + }> = []; + + for (const runtimeEnv of environments) { + const vercelTarget = envTypeToVercelTarget( + runtimeEnv.type as TriggerEnvironmentType, + params.vercelStagingEnvironment?.environmentId + ); + + if (!vercelTarget) { + continue; + } + + envVarsToSync.push({ + key: "TRIGGER_SECRET_KEY", + value: runtimeEnv.apiKey, + target: vercelTarget, + type: "encrypted", + environmentType: runtimeEnv.type, + }); + } + + if (envVarsToSync.length === 0) { + return { created: 0, updated: 0, errors: [] as string[] }; + } + + const result = await this.batchUpsertVercelEnvVars({ + client, + vercelProjectId: 
params.vercelProjectId, + teamId: params.teamId, + envVars: envVarsToSync, + }); + + logger.info("Synced API keys to Vercel", { + projectId: params.projectId, + vercelProjectId: params.vercelProjectId, + syncedCount: result.created + result.updated, + created: result.created, + updated: result.updated, + errors: result.errors, + }); + + return result; + })(), + (error) => { + logger.error("Failed to sync API keys to Vercel", { + projectId: params.projectId, + vercelProjectId: params.vercelProjectId, + error, + }); + return toVercelApiError(error); + } + ) + ); + } + + static syncSingleApiKeyToVercel(params: { + projectId: string; + environmentType: "PRODUCTION" | "STAGING" | "PREVIEW" | "DEVELOPMENT"; + apiKey: string; + }): ResultAsync { + return ResultAsync.fromPromise( + (async () => { + const projectIntegration = await prisma.organizationProjectIntegration.findFirst({ + where: { + projectId: params.projectId, + deletedAt: null, + organizationIntegration: { + service: "VERCEL", + deletedAt: null, + }, + }, + include: { + organizationIntegration: { + include: { + tokenReference: true, + }, + }, + }, + }); + + if (!projectIntegration) { + return; // No integration, nothing to sync + } + + const orgIntegration = projectIntegration.organizationIntegration; + const clientResult = await this.getVercelClient(orgIntegration); + if (clientResult.isErr()) throw clientResult.error; + const client = clientResult.value; + + const teamId = await this.getTeamIdFromIntegration(orgIntegration); + + const integrationData = projectIntegration.integrationData as any; + const vercelStagingEnvironment = integrationData?.config?.vercelStagingEnvironment; + + const vercelTarget = envTypeToVercelTarget( + params.environmentType, + vercelStagingEnvironment?.environmentId + ); + + if (!vercelTarget) { + return; + } + + await this.upsertVercelEnvVar({ + client, + vercelProjectId: projectIntegration.externalEntityId, + teamId, + key: "TRIGGER_SECRET_KEY", + value: params.apiKey, + target: 
vercelTarget, + type: "encrypted", + }); + + logger.info("Synced regenerated API key to Vercel", { + projectId: params.projectId, + vercelProjectId: projectIntegration.externalEntityId, + environmentType: params.environmentType, + target: vercelTarget, + }); + })(), + (error) => { + logger.error("Failed to sync API key to Vercel", { + projectId: params.projectId, + environmentType: params.environmentType, + error, + }); + return toVercelApiError(error); + } + ); + } + + static pullEnvVarsFromVercel(params: { + projectId: string; + vercelProjectId: string; + teamId: string | null; + vercelStagingEnvironment?: { environmentId: string; displayName: string } | null; + syncEnvVarsMapping: SyncEnvVarsMapping; + orgIntegration: OrganizationIntegration & { tokenReference: SecretReference }; + }): ResultAsync<{ syncedCount: number; errors: string[] }, VercelApiError> { + logger.info("pullEnvVarsFromVercel: Starting", { + projectId: params.projectId, + vercelProjectId: params.vercelProjectId, + teamId: params.teamId, + vercelStagingEnvironment: params.vercelStagingEnvironment, + syncEnvVarsMappingKeys: Object.keys(params.syncEnvVarsMapping), + }); + + return this.getVercelClient(params.orgIntegration).andThen((client) => + ResultAsync.fromPromise( + (async () => { + const errors: string[] = []; + let syncedCount = 0; + + // Get all runtime environments for the project + const runtimeEnvironments = await prisma.runtimeEnvironment.findMany({ + where: { + projectId: params.projectId, + type: { + in: ["PRODUCTION", "STAGING", "PREVIEW", "DEVELOPMENT"], + }, + }, + select: { + id: true, + type: true, + }, + }); + + const envMapping: Array<{ + triggerEnvType: "PRODUCTION" | "STAGING" | "PREVIEW" | "DEVELOPMENT"; + vercelTarget: string; + runtimeEnvironmentId: string; + }> = []; + + for (const runtimeEnv of runtimeEnvironments) { + const vercelTarget = envTypeToVercelTarget( + runtimeEnv.type as TriggerEnvironmentType, + params.vercelStagingEnvironment?.environmentId + ); + + if 
(!vercelTarget) { + continue; + } + + envMapping.push({ + triggerEnvType: runtimeEnv.type as "PRODUCTION" | "STAGING" | "PREVIEW" | "DEVELOPMENT", + vercelTarget: vercelTarget[0], + runtimeEnvironmentId: runtimeEnv.id, + }); + } + + if (envMapping.length === 0) { + logger.warn("No environments to sync for Vercel integration", { + projectId: params.projectId, + vercelProjectId: params.vercelProjectId, + }); + return { syncedCount: 0, errors: [] as string[] }; + } + + const envVarRepository = new EnvironmentVariablesRepository(); + + // Fetch shared env vars once (they apply across all targets) + let sharedEnvVars: Array<{ + key: string; + value: string; + target: string[]; + type: string; + isSecret: boolean; + applyToAllCustomEnvironments?: boolean; + }> = []; + + if (params.teamId) { + const sharedResult = await this.getVercelSharedEnvironmentVariableValues( + client, + params.teamId, + params.vercelProjectId + ); + sharedEnvVars = sharedResult.unwrapOr([]); + } + + // Process each environment mapping + for (const mapping of envMapping) { + const iterResult = await ResultAsync.fromPromise( + (async () => { + // Build filter to avoid decrypting vars that will be filtered out anyway + const excludeKeys = new Set(["TRIGGER_SECRET_KEY", "TRIGGER_VERSION"]); + const shouldIncludeKey = (key: string) => + !excludeKeys.has(key) && + shouldSyncEnvVar(params.syncEnvVarsMapping, key, mapping.triggerEnvType as TriggerEnvironmentType); + + const envVarsResult = await this.getVercelEnvironmentVariableValues( + client, + params.vercelProjectId, + params.teamId, + mapping.vercelTarget, + shouldIncludeKey + ); + + if (envVarsResult.isErr()) { + logger.error("pullEnvVarsFromVercel: Failed to get env vars", { + triggerEnvType: mapping.triggerEnvType, + vercelTarget: mapping.vercelTarget, + error: envVarsResult.error.message, + }); + errors.push(`Failed to get env vars for ${mapping.triggerEnvType}: ${envVarsResult.error.message}`); + return; + } + + const projectEnvVars = 
envVarsResult.value; + const standardTargets = ["production", "preview", "development"]; + const isCustomEnvironment = !standardTargets.includes(mapping.vercelTarget); + + const filteredSharedEnvVars = sharedEnvVars.filter((envVar) => { + const matchesTarget = envVar.target.includes(mapping.vercelTarget); + const matchesCustomEnv = isCustomEnvironment && envVar.applyToAllCustomEnvironments === true; + return matchesTarget || matchesCustomEnv; + }); + + const projectEnvVarKeys = new Set(projectEnvVars.map((v) => v.key)); + const sharedEnvVarsToAdd = filteredSharedEnvVars.filter((v) => !projectEnvVarKeys.has(v.key)); + const mergedEnvVars = [ + ...projectEnvVars, + ...sharedEnvVarsToAdd, + ]; + + if (mergedEnvVars.length === 0) { + return; + } + + const varsToSync = mergedEnvVars.filter((envVar) => { + if (envVar.isSecret) { + return false; + } + if (envVar.key === "TRIGGER_SECRET_KEY" || envVar.key === "TRIGGER_VERSION") { + return false; + } + return shouldSyncEnvVar( + params.syncEnvVarsMapping, + envVar.key, + mapping.triggerEnvType as TriggerEnvironmentType + ); + }); + + if (varsToSync.length === 0) { + return; + } + + const existingSecretKeys = new Set(); + const existingValues = new Map(); + + const existingVarValues = await prisma.environmentVariableValue.findMany({ + where: { + environmentId: mapping.runtimeEnvironmentId, + variable: { + projectId: params.projectId, + key: { + in: varsToSync.map((v) => v.key), + }, + }, + }, + select: { + isSecret: true, + valueReference: { + select: { + key: true, + }, + }, + variable: { + select: { + key: true, + }, + }, + }, + }); + + if (existingVarValues.length > 0) { + const secretStore = getSecretStore("DATABASE", { prismaClient: prisma }); + const SecretValue = z.object({ secret: z.string() }); + + for (const varValue of existingVarValues) { + if (varValue.isSecret) { + existingSecretKeys.add(varValue.variable.key); + } + + if (varValue.valueReference?.key) { + const existingSecret = await ResultAsync.fromPromise( + 
secretStore.getSecret(SecretValue, varValue.valueReference.key), + () => null + ).unwrapOr(null); + if (existingSecret) { + existingValues.set(varValue.variable.key, existingSecret.secret); + } + } + } + } + + const changedVars = varsToSync.filter((v) => { + const existingValue = existingValues.get(v.key); + return existingValue === undefined || existingValue !== v.value; + }); + + if (changedVars.length === 0) { + return; + } + + const secretVars = changedVars.filter((v) => existingSecretKeys.has(v.key)); + const nonSecretVars = changedVars.filter((v) => !existingSecretKeys.has(v.key)); + + if (nonSecretVars.length > 0) { + const result = await envVarRepository.create(params.projectId, { + override: true, + environmentIds: [mapping.runtimeEnvironmentId], + isSecret: false, + variables: nonSecretVars.map((v) => ({ + key: v.key, + value: v.value, + })), + lastUpdatedBy: { + type: "integration", + integration: "vercel", + }, + }); + + if (result.success) { + syncedCount += nonSecretVars.length; + } else { + const errorMsg = `Failed to sync env vars for ${mapping.triggerEnvType}: ${result.error}`; + errors.push(errorMsg); + logger.error(errorMsg, { + projectId: params.projectId, + vercelProjectId: params.vercelProjectId, + vercelTarget: mapping.vercelTarget, + error: result.error, + variableErrors: result.variableErrors, + attemptedKeys: nonSecretVars.map((v) => v.key), + }); + } + } + + if (secretVars.length > 0) { + const result = await envVarRepository.create(params.projectId, { + override: true, + environmentIds: [mapping.runtimeEnvironmentId], + isSecret: true, + variables: secretVars.map((v) => ({ + key: v.key, + value: v.value, + })), + lastUpdatedBy: { + type: "integration", + integration: "vercel", + }, + }); + + if (result.success) { + syncedCount += secretVars.length; + } else { + const errorMsg = `Failed to sync secret env vars for ${mapping.triggerEnvType}: ${result.error}`; + errors.push(errorMsg); + logger.error(errorMsg, { + projectId: 
params.projectId, + vercelProjectId: params.vercelProjectId, + vercelTarget: mapping.vercelTarget, + error: result.error, + variableErrors: result.variableErrors, + attemptedKeys: secretVars.map((v) => v.key), + }); + } + } + })(), + (error) => error + ); + + if (iterResult.isErr()) { + const errorMsg = `Failed to process env vars for ${mapping.triggerEnvType}: ${iterResult.error instanceof Error ? iterResult.error.message : "Unknown error"}`; + errors.push(errorMsg); + logger.error(errorMsg, { + projectId: params.projectId, + vercelProjectId: params.vercelProjectId, + vercelTarget: mapping.vercelTarget, + error: iterResult.error, + }); + } + } + + return { syncedCount, errors }; + })(), + (error) => { + logger.error("Failed to pull env vars from Vercel", { + projectId: params.projectId, + vercelProjectId: params.vercelProjectId, + error, + }); + return toVercelApiError(error); + } + ) + ); + } + + static async batchUpsertVercelEnvVars(params: { + client: Vercel; + vercelProjectId: string; + teamId: string | null; + envVars: Array<{ + key: string; + value: string; + target: string[]; + type: "sensitive" | "encrypted" | "plain"; + environmentType?: string; // For logging purposes + }>; + }): Promise<{ created: number; updated: number; errors: string[] }> { + const { client, vercelProjectId, teamId, envVars } = params; + const errors: string[] = []; + let created = 0; + let updated = 0; + + if (envVars.length === 0) { + return { created: 0, updated: 0, errors: [] }; + } + + const existingEnvs = await client.projects.filterProjectEnvs({ + idOrName: vercelProjectId, + ...(teamId && { teamId }), + }); + + const existingEnvsList = extractVercelEnvs(existingEnvs); + + const toCreate: Array<{ + key: string; + value: string; + target: string[]; + type: "sensitive" | "encrypted" | "plain"; + }> = []; + + const toUpdate: Array<{ + id: string; + key: string; + value: string; + target: string[]; + type: "sensitive" | "encrypted" | "plain"; + environmentType?: string; + }> = []; 
+ + for (const envVar of envVars) { + const existingEnv = existingEnvsList.find((existing) => { + if (existing.key !== envVar.key) { + return false; + } + const envTargets = normalizeTarget(existing.target); + return ( + envVar.target.length === envTargets.length && + envVar.target.every((t) => envTargets.includes(t)) + ); + }); + + if (existingEnv && existingEnv.id) { + toUpdate.push({ + id: existingEnv.id, + key: envVar.key, + value: envVar.value, + target: envVar.target, + type: envVar.type, + environmentType: envVar.environmentType, + }); + } else { + toCreate.push({ + key: envVar.key, + value: envVar.value, + target: envVar.target, + type: envVar.type, + }); + } + } + + if (toCreate.length > 0) { + const createResult = await ResultAsync.fromPromise( + client.projects.createProjectEnv({ + idOrName: vercelProjectId, + ...(teamId && { teamId }), + requestBody: toCreate.map((item) => ({ + key: item.key, + value: item.value, + target: item.target as any, + type: item.type, + })) as any, + }), + (error) => error + ); + + if (createResult.isOk()) { + created = toCreate.length; + } else { + const errorMsg = `Failed to batch create env vars: ${createResult.error instanceof Error ? createResult.error.message : "Unknown error"}`; + errors.push(errorMsg); + logger.error(errorMsg, { + vercelProjectId, + teamId, + count: toCreate.length, + error: createResult.error, + }); + } + } + + // Update existing env vars (Vercel doesn't support batch updates) + for (const envVar of toUpdate) { + const updateResult = await ResultAsync.fromPromise( + client.projects.editProjectEnv({ + idOrName: vercelProjectId, + id: envVar.id, + ...(teamId && { teamId }), + requestBody: { + value: envVar.value, + target: envVar.target as any, + type: envVar.type, + }, + }), + (error) => error + ); + + if (updateResult.isOk()) { + updated++; + } else { + const errorMsg = `Failed to update ${envVar.environmentType || envVar.key} env var: ${updateResult.error instanceof Error ? 
updateResult.error.message : "Unknown error"}`; + errors.push(errorMsg); + logger.error(errorMsg, { + vercelProjectId, + teamId, + envVarId: envVar.id, + key: envVar.key, + error: updateResult.error, + }); + } + } + + return { created, updated, errors }; + } + + private static async upsertVercelEnvVar(params: { + client: Vercel; + vercelProjectId: string; + teamId: string | null; + key: string; + value: string; + target: string[]; + type: "sensitive" | "encrypted" | "plain"; + }): Promise { + const { client, vercelProjectId, teamId, key, value, target, type } = params; + + const existingEnvs = await client.projects.filterProjectEnvs({ + idOrName: vercelProjectId, + ...(teamId && { teamId }), + }); + + const envs = extractVercelEnvs(existingEnvs); + + // Vercel can have multiple env vars with the same key but different targets + const existingEnv = envs.find((existing) => { + if (existing.key !== key) { + return false; + } + const envTargets = normalizeTarget(existing.target); + return target.length === envTargets.length && target.every((t) => envTargets.includes(t)); + }); + + if (existingEnv && existingEnv.id) { + await client.projects.editProjectEnv({ + idOrName: vercelProjectId, + id: existingEnv.id, + ...(teamId && { teamId }), + requestBody: { + value, + target: target as any, + type, + }, + }); + } else { + await client.projects.createProjectEnv({ + idOrName: vercelProjectId, + ...(teamId && { teamId }), + requestBody: { + key, + value, + target: target as any, + type, + }, + }); + } + } + + static getAutoAssignCustomDomains( + client: Vercel, + vercelProjectId: string, + teamId?: string | null + ): ResultAsync { + // Vercel SDK lacks a getProject method — updateProject with empty body reads without modifying. 
+ return wrapVercelCall( + client.projects.updateProject({ + idOrName: vercelProjectId, + ...(teamId && { teamId }), + requestBody: {}, + }), + "Failed to get Vercel project autoAssignCustomDomains", + { vercelProjectId, teamId } + ).map((project) => project.autoAssignCustomDomains ?? null); + } + + /** Disable autoAssignCustomDomains — required for atomic deployments. */ + static disableAutoAssignCustomDomains( + client: Vercel, + vercelProjectId: string, + teamId?: string | null + ): ResultAsync { + return wrapVercelCall( + client.projects.updateProject({ + idOrName: vercelProjectId, + ...(teamId && { teamId }), + requestBody: { + autoAssignCustomDomains: false, + }, + }), + "Failed to disable autoAssignCustomDomains", + { vercelProjectId, teamId } + ).map(() => undefined); + } + + static uninstallVercelIntegration( + integration: OrganizationIntegration & { tokenReference: SecretReference } + ): ResultAsync<{ authInvalid: boolean }, VercelApiError> { + return this.getVercelClient(integration).andThen((client) => + ResultAsync.fromPromise( + (async () => { + const secret = await getSecretStore(integration.tokenReference.provider).getSecret( + VercelSecretSchema, + integration.tokenReference.key + ); + + if (!secret?.installationId) { + throw new Error("Installation ID not found in Vercel integration"); + } + + return secret.installationId; + })(), + toVercelApiError + ).andThen((installationId) => + ResultAsync.fromPromise( + client.integrations.deleteConfiguration({ + id: installationId, + }), + (error) => error + ) + .map(() => ({ authInvalid: false })) + .orElse((error) => { + const isAuthError = isVercelAuthError(error); + logger.error("Failed to uninstall Vercel integration", { + installationId, + error: error instanceof Error ? 
error.message : "Unknown error", + isAuthError, + }); + // Auth errors (401/403): still clean up on our side, return flag for caller + if (isAuthError) { + return okAsync({ authInvalid: true }); + } + return errAsync(toVercelApiError(error)); + }) + ) + ); + } +} diff --git a/apps/webapp/app/presenters/v3/ApiKeysPresenter.server.ts b/apps/webapp/app/presenters/v3/ApiKeysPresenter.server.ts index 1ff560586a9..78e64403e74 100644 --- a/apps/webapp/app/presenters/v3/ApiKeysPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/ApiKeysPresenter.server.ts @@ -38,6 +38,11 @@ export class ApiKeysPresenter { apiKey: true, }, }, + project: { + select: { + id: true, + }, + }, }, where: { project: { @@ -64,11 +69,22 @@ export class ApiKeysPresenter { throw new Error("Environment not found"); } + const vercelIntegration = + await this.#prismaClient.organizationProjectIntegration.findFirst({ + where: { + projectId: environment.project.id, + deletedAt: null, + organizationIntegration: { service: "VERCEL", deletedAt: null }, + }, + select: { id: true }, + }); + return { environment: { ...environment, apiKey: environment?.parentEnvironment?.apiKey ?? 
environment?.apiKey, }, + hasVercelIntegration: vercelIntegration !== null, }; } } diff --git a/apps/webapp/app/presenters/v3/DeploymentListPresenter.server.ts b/apps/webapp/app/presenters/v3/DeploymentListPresenter.server.ts index 0b920e29421..1f5996f9967 100644 --- a/apps/webapp/app/presenters/v3/DeploymentListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/DeploymentListPresenter.server.ts @@ -1,5 +1,5 @@ import { - type Prisma, + Prisma, type WorkerDeploymentStatus, type WorkerInstanceGroupType, } from "@trigger.dev/database"; @@ -10,6 +10,7 @@ import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; import { type User } from "~/models/user.server"; import { processGitMetadata } from "./BranchesPresenter.server"; import { BranchTrackingConfigSchema, getTrackedBranchForEnvironment } from "~/v3/github"; +import { VercelProjectIntegrationDataSchema } from "~/v3/vercel/vercelProjectIntegrationSchema"; const pageSize = 20; @@ -105,6 +106,51 @@ export class DeploymentListPresenter { }, }); + // Check for Vercel integration before the main query so we can conditionally LEFT JOIN + let hasVercelIntegration = false; + let vercelTeamSlug: string | undefined; + let vercelProjectName: string | undefined; + + const vercelProjectIntegration = + await this.#prismaClient.organizationProjectIntegration.findFirst({ + where: { + projectId: project.id, + deletedAt: null, + organizationIntegration: { + service: "VERCEL", + deletedAt: null, + }, + }, + select: { + integrationData: true, + }, + }); + + if (vercelProjectIntegration) { + const parsed = VercelProjectIntegrationDataSchema.safeParse( + vercelProjectIntegration.integrationData + ); + + if (parsed.success && parsed.data.vercelTeamSlug) { + hasVercelIntegration = true; + vercelTeamSlug = parsed.data.vercelTeamSlug; + vercelProjectName = parsed.data.vercelProjectName; + } + } + + const vercelSelect = hasVercelIntegration + ? 
Prisma.sql`, id_dep."integrationDeploymentId"` + : Prisma.sql``; + const vercelJoin = hasVercelIntegration + ? Prisma.sql`LEFT JOIN LATERAL ( + SELECT id_inner."integrationDeploymentId" + FROM ${sqlDatabaseSchema}."IntegrationDeployment" as id_inner + WHERE id_inner."deploymentId" = wd."id" AND id_inner."integrationName" = 'vercel' + ORDER BY id_inner."createdAt" DESC + LIMIT 1 + ) id_dep ON true` + : Prisma.sql``; + const deployments = await this.#prismaClient.$queryRaw< { id: string; @@ -123,6 +169,7 @@ export class DeploymentListPresenter { userAvatarUrl: string | null; type: WorkerInstanceGroupType; git: Prisma.JsonValue | null; + integrationDeploymentId: string | null; }[] >` SELECT @@ -142,10 +189,12 @@ export class DeploymentListPresenter { wd."deployedAt", wd."type", wd."git" + ${vercelSelect} FROM ${sqlDatabaseSchema}."WorkerDeployment" as wd LEFT JOIN ${sqlDatabaseSchema}."User" as u ON wd."triggeredById" = u."id" +${vercelJoin} WHERE wd."projectId" = ${project.id} AND wd."environmentId" = ${environment.id} @@ -173,6 +222,7 @@ LIMIT ${pageSize} OFFSET ${pageSize * (page - 1)};`; return { currentPage: page, totalPages: Math.ceil(totalCount / pageSize), + hasVercelIntegration, connectedGithubRepository: project.connectedGithubRepository ?? 
undefined, environmentGitHubBranch, deployments: deployments.map((deployment, index) => { @@ -180,6 +230,12 @@ LIMIT ${pageSize} OFFSET ${pageSize * (page - 1)};`; (labeledDeployment) => labeledDeployment.deploymentId === deployment.id ); + let vercelDeploymentUrl: string | null = null; + if (hasVercelIntegration && deployment.integrationDeploymentId && vercelTeamSlug && vercelProjectName) { + const vercelId = deployment.integrationDeploymentId.replace(/^dpl_/, ""); + vercelDeploymentUrl = `https://vercel.com/${vercelTeamSlug}/${vercelProjectName}/${vercelId}`; + } + return { id: deployment.id, shortCode: deployment.shortCode, @@ -210,6 +266,7 @@ LIMIT ${pageSize} OFFSET ${pageSize * (page - 1)};`; } : undefined, git: processGitMetadata(deployment.git), + vercelDeploymentUrl, }; }), }; diff --git a/apps/webapp/app/presenters/v3/EnvironmentVariablesPresenter.server.ts b/apps/webapp/app/presenters/v3/EnvironmentVariablesPresenter.server.ts index 730591f4ebc..aff55263fec 100644 --- a/apps/webapp/app/presenters/v3/EnvironmentVariablesPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/EnvironmentVariablesPresenter.server.ts @@ -1,9 +1,14 @@ -import { flipCauseOption } from "effect/Cause"; import { PrismaClient, prisma } from "~/db.server"; import { Project } from "~/models/project.server"; import { User } from "~/models/user.server"; import { filterOrphanedEnvironments, sortEnvironments } from "~/utils/environmentSort"; import { EnvironmentVariablesRepository } from "~/v3/environmentVariables/environmentVariablesRepository.server"; +import type { EnvironmentVariableUpdater } from "~/v3/environmentVariables/repository"; +import { + SyncEnvVarsMapping, + EnvSlug, +} from "~/v3/vercel/vercelProjectIntegrationSchema"; +import { VercelIntegrationService } from "~/services/vercelIntegration.server"; type Result = Awaited>; export type EnvironmentVariableWithSetValues = Result["environmentVariables"][number]; @@ -44,6 +49,9 @@ export class EnvironmentVariablesPresenter { 
select: { id: true, environmentId: true, + version: true, + lastUpdatedBy: true, + updatedAt: true, valueReference: { select: { key: true, @@ -67,6 +75,42 @@ export class EnvironmentVariablesPresenter { }, }); + const userIds = new Set( + environmentVariables + .flatMap((envVar) => envVar.values) + .map((value) => value.lastUpdatedBy) + .filter( + (lastUpdatedBy): lastUpdatedBy is { type: "user"; userId: string } => + lastUpdatedBy !== null && + typeof lastUpdatedBy === "object" && + "type" in lastUpdatedBy && + lastUpdatedBy.type === "user" && + "userId" in lastUpdatedBy && + typeof lastUpdatedBy.userId === "string" + ) + .map((lastUpdatedBy) => lastUpdatedBy.userId) + ); + + const users = + userIds.size > 0 + ? await this.#prismaClient.user.findMany({ + where: { + id: { + in: Array.from(userIds), + }, + }, + select: { + id: true, + name: true, + displayName: true, + avatarUrl: true, + }, + }) + : []; + + const usersRecord: Record = + Object.fromEntries(users.map((u) => [u.id, u])); + const environments = await this.#prismaClient.runtimeEnvironment.findMany({ select: { id: true, @@ -94,6 +138,18 @@ export class EnvironmentVariablesPresenter { const repository = new EnvironmentVariablesRepository(this.#prismaClient); const variables = await repository.getProject(project.id); + // Get Vercel integration data if it exists + const vercelService = new VercelIntegrationService(this.#prismaClient); + const vercelIntegration = await vercelService.getVercelProjectIntegration(project.id); + + let vercelSyncEnvVarsMapping: SyncEnvVarsMapping = {}; + let vercelPullEnvVarsBeforeBuild: EnvSlug[] | null = null; + + if (vercelIntegration) { + vercelSyncEnvVarsMapping = vercelIntegration.parsedIntegrationData.syncEnvVarsMapping; + vercelPullEnvVarsBeforeBuild = vercelIntegration.parsedIntegrationData.config.pullEnvVarsBeforeBuild ?? 
null; + } + return { environmentVariables: environmentVariables .flatMap((environmentVariable) => { @@ -101,13 +157,29 @@ export class EnvironmentVariablesPresenter { return sortedEnvironments.flatMap((env) => { const val = variable?.values.find((v) => v.environment.id === env.id); - const isSecret = - environmentVariable.values.find((v) => v.environmentId === env.id)?.isSecret ?? false; + const valueRecord = environmentVariable.values.find((v) => v.environmentId === env.id); + const isSecret = valueRecord?.isSecret ?? false; - if (!val) { + if (!val || !valueRecord) { return []; } + const lastUpdatedBy = valueRecord.lastUpdatedBy as EnvironmentVariableUpdater | null; + + const updatedByUser = + lastUpdatedBy?.type === "user" + ? (() => { + const user = usersRecord[lastUpdatedBy.userId]; + return user + ? { + id: user.id, + name: user.displayName || user.name || "Unknown", + avatarUrl: user.avatarUrl, + } + : null; + })() + : null; + return [ { id: environmentVariable.id, @@ -115,6 +187,10 @@ export class EnvironmentVariablesPresenter { environment: { type: env.type, id: env.id, branchName: env.branchName }, value: isSecret ? "" : val.value, isSecret, + version: valueRecord.version, + lastUpdatedBy, + updatedByUser, + updatedAt: valueRecord.updatedAt, }, ]; }); @@ -127,6 +203,14 @@ export class EnvironmentVariablesPresenter { branchName: environment.branchName, })), hasStaging: environments.some((environment) => environment.type === "STAGING"), + // Vercel integration data + vercelIntegration: vercelIntegration + ? 
{ + enabled: true, + pullEnvVarsBeforeBuild: vercelPullEnvVarsBeforeBuild, + syncEnvVarsMapping: vercelSyncEnvVarsMapping, + } + : null, }; } } diff --git a/apps/webapp/app/presenters/v3/VercelSettingsPresenter.server.ts b/apps/webapp/app/presenters/v3/VercelSettingsPresenter.server.ts new file mode 100644 index 00000000000..d92fdbf7f7a --- /dev/null +++ b/apps/webapp/app/presenters/v3/VercelSettingsPresenter.server.ts @@ -0,0 +1,585 @@ +import { type PrismaClient } from "@trigger.dev/database"; +import { type Result, fromPromise, ok, okAsync, ResultAsync } from "neverthrow"; +import { env } from "~/env.server"; +import { logger } from "~/services/logger.server"; +import { OrgIntegrationRepository } from "~/models/orgIntegration.server"; +import { + VercelIntegrationRepository, + VercelCustomEnvironment, + VercelEnvironmentVariable, +} from "~/models/vercelIntegration.server"; +import { type GitHubAppInstallation } from "~/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.github"; +import { EnvironmentVariablesRepository } from "~/v3/environmentVariables/environmentVariablesRepository.server"; +import { + VercelProjectIntegrationDataSchema, + VercelProjectIntegrationData, +} from "~/v3/vercel/vercelProjectIntegrationSchema"; +import { BasePresenter } from "./basePresenter.server"; + +type VercelSettingsOptions = { + projectId: string; + organizationId: string; +}; + +export type VercelSettingsResult = { + enabled: boolean; + hasOrgIntegration: boolean; + authInvalid?: boolean; + connectedProject?: { + id: string; + vercelProjectId: string; + vercelProjectName: string; + vercelTeamId: string | null; + integrationData: VercelProjectIntegrationData; + createdAt: Date; + }; + isGitHubConnected: boolean; + hasStagingEnvironment: boolean; + hasPreviewEnvironment: boolean; + customEnvironments: VercelCustomEnvironment[]; + /** Whether autoAssignCustomDomains is enabled on the Vercel project. null if unknown. 
*/ + autoAssignCustomDomains?: boolean | null; +}; + +export type VercelAvailableProject = { + id: string; + name: string; +}; + +export type VercelOnboardingData = { + customEnvironments: VercelCustomEnvironment[]; + environmentVariables: VercelEnvironmentVariable[]; + availableProjects: VercelAvailableProject[]; + hasProjectSelected: boolean; + authInvalid?: boolean; + existingVariables: Record; // Environment slugs (non-archived only) + gitHubAppInstallations: GitHubAppInstallation[]; + isGitHubConnected: boolean; + isOnboardingComplete: boolean; +}; + +export class VercelSettingsPresenter extends BasePresenter { + /** + * Get Vercel integration settings for the settings page + */ + public async call({ projectId, organizationId }: VercelSettingsOptions): Promise> { + const vercelIntegrationEnabled = OrgIntegrationRepository.isVercelSupported; + + if (!vercelIntegrationEnabled) { + return ok({ + enabled: false, + hasOrgIntegration: false, + authInvalid: false, + connectedProject: undefined, + isGitHubConnected: false, + hasStagingEnvironment: false, + hasPreviewEnvironment: false, + customEnvironments: [], + } as VercelSettingsResult); + } + + const orgIntegrationResult = await fromPromise( + (this._replica as PrismaClient).organizationIntegration.findFirst({ + where: { + organizationId, + service: "VERCEL", + deletedAt: null, + }, + include: { + tokenReference: true, + }, + }), + (error) => error + ); + + if (orgIntegrationResult.isErr()) { + logger.error("Unexpected error in VercelSettingsPresenter.call", { error: orgIntegrationResult.error }); + return ok({ + enabled: true, + hasOrgIntegration: false, + authInvalid: true, + connectedProject: undefined, + isGitHubConnected: false, + hasStagingEnvironment: false, + hasPreviewEnvironment: false, + customEnvironments: [], + } as VercelSettingsResult); + } + + const orgIntegration = orgIntegrationResult.value; + const hasOrgIntegration = orgIntegration !== null; + + if (hasOrgIntegration) { + const tokenResult = 
await VercelIntegrationRepository.validateVercelToken(orgIntegration); + if (tokenResult.isErr() || !tokenResult.value.isValid) { + return ok({ + enabled: true, + hasOrgIntegration: true, + authInvalid: true, + connectedProject: undefined, + isGitHubConnected: false, + hasStagingEnvironment: false, + hasPreviewEnvironment: false, + customEnvironments: [], + } as VercelSettingsResult); + } + } + + const checkOrgIntegration = () => fromPromise( + Promise.resolve(hasOrgIntegration), + (error) => ({ + type: "other" as const, + cause: error, + }) + ); + + const checkGitHubConnection = () => + fromPromise( + (this._replica as PrismaClient).connectedGithubRepository.findFirst({ + where: { + projectId, + repository: { + installation: { + deletedAt: null, + suspendedAt: null, + }, + }, + }, + select: { + id: true, + }, + }), + (error) => ({ + type: "other" as const, + cause: error, + }) + ).map((repo) => repo !== null); + + const checkStagingEnvironment = () => + fromPromise( + (this._replica as PrismaClient).runtimeEnvironment.findFirst({ + select: { + id: true, + }, + where: { + projectId, + type: "STAGING", + }, + }), + (error) => ({ + type: "other" as const, + cause: error, + }) + ).map((env) => env !== null); + + const checkPreviewEnvironment = () => + fromPromise( + (this._replica as PrismaClient).runtimeEnvironment.findFirst({ + select: { + id: true, + }, + where: { + projectId, + type: "PREVIEW", + }, + }), + (error) => ({ + type: "other" as const, + cause: error, + }) + ).map((env) => env !== null); + + const getVercelProjectIntegration = () => + fromPromise( + (this._replica as PrismaClient).organizationProjectIntegration.findFirst({ + where: { + projectId, + deletedAt: null, + organizationIntegration: { + service: "VERCEL", + deletedAt: null, + }, + }, + include: { + organizationIntegration: true, + }, + }), + (error) => ({ + type: "other" as const, + cause: error, + }) + ).map((integration) => { + if (!integration) { + return undefined; + } + + const parsedData 
= VercelProjectIntegrationDataSchema.safeParse( + integration.integrationData + ); + + if (!parsedData.success) { + return undefined; + } + + return { + id: integration.id, + vercelProjectId: integration.externalEntityId, + vercelProjectName: parsedData.data.vercelProjectName, + vercelTeamId: parsedData.data.vercelTeamId, + integrationData: parsedData.data, + createdAt: integration.createdAt, + }; + }); + + return ResultAsync.combine([ + checkOrgIntegration(), + checkGitHubConnection(), + checkStagingEnvironment(), + checkPreviewEnvironment(), + getVercelProjectIntegration(), + ]).andThen(([hasOrgIntegration, isGitHubConnected, hasStagingEnvironment, hasPreviewEnvironment, connectedProject]) => { + const fetchCustomEnvsAndProjectSettings = async (): Promise<{ + customEnvironments: VercelCustomEnvironment[]; + autoAssignCustomDomains: boolean | null; + }> => { + if (!connectedProject || !orgIntegration) { + return { customEnvironments: [], autoAssignCustomDomains: null }; + } + const clientResult = await VercelIntegrationRepository.getVercelClient(orgIntegration); + if (clientResult.isErr()) { + return { customEnvironments: [], autoAssignCustomDomains: null }; + } + const client = clientResult.value; + const teamId = await VercelIntegrationRepository.getTeamIdFromIntegration(orgIntegration); + const [customEnvsResult, autoAssignResult] = await Promise.all([ + VercelIntegrationRepository.getVercelCustomEnvironments( + client, + connectedProject.vercelProjectId, + teamId + ), + VercelIntegrationRepository.getAutoAssignCustomDomains( + client, + connectedProject.vercelProjectId, + teamId + ), + ]); + return { + customEnvironments: customEnvsResult.isOk() ? customEnvsResult.value : [], + autoAssignCustomDomains: autoAssignResult.isOk() ? 
autoAssignResult.value : null, + }; + }; + + return fromPromise( + fetchCustomEnvsAndProjectSettings(), + (error) => ({ type: "other" as const, cause: error }) + ).map(({ customEnvironments, autoAssignCustomDomains }) => ({ + enabled: true, + hasOrgIntegration, + authInvalid: false, + connectedProject, + isGitHubConnected, + hasStagingEnvironment, + hasPreviewEnvironment, + customEnvironments, + autoAssignCustomDomains, + } as VercelSettingsResult)); + }).mapErr((error) => { + // Log the error and return a safe fallback + logger.error("Error in VercelSettingsPresenter.call", { error }); + return error; + }); + } + + /** + * Get data needed for the onboarding modal (custom environments and env vars) + */ + public async getOnboardingData( + projectId: string, + organizationId: string, + vercelEnvironmentId?: string + ): Promise { + const result = await ResultAsync.fromPromise( + (async (): Promise => { + const [gitHubInstallations, connectedGitHubRepo] = await Promise.all([ + (this._replica as PrismaClient).githubAppInstallation.findMany({ + where: { + organizationId, + deletedAt: null, + suspendedAt: null, + }, + select: { + id: true, + accountHandle: true, + targetType: true, + appInstallationId: true, + repositories: { + select: { + id: true, + name: true, + fullName: true, + htmlUrl: true, + private: true, + }, + take: 200, + }, + }, + take: 20, + orderBy: { + createdAt: "desc", + }, + }), + (this._replica as PrismaClient).connectedGithubRepository.findFirst({ + where: { + projectId, + repository: { + installation: { + deletedAt: null, + suspendedAt: null, + }, + }, + }, + select: { + id: true, + }, + }), + ]); + + const isGitHubConnected = connectedGitHubRepo !== null; + const gitHubAppInstallations: GitHubAppInstallation[] = gitHubInstallations.map((installation) => ({ + id: installation.id, + appInstallationId: installation.appInstallationId, + targetType: installation.targetType, + accountHandle: installation.accountHandle, + repositories: 
installation.repositories.map((repo) => ({ + id: repo.id, + name: repo.name, + fullName: repo.fullName, + private: repo.private, + htmlUrl: repo.htmlUrl, + })), + })); + + const orgIntegration = await (this._replica as PrismaClient).organizationIntegration.findFirst({ + where: { + organizationId, + service: "VERCEL", + deletedAt: null, + }, + include: { + tokenReference: true, + }, + }); + + if (!orgIntegration) { + return null; + } + + const tokenResult = await VercelIntegrationRepository.validateVercelToken(orgIntegration); + if (tokenResult.isErr() || !tokenResult.value.isValid) { + return { + customEnvironments: [], + environmentVariables: [], + availableProjects: [], + hasProjectSelected: false, + authInvalid: true, + existingVariables: {}, + gitHubAppInstallations, + isGitHubConnected, + isOnboardingComplete: false, + }; + } + + const clientResult = await VercelIntegrationRepository.getVercelClient(orgIntegration); + if (clientResult.isErr()) { + return { + customEnvironments: [], + environmentVariables: [], + availableProjects: [], + hasProjectSelected: false, + authInvalid: clientResult.error.authInvalid, + existingVariables: {}, + gitHubAppInstallations, + isGitHubConnected, + isOnboardingComplete: false, + }; + } + const client = clientResult.value; + const teamId = await VercelIntegrationRepository.getTeamIdFromIntegration(orgIntegration); + + const projectIntegration = await (this._replica as PrismaClient).organizationProjectIntegration.findFirst({ + where: { + projectId, + deletedAt: null, + organizationIntegration: { + service: "VERCEL", + deletedAt: null, + }, + }, + }); + + const availableProjectsResult = await VercelIntegrationRepository.getVercelProjects(client, teamId); + + if (availableProjectsResult.isErr()) { + return { + customEnvironments: [], + environmentVariables: [], + availableProjects: [], + hasProjectSelected: false, + authInvalid: availableProjectsResult.error.authInvalid, + existingVariables: {}, + gitHubAppInstallations, + 
isGitHubConnected, + isOnboardingComplete: false, + }; + } + + if (!projectIntegration) { + return { + customEnvironments: [], + environmentVariables: [], + availableProjects: availableProjectsResult.value, + hasProjectSelected: false, + existingVariables: {}, + gitHubAppInstallations, + isGitHubConnected, + isOnboardingComplete: false, + }; + } + + const [customEnvironmentsResult, projectEnvVarsResult, sharedEnvVarsResult] = await Promise.all([ + VercelIntegrationRepository.getVercelCustomEnvironments( + client, + projectIntegration.externalEntityId, + teamId + ), + VercelIntegrationRepository.getVercelEnvironmentVariables( + client, + projectIntegration.externalEntityId, + teamId + ), + // Only fetch shared env vars if teamId is available + teamId + ? VercelIntegrationRepository.getVercelSharedEnvironmentVariables( + client, + teamId, + projectIntegration.externalEntityId + ) + : okAsync([] as Array<{ id: string; key: string; type: string; isSecret: boolean; target: string[] }>), + ]); + const authInvalid = + (customEnvironmentsResult.isErr() && customEnvironmentsResult.error.authInvalid) || + (projectEnvVarsResult.isErr() && projectEnvVarsResult.error.authInvalid) || + (sharedEnvVarsResult.isErr() && sharedEnvVarsResult.error.authInvalid); + + if (authInvalid) { + return { + customEnvironments: [], + environmentVariables: [], + availableProjects: availableProjectsResult.value, + hasProjectSelected: true, + authInvalid: true, + existingVariables: {}, + gitHubAppInstallations, + isGitHubConnected, + isOnboardingComplete: false, + }; + } + + const customEnvironments = customEnvironmentsResult.isOk() ? customEnvironmentsResult.value : []; + const projectEnvVars = projectEnvVarsResult.isOk() ? projectEnvVarsResult.value : []; + const sharedEnvVars = sharedEnvVarsResult.isOk() ? 
sharedEnvVarsResult.value : []; + + // Filter out TRIGGER_SECRET_KEY and TRIGGER_VERSION (managed by Trigger.dev) and merge project + shared env vars + const excludedKeys = new Set(["TRIGGER_SECRET_KEY", "TRIGGER_VERSION"]); + const projectEnvVarKeys = new Set(projectEnvVars.map((v) => v.key)); + const mergedEnvVars: VercelEnvironmentVariable[] = [ + ...projectEnvVars + .filter((v) => !excludedKeys.has(v.key)) + .map((v) => { + const envVar = { ...v }; + if (vercelEnvironmentId && (v as any).customEnvironmentIds?.includes(vercelEnvironmentId)) { + envVar.target = [...v.target, 'staging']; + } + return envVar; + }), + ...sharedEnvVars + .filter((v) => !projectEnvVarKeys.has(v.key) && !excludedKeys.has(v.key)) + .map((v) => { + const envVar = { + id: v.id, + key: v.key, + type: v.type as VercelEnvironmentVariable["type"], + isSecret: v.isSecret, + target: v.target, + isShared: true, + customEnvironmentIds: [] as string[], + }; + if (vercelEnvironmentId && (v as any).customEnvironmentIds?.includes(vercelEnvironmentId)) { + envVar.target = [...v.target, 'staging']; + } + return envVar; + }), + ]; + + const sortedEnvVars = [...mergedEnvVars].sort((a, b) => + a.key.localeCompare(b.key) + ); + + const projectEnvs = await (this._replica as PrismaClient).runtimeEnvironment.findMany({ + where: { + projectId, + archivedAt: null, // Filter out archived environments + }, + select: { + id: true, + slug: true, + type: true, + }, + }); + const envIdToSlug = new Map(projectEnvs.map((e) => [e.id, e.slug])); + const activeEnvIds = new Set(projectEnvs.map((e) => e.id)); + + const envVarRepository = new EnvironmentVariablesRepository(this._replica as PrismaClient); + const existingVariables = await envVarRepository.getProject(projectId); + const existingVariablesRecord: Record = {}; + for (const v of existingVariables) { + // Filter out archived environments and map to slugs + const activeEnvSlugs = v.values + .filter((val) => activeEnvIds.has(val.environment.id)) + .map((val) => 
envIdToSlug.get(val.environment.id) || val.environment.type.toLowerCase()); + if (activeEnvSlugs.length > 0) { + existingVariablesRecord[v.key] = { + environments: activeEnvSlugs, + }; + } + } + + const parsedIntegrationData = VercelProjectIntegrationDataSchema.safeParse( + projectIntegration.integrationData + ); + + return { + customEnvironments, + environmentVariables: sortedEnvVars, + availableProjects: availableProjectsResult.value, + hasProjectSelected: true, + existingVariables: existingVariablesRecord, + gitHubAppInstallations, + isGitHubConnected, + isOnboardingComplete: parsedIntegrationData.success + ? (parsedIntegrationData.data.onboardingCompleted ?? false) + : false, + }; + })(), + (error) => error + ); + + if (result.isErr()) { + logger.error("Error in getOnboardingData", { error: result.error }); + return null; + } + + return result.value; + } + +} \ No newline at end of file diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.apikeys/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.apikeys/route.tsx index acbf29c4f3f..897687f4ec9 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.apikeys/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.apikeys/route.tsx @@ -51,7 +51,7 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { try { const presenter = new ApiKeysPresenter(); - const { environment } = await presenter.call({ + const { environment, hasVercelIntegration } = await presenter.call({ userId, projectSlug: projectParam, environmentSlug: envParam, @@ -59,6 +59,7 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { return typedjson({ environment, + hasVercelIntegration, }); } catch (error) { console.error(error); @@ -70,7 +71,7 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => 
{ }; export default function Page() { - const { environment } = useTypedLoaderData(); + const { environment, hasVercelIntegration } = useTypedLoaderData(); const organization = useOrganization(); if (!environment) { @@ -132,6 +133,8 @@ export default function Page() {
(); const hasDeployments = totalPages > 0; @@ -234,6 +237,7 @@ export default function Page() { Deployed at Deployed by Git + {hasVercelIntegration && Linked} Go to page @@ -307,6 +311,28 @@ export default function Page() { + {hasVercelIntegration && ( + + {deployment.vercelDeploymentUrl ? ( + e.stopPropagation()} + > + + + } + content="View on Vercel" + /> + ) : ( + "–" + )} + + )} + No deploys match your filters diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.environment-variables.new/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.environment-variables.new/route.tsx index c52942a8acb..86bd5bbc95d 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.environment-variables.new/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.environment-variables.new/route.tsx @@ -151,7 +151,13 @@ export const action = async ({ request, params }: ActionFunctionArgs) => { } const repository = new EnvironmentVariablesRepository(prisma); - const result = await repository.create(project.id, submission.value); + const result = await repository.create(project.id, { + ...submission.value, + lastUpdatedBy: { + type: "user", + userId, + }, + }); if (!result.success) { if (result.variableErrors) { diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.environment-variables/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.environment-variables/route.tsx index 80976d41fcc..2670f0188df 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.environment-variables/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.environment-variables/route.tsx @@ -9,7 +9,7 @@ import { PlusIcon, TrashIcon, } from 
"@heroicons/react/20/solid"; -import { Form, type MetaFunction, Outlet, useActionData, useFetcher, useNavigation } from "@remix-run/react"; +import { Form, type MetaFunction, Outlet, useActionData, useFetcher, useNavigation, useRevalidator } from "@remix-run/react"; import { type ActionFunctionArgs, type LoaderFunctionArgs, @@ -19,10 +19,12 @@ import { useEffect, useMemo, useState } from "react"; import { typedjson, useTypedLoaderData } from "remix-typedjson"; import { z } from "zod"; import { EnvironmentCombo } from "~/components/environments/EnvironmentLabel"; +import { VercelLogo } from "~/components/integrations/VercelLogo"; import { PageBody, PageContainer } from "~/components/layout/AppLayout"; import { Button, LinkButton } from "~/components/primitives/Buttons"; import { ClipboardField } from "~/components/primitives/ClipboardField"; import { CopyableText } from "~/components/primitives/CopyableText"; +import { DateTime } from "~/components/primitives/DateTime"; import { Dialog, DialogContent, DialogHeader, DialogTrigger } from "~/components/primitives/Dialog"; import { Fieldset } from "~/components/primitives/Fieldset"; import { FormButtons } from "~/components/primitives/FormButtons"; @@ -70,6 +72,11 @@ import { EditEnvironmentVariableValue, EnvironmentVariable, } from "~/v3/environmentVariables/repository"; +import { UserAvatar } from "~/components/UserProfilePhoto"; +import { VercelIntegrationService } from "~/services/vercelIntegration.server"; +import { fromPromise } from "neverthrow"; +import { logger } from "~/services/logger.server"; +import { shouldSyncEnvVar, isPullEnvVarsEnabledForEnvironment, type TriggerEnvironmentType } from "~/v3/vercel/vercelProjectIntegrationSchema"; export const meta: MetaFunction = () => { return [ @@ -85,7 +92,7 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { try { const presenter = new EnvironmentVariablesPresenter(); - const { environmentVariables, environments, hasStaging } = await 
presenter.call({ + const { environmentVariables, environments, hasStaging, vercelIntegration } = await presenter.call({ userId, projectSlug: projectParam, }); @@ -94,6 +101,7 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { environmentVariables, environments, hasStaging, + vercelIntegration, }); } catch (error) { console.error(error); @@ -111,6 +119,12 @@ const schema = z.discriminatedUnion("action", [ key: z.string(), ...DeleteEnvironmentVariableValue.shape, }), + z.object({ + action: z.literal("update-vercel-sync"), + key: z.string(), + environmentType: z.enum(["PRODUCTION", "STAGING", "PREVIEW", "DEVELOPMENT"]), + syncEnabled: z.union([z.literal("true"), z.literal("false")]).transform((val) => val === "true"), + }), ]); export const action = async ({ request, params }: ActionFunctionArgs) => { @@ -151,7 +165,13 @@ export const action = async ({ request, params }: ActionFunctionArgs) => { switch (submission.value.action) { case "edit": { const repository = new EnvironmentVariablesRepository(prisma); - const result = await repository.editValue(project.id, submission.value); + const result = await repository.editValue(project.id, { + ...submission.value, + lastUpdatedBy: { + type: "user", + userId, + }, + }); if (!result.success) { submission.error.key = [result.error]; @@ -169,6 +189,32 @@ export const action = async ({ request, params }: ActionFunctionArgs) => { return json(submission); } + // Clean up syncEnvVarsMapping if Vercel integration exists (best-effort) + const { environmentId, key } = submission.value; + const vercelService = new VercelIntegrationService(); + await fromPromise( + (async () => { + const integration = await vercelService.getVercelProjectIntegration(project.id); + if (integration) { + const runtimeEnv = await prisma.runtimeEnvironment.findUnique({ + where: { id: environmentId }, + select: { type: true }, + }); + if (runtimeEnv) { + await vercelService.removeSyncEnvVarForEnvironment( + project.id, + key, + 
runtimeEnv.type as TriggerEnvironmentType + ); + } + } + })(), + (error) => error + ).mapErr((error) => { + logger.error("Failed to remove Vercel sync mapping", { error }); + return error; + }); + return redirectWithSuccessMessage( v3EnvironmentVariablesPath( { slug: organizationSlug }, @@ -179,12 +225,31 @@ export const action = async ({ request, params }: ActionFunctionArgs) => { `Deleted ${submission.value.key} environment variable` ); } + case "update-vercel-sync": { + const vercelService = new VercelIntegrationService(); + const integration = await vercelService.getVercelProjectIntegration(project.id); + + if (!integration) { + submission.error.key = ["Vercel integration not found"]; + return json(submission); + } + + // Update the sync mapping for the specific env var and environment + await vercelService.updateSyncEnvVarForEnvironment( + project.id, + submission.value.key, + submission.value.environmentType, + submission.value.syncEnabled + ); + + return json({ success: true }); + } } }; export default function Page() { const [revealAll, setRevealAll] = useState(false); - const { environmentVariables, environments } = useTypedLoaderData(); + const { environmentVariables, environments, vercelIntegration } = useTypedLoaderData(); const organization = useOrganization(); const project = useProject(); const environment = useEnvironment(); @@ -279,10 +344,32 @@ export default function Page() { - Key - Value - Environment - + + Key + + + Value + + + Environment + + {vercelIntegration?.enabled && ( + + + Sync + + + } + content="When enabled, this variable will be pulled from Vercel during builds. Requires 'Pull env vars before build' to be enabled in settings." + /> + + )} + + Updated + + Actions @@ -341,9 +428,54 @@ export default function Page() { + {vercelIntegration?.enabled && ( + + {variable.environment.type !== "DEVELOPMENT" && ( + + )} + + )} + +
+ {variable.updatedByUser ? ( +
+ + {variable.updatedByUser.name} +
+ ) : (variable.lastUpdatedBy?.type === "integration" && variable.lastUpdatedBy?.integration === 'vercel' ) ? ( +
+ + + {variable.lastUpdatedBy.integration} + +
+ ) : null} + {variable.updatedAt ? ( + + + + ) : null} +
+
- + {environmentVariables.length === 0 ? (
You haven't set any environment variables yet. @@ -430,7 +562,7 @@ function EditEnvironmentVariablePanel({ @@ -526,8 +658,82 @@ function DeleteEnvironmentVariableButton({ leadingIconClassName="text-rose-500 group-hover/button:text-text-bright transition-colors" className="ml-0.5 transition-colors group-hover/button:bg-error" > - {isLoading ? "Deleting" : "Delete"} + {isLoading ? "Deleting" : ""} ); } + +/** + * Toggle component for controlling whether an environment variable is pulled from Vercel. + * + * When enabled, the variable will be pulled from Vercel during builds. + * By default, all variables are pulled unless explicitly disabled. + * + * Note: If the env slug is missing from syncEnvVarsMapping, all vars are pulled by default. + * Only when syncEnvVarsMapping[envSlug][envVarName] = false, the env var is skipped during builds. + */ +function VercelSyncCheckbox({ + envVarKey, + environmentType, + syncEnabled, + pullEnvVarsEnabledForEnv, +}: { + envVarKey: string; + environmentType: "PRODUCTION" | "STAGING" | "PREVIEW" | "DEVELOPMENT"; + syncEnabled: boolean; + pullEnvVarsEnabledForEnv: boolean; +}) { + const fetcher = useFetcher(); + const revalidator = useRevalidator(); + + const isLoading = fetcher.state !== "idle"; + + // Revalidate loader data after successful submission (without full page reload) + useEffect(() => { + if (fetcher.state === "idle" && fetcher.data) { + const data = fetcher.data as { success?: boolean }; + if (data.success) { + revalidator.revalidate(); + } + } + }, [fetcher.state, fetcher.data, revalidator]); + + const handleChange = (checked: boolean) => { + fetcher.submit( + { + action: "update-vercel-sync", + key: envVarKey, + environmentType, + syncEnabled: checked.toString(), + }, + { method: "post" } + ); + }; + + // If pull env vars is disabled for this environment, show disabled state + if (!pullEnvVarsEnabledForEnv) { + return ( + {}} + /> + } + content="Enable 'Pull env vars before build' for this environment in Vercel 
settings." + /> + ); + } + + return ( + + ); +} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.settings/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.settings/route.tsx index 66ea64cb36f..a5a70c39af6 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.settings/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.settings/route.tsx @@ -38,12 +38,20 @@ import { import { ProjectSettingsService } from "~/services/projectSettings.server"; import { logger } from "~/services/logger.server"; import { requireUserId } from "~/services/session.server"; -import { organizationPath, v3ProjectPath, EnvironmentParamSchema, v3BillingPath } from "~/utils/pathBuilder"; -import React, { useEffect, useState } from "react"; +import { organizationPath, v3ProjectPath, EnvironmentParamSchema, v3BillingPath, vercelResourcePath } from "~/utils/pathBuilder"; +import React, { useEffect, useState, useCallback, useRef } from "react"; +import { useSearchParams } from "@remix-run/react"; import { useEnvironment } from "~/hooks/useEnvironment"; import { ProjectSettingsPresenter } from "~/services/projectSettingsPresenter.server"; import { type BuildSettings } from "~/v3/buildSettings"; import { GitHubSettingsPanel } from "../resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.github"; +import { + VercelSettingsPanel, + VercelOnboardingModal, +} from "../resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.vercel"; +import type { loader as vercelLoader } from "../resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.vercel"; +import { OrgIntegrationRepository } from "~/models/orgIntegration.server"; +import { useTypedFetcher } from "remix-typedjson"; export const meta: MetaFunction = () => { return [ @@ -92,6 +100,7 @@ export const loader = 
async ({ request, params }: LoaderFunctionArgs) => { return typedjson({ githubAppEnabled: gitHubApp.enabled, buildSettings, + vercelIntegrationEnabled: OrgIntegrationRepository.isVercelSupported, }); }; @@ -290,12 +299,121 @@ export const action: ActionFunction = async ({ request, params }) => { }; export default function Page() { - const { githubAppEnabled, buildSettings } = useTypedLoaderData(); + const { githubAppEnabled, buildSettings, vercelIntegrationEnabled } = + useTypedLoaderData(); const project = useProject(); const organization = useOrganization(); const environment = useEnvironment(); const lastSubmission = useActionData(); const navigation = useNavigation(); + const [searchParams, setSearchParams] = useSearchParams(); + + // Vercel onboarding modal state + const hasQueryParam = searchParams.get("vercelOnboarding") === "true"; + const nextUrl = searchParams.get("next"); + const [isModalOpen, setIsModalOpen] = useState(false); + const vercelFetcher = useTypedFetcher(); + + // Helper to open modal and ensure query param is present + const openVercelOnboarding = useCallback(() => { + setIsModalOpen(true); + // Ensure query param is present to maintain state during form submissions + if (!hasQueryParam) { + setSearchParams((prev) => { + prev.set("vercelOnboarding", "true"); + return prev; + }); + } + }, [hasQueryParam, setSearchParams]); + + const closeVercelOnboarding = useCallback(() => { + // Remove query param if present + if (hasQueryParam) { + setSearchParams((prev) => { + prev.delete("vercelOnboarding"); + return prev; + }); + } + // Close modal + setIsModalOpen(false); + }, [hasQueryParam, setSearchParams]); + + // When query param is present, handle modal opening + // Note: We don't close the modal based on data state during onboarding - only when explicitly closed + useEffect(() => { + if (hasQueryParam && vercelIntegrationEnabled) { + // Ensure query param is present and modal is open + if (vercelFetcher.data?.onboardingData && 
vercelFetcher.state === "idle") { + // Data is loaded, ensure modal is open (query param takes precedence) + if (!isModalOpen) { + openVercelOnboarding(); + } + } else if (vercelFetcher.state === "idle" && vercelFetcher.data === undefined) { + // Load onboarding data + vercelFetcher.load( + `${vercelResourcePath(organization.slug, project.slug, environment.slug)}?vercelOnboarding=true` + ); + } + } else if (!hasQueryParam && isModalOpen) { + // Query param removed but modal is open, close modal + setIsModalOpen(false); + } + }, [hasQueryParam, vercelIntegrationEnabled, organization.slug, project.slug, environment.slug, vercelFetcher.data, vercelFetcher.state, isModalOpen, openVercelOnboarding]); + + // Ensure modal stays open when query param is present (even after data reloads) + // This is a safeguard to prevent the modal from closing during form submissions + useEffect(() => { + if (hasQueryParam && !isModalOpen) { + // Query param is present but modal is closed, open it + // This ensures the modal stays open during the onboarding flow + openVercelOnboarding(); + } + }, [hasQueryParam, isModalOpen, openVercelOnboarding]); + + // When data finishes loading (from query param), ensure modal is open + useEffect(() => { + if (hasQueryParam && vercelFetcher.data?.onboardingData && vercelFetcher.state === "idle") { + // Data loaded and query param is present, ensure modal is open + if (!isModalOpen) { + openVercelOnboarding(); + } + } + }, [hasQueryParam, vercelFetcher.data, vercelFetcher.state, isModalOpen, openVercelOnboarding]); + + + // Track if we're waiting for data from button click (not query param) + const waitingForButtonClickRef = useRef(false); + + // Handle opening modal from button click (without query param) + const handleOpenVercelModal = useCallback(() => { + // Add query param to maintain state during form submissions + if (!hasQueryParam) { + setSearchParams((prev) => { + prev.set("vercelOnboarding", "true"); + return prev; + }); + } + + if 
(vercelFetcher.data && vercelFetcher.data.onboardingData) { + // Data already loaded, open modal immediately + openVercelOnboarding(); + } else { + // Need to load data first, mark that we're waiting for button click + waitingForButtonClickRef.current = true; + vercelFetcher.load( + `${vercelResourcePath(organization.slug, project.slug, environment.slug)}?vercelOnboarding=true` + ); + } + }, [organization.slug, project.slug, environment.slug, vercelFetcher, setSearchParams, hasQueryParam, openVercelOnboarding]); + + // When data loads from button click, open modal + useEffect(() => { + if (waitingForButtonClickRef.current && vercelFetcher.data?.onboardingData && vercelFetcher.state === "idle") { + // Data loaded from button click, open modal and ensure query param is present + waitingForButtonClickRef.current = false; + openVercelOnboarding(); + } + }, [vercelFetcher.data, vercelFetcher.state, openVercelOnboarding]); const [hasRenameFormChanges, setHasRenameFormChanges] = useState(false); @@ -425,6 +543,21 @@ export default function Page() {
+ {vercelIntegrationEnabled && ( +
+ Vercel integration +
+ +
+
+ )} +
Build settings
@@ -477,6 +610,29 @@ export default function Page() {
+ + {/* Vercel Onboarding Modal */} + {vercelIntegrationEnabled && ( + { + vercelFetcher.load( + `${vercelResourcePath(organization.slug, project.slug, environment.slug)}?vercelOnboarding=true${ + vercelEnvironmentId ? `&vercelEnvironmentId=${vercelEnvironmentId}` : "" + }` + ); + }} + /> + )} ); } diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.settings.integrations.vercel.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.settings.integrations.vercel.tsx new file mode 100644 index 00000000000..10b3f2283ce --- /dev/null +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.settings.integrations.vercel.tsx @@ -0,0 +1,375 @@ +import type { + ActionFunctionArgs, + LoaderFunctionArgs, +} from "@remix-run/node"; +import { json, redirect } from "@remix-run/node"; +import { fromPromise } from "neverthrow"; +import { Form, useActionData, useNavigation } from "@remix-run/react"; +import { typedjson, useTypedLoaderData } from "remix-typedjson"; +import { z } from "zod"; +import { DialogClose } from "@radix-ui/react-dialog"; +import { Button } from "~/components/primitives/Buttons"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogHeader, + DialogTitle, + DialogTrigger, +} from "~/components/primitives/Dialog"; +import { FormButtons } from "~/components/primitives/FormButtons"; +import { Header1 } from "~/components/primitives/Headers"; +import { PageBody, PageContainer } from "~/components/layout/AppLayout"; +import { Paragraph } from "~/components/primitives/Paragraph"; +import { Table, TableBody, TableCell, TableHeader, TableHeaderCell, TableRow } from "~/components/primitives/Table"; +import { VercelIntegrationRepository } from "~/models/vercelIntegration.server"; +import { $transaction, prisma } from "~/db.server"; +import { requireOrganization } from "~/services/org.server"; +import { OrganizationParamsSchema } from "~/utils/pathBuilder"; +import { logger } from "~/services/logger.server"; +import { TrashIcon } from 
"@heroicons/react/20/solid"; +import { v3ProjectSettingsPath } from "~/utils/pathBuilder"; +import { LinkButton } from "~/components/primitives/Buttons"; + +function formatDate(date: Date): string { + return new Intl.DateTimeFormat("en-US", { + month: "short", + day: "numeric", + year: "numeric", + hour: "numeric", + minute: "2-digit", + second: "2-digit", + hour12: true, + }).format(date); +} + +export const loader = async ({ request, params }: LoaderFunctionArgs) => { + const { organizationSlug } = OrganizationParamsSchema.parse(params); + const url = new URL(request.url); + const configurationId = url.searchParams.get("configurationId") ?? undefined; + const { organization } = await requireOrganization(request, organizationSlug); + + // Find Vercel integration for this organization + let vercelIntegration = await prisma.organizationIntegration.findFirst({ + where: { + organizationId: organization.id, + service: "VERCEL", + deletedAt: null, + // If configurationId is provided, filter by it in integrationData + ...(configurationId && { + integrationData: { + path: ["installationId"], + equals: configurationId, + }, + }), + }, + include: { + tokenReference: true, + }, + }); + + if (!vercelIntegration) { + return typedjson({ + organization, + vercelIntegration: null, + connectedProjects: [], + teamId: null, + installationId: null, + }); + } + + // Get team ID from integrationData + const integrationData = vercelIntegration.integrationData as any; + const teamId = integrationData?.teamId ?? null; + const installationId = integrationData?.installationId ?? 
null; + + // Get all connected projects for this integration + const connectedProjects = await prisma.organizationProjectIntegration.findMany({ + where: { + organizationIntegrationId: vercelIntegration.id, + deletedAt: null, + }, + include: { + project: { + select: { + id: true, + slug: true, + name: true, + }, + }, + }, + orderBy: { + createdAt: "desc", + }, + }); + + return typedjson({ + organization, + vercelIntegration, + connectedProjects, + teamId, + installationId, + }); +}; + +const ActionSchema = z.object({ + intent: z.literal("uninstall"), +}); + +export const action = async ({ request, params }: ActionFunctionArgs) => { + const { organizationSlug } = OrganizationParamsSchema.parse(params); + const { organization, userId } = await requireOrganization(request, organizationSlug); + + const formData = await request.formData(); + const result = ActionSchema.safeParse({ intent: formData.get("intent") }); + if (!result.success) { + return json({ error: "Invalid action" }, { status: 400 }); + } + + // Find Vercel integration + const vercelIntegration = await prisma.organizationIntegration.findFirst({ + where: { + organizationId: organization.id, + service: "VERCEL", + deletedAt: null, + }, + include: { + tokenReference: true, + }, + }); + + if (!vercelIntegration) { + return json({ error: "Vercel integration not found" }, { status: 404 }); + } + + // Uninstall from Vercel side + const uninstallResult = await VercelIntegrationRepository.uninstallVercelIntegration(vercelIntegration); + + if (uninstallResult.isErr()) { + logger.error("Failed to uninstall Vercel integration", { + organizationId: organization.id, + organizationSlug, + userId, + integrationId: vercelIntegration.id, + error: uninstallResult.error.message, + }); + + return json( + { error: "Failed to uninstall Vercel integration. Please try again." 
}, + { status: 500 } + ); + } + + // Soft-delete the integration and all connected projects in a transaction + const txResult = await fromPromise( + $transaction(prisma, async (tx) => { + await tx.organizationProjectIntegration.updateMany({ + where: { + organizationIntegrationId: vercelIntegration.id, + deletedAt: null, + }, + data: { deletedAt: new Date() }, + }); + + await tx.organizationIntegration.update({ + where: { id: vercelIntegration.id }, + data: { deletedAt: new Date() }, + }); + }), + (error) => error + ); + + if (txResult.isErr()) { + logger.error("Failed to soft-delete Vercel integration records", { + organizationId: organization.id, + organizationSlug, + userId, + integrationId: vercelIntegration.id, + error: txResult.error instanceof Error ? txResult.error.message : String(txResult.error), + }); + + return json( + { error: "Failed to uninstall Vercel integration. Please try again." }, + { status: 500 } + ); + } + + if (uninstallResult.value.authInvalid) { + logger.warn("Vercel integration uninstalled with auth error - token invalid", { + organizationId: organization.id, + organizationSlug, + userId, + integrationId: vercelIntegration.id, + }); + } else { + logger.info("Vercel integration uninstalled successfully", { + organizationId: organization.id, + organizationSlug, + userId, + integrationId: vercelIntegration.id, + }); + } + + // Redirect back to organization settings + return redirect(`/orgs/${organizationSlug}/settings`); +}; + +export default function VercelIntegrationPage() { + const { organization, vercelIntegration, connectedProjects, teamId, installationId } = + useTypedLoaderData(); + const actionData = useActionData(); + const navigation = useNavigation(); + const isUninstalling = navigation.state === "submitting" && + navigation.formData?.get("intent") === "uninstall"; + + if (!vercelIntegration) { + return ( + + +
+ No Vercel Integration Found + + This organization doesn't have a Vercel integration configured. + +
+
+
+ ); + } + + return ( + + +
+ Vercel Integration + + Manage your organization's Vercel integration and connected projects. + +
+ + {/* Integration Info Section */} +
+
+
+

Integration Details

+
+ {teamId && ( +
+ Vercel Team ID: {teamId} +
+ )} + {installationId && ( +
+ Installation ID: {installationId} +
+ )} +
+ Installed:{" "} + {formatDate(new Date(vercelIntegration.createdAt))} +
+
+
+
+ + + + + + + Remove Vercel Integration + + + This will permanently remove the Vercel integration and disconnect all projects. + This action cannot be undone. + + + + + + } + cancelButton={ + + + + } + /> + + + {actionData?.error && ( + + {actionData.error} + + )} +
+
+
+ + {/* Connected Projects Section */} +
+

+ Connected Projects ({connectedProjects.length}) +

+ + {connectedProjects.length === 0 ? ( +
+ + No projects are currently connected to this Vercel integration. + +
+ ) : ( +
+ + + Project Name + Vercel Project ID + Connected + Actions + + + + {connectedProjects.map((projectIntegration) => ( + + {projectIntegration.project.name} + + {projectIntegration.externalEntityId} + + + {formatDate(new Date(projectIntegration.createdAt))} + + + + Configure + + + + ))} + +
+ )} + + + + ); +} \ No newline at end of file diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug_.projects.new/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug_.projects.new/route.tsx index 68c3306e284..d02f869c703 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug_.projects.new/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug_.projects.new/route.tsx @@ -32,6 +32,7 @@ import { selectPlanPath, v3ProjectPath, } from "~/utils/pathBuilder"; +import { generateVercelOAuthState } from "~/v3/vercel/vercelOAuthState.server"; export async function loader({ params, request }: LoaderFunctionArgs) { const userId = await requireUserId(request); @@ -103,6 +104,12 @@ export const action: ActionFunction = async ({ request, params }) => { return json(submission); } + // Check for Vercel integration params in URL + const url = new URL(request.url); + const code = url.searchParams.get("code"); + const configurationId = url.searchParams.get("configurationId"); + const next = url.searchParams.get("next"); + try { const project = await createProject({ organizationSlug: organizationSlug, @@ -111,6 +118,44 @@ export const action: ActionFunction = async ({ request, params }) => { version: submission.value.projectVersion, }); + // If this is a Vercel integration flow, generate state and redirect to connect + if (code && configurationId) { + const environment = await prisma.runtimeEnvironment.findFirst({ + where: { + projectId: project.id, + slug: "prod", + archivedAt: null, + }, + }); + + if (!environment) { + return redirectWithErrorMessage( + newProjectPath({ slug: organizationSlug }), + request, + "Failed to find project environment." 
+ ); + } + + const state = await generateVercelOAuthState({ + organizationId: project.organization.id, + projectId: project.id, + environmentSlug: environment.slug, + organizationSlug: project.organization.slug, + projectSlug: project.slug, + }); + + const params = new URLSearchParams({ + state, + code, + configurationId, + origin: "marketplace", + }); + if (next) { + params.set("next", next); + } + return redirect(`/vercel/connect?${params.toString()}`); + } + return redirectWithSuccessMessage( v3ProjectPath(project.organization, project), request, diff --git a/apps/webapp/app/routes/_app.orgs.new/route.tsx b/apps/webapp/app/routes/_app.orgs.new/route.tsx index a677782eaec..0a5c7fdd6ae 100644 --- a/apps/webapp/app/routes/_app.orgs.new/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.new/route.tsx @@ -69,6 +69,27 @@ export const action: ActionFunction = async ({ request }) => { }); } + // Preserve Vercel integration params if present + const url = new URL(request.url); + const code = url.searchParams.get("code"); + const configurationId = url.searchParams.get("configurationId"); + const integration = url.searchParams.get("integration"); + const next = url.searchParams.get("next"); + + if (code && configurationId && integration === "vercel") { + // Redirect to projects/new with params preserved + const params = new URLSearchParams({ + code, + configurationId, + integration, + }); + if (next) { + params.set("next", next); + } + const redirectUrl = `${organizationPath(organization)}/projects/new?${params.toString()}`; + return redirect(redirectUrl); + } + return redirect(organizationPath(organization)); } catch (error: any) { return json({ errors: { body: error.message } }, { status: 400 }); diff --git a/apps/webapp/app/routes/api.v1.deployments.$deploymentId.ts b/apps/webapp/app/routes/api.v1.deployments.$deploymentId.ts index ca3417b75bb..d0593e564fd 100644 --- a/apps/webapp/app/routes/api.v1.deployments.$deploymentId.ts +++ 
b/apps/webapp/app/routes/api.v1.deployments.$deploymentId.ts @@ -39,6 +39,7 @@ export async function loader({ request, params }: LoaderFunctionArgs) { tasks: true, }, }, + integrationDeployments: true, }, }); @@ -54,6 +55,7 @@ export async function loader({ request, params }: LoaderFunctionArgs) { version: deployment.version, imageReference: deployment.imageReference, imagePlatform: deployment.imagePlatform, + commitSHA: deployment.commitSHA, externalBuildData: deployment.externalBuildData as GetDeploymentResponseBody["externalBuildData"], errorData: deployment.errorData as GetDeploymentResponseBody["errorData"], @@ -69,5 +71,15 @@ export async function loader({ request, params }: LoaderFunctionArgs) { })), } : undefined, + integrationDeployments: + deployment.integrationDeployments.length > 0 + ? deployment.integrationDeployments.map((id) => ({ + id: id.id, + integrationName: id.integrationName, + integrationDeploymentId: id.integrationDeploymentId, + commitSHA: id.commitSHA, + createdAt: id.createdAt, + })) + : undefined, } satisfies GetDeploymentResponseBody); } diff --git a/apps/webapp/app/routes/api.v1.orgs.$organizationSlug.projects.$projectParam.vercel.projects.ts b/apps/webapp/app/routes/api.v1.orgs.$organizationSlug.projects.$projectParam.vercel.projects.ts new file mode 100644 index 00000000000..aaf54685888 --- /dev/null +++ b/apps/webapp/app/routes/api.v1.orgs.$organizationSlug.projects.$projectParam.vercel.projects.ts @@ -0,0 +1,147 @@ +import type { LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { json } from "@remix-run/server-runtime"; +import { fromPromise } from "neverthrow"; +import { z } from "zod"; +import { prisma } from "~/db.server"; +import { apiCors } from "~/utils/apiCors"; +import { logger } from "~/services/logger.server"; +import { authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server"; +import { VercelIntegrationService } from "~/services/vercelIntegration.server"; + +const 
ParamsSchema = z.object({ + organizationSlug: z.string(), + projectParam: z.string(), +}); + +/** + * API endpoint to retrieve connected Vercel projects for a Trigger.dev project. + * + * GET /api/v1/orgs/:organizationSlug/projects/:projectParam/vercel/projects + * + * Returns: + * - vercelProject: The connected Vercel project details (if any) + * - config: The Vercel integration configuration + * - syncEnvVarsMapping: The environment variable sync mapping + */ +export async function loader({ request, params }: LoaderFunctionArgs) { + // Handle CORS + if (request.method === "OPTIONS") { + return apiCors(request, json({})); + } + + const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request); + + if (!authenticationResult) { + return apiCors( + request, + json({ error: "Invalid or Missing Access Token" }, { status: 401 }) + ); + } + + const parsedParams = ParamsSchema.safeParse(params); + if (!parsedParams.success) { + return apiCors( + request, + json({ error: "Invalid parameters" }, { status: 400 }) + ); + } + + const { organizationSlug, projectParam } = parsedParams.data; + + const result = await fromPromise( + (async () => { + // Find the project, verifying org membership + const project = await prisma.project.findFirst({ + where: { + slug: projectParam, + organization: { + slug: organizationSlug, + members: { + some: { + userId: authenticationResult.userId, + }, + }, + }, + deletedAt: null, + }, + select: { + id: true, + name: true, + slug: true, + organizationId: true, + }, + }); + + if (!project) { + return { type: "not_found" as const }; + } + + // Get Vercel integration for the project + const vercelService = new VercelIntegrationService(); + const integration = await vercelService.getVercelProjectIntegration(project.id); + + return { type: "success" as const, project, integration }; + })(), + (error) => error + ); + + if (result.isErr()) { + logger.error("Failed to fetch Vercel projects", { + error: result.error, + 
organizationSlug, + projectParam, + }); + + return apiCors( + request, + json({ error: "Internal server error" }, { status: 500 }) + ); + } + + if (result.value.type === "not_found") { + return apiCors( + request, + json({ error: "Project not found" }, { status: 404 }) + ); + } + + const { project, integration } = result.value; + + if (!integration) { + return apiCors( + request, + json({ + connected: false, + vercelProject: null, + config: null, + syncEnvVarsMapping: null, + }) + ); + } + + const { parsedIntegrationData } = integration; + + return apiCors( + request, + json({ + connected: true, + vercelProject: { + id: parsedIntegrationData.vercelProjectId, + name: parsedIntegrationData.vercelProjectName, + teamId: parsedIntegrationData.vercelTeamId, + }, + config: { + atomicBuilds: parsedIntegrationData.config.atomicBuilds, + pullEnvVarsBeforeBuild: parsedIntegrationData.config.pullEnvVarsBeforeBuild, + vercelStagingEnvironment: parsedIntegrationData.config.vercelStagingEnvironment, + }, + syncEnvVarsMapping: parsedIntegrationData.syncEnvVarsMapping, + triggerProject: { + id: project.id, + name: project.name, + slug: project.slug, + }, + }) + ); +} + diff --git a/apps/webapp/app/routes/api.v1.projects.$projectRef.envvars.$slug.import.ts b/apps/webapp/app/routes/api.v1.projects.$projectRef.envvars.$slug.import.ts index ad2372a654a..53bc4429c1d 100644 --- a/apps/webapp/app/routes/api.v1.projects.$projectRef.envvars.$slug.import.ts +++ b/apps/webapp/app/routes/api.v1.projects.$projectRef.envvars.$slug.import.ts @@ -41,10 +41,13 @@ export async function action({ params, request }: ActionFunctionArgs) { const result = await repository.create(environment.project.id, { override: typeof body.override === "boolean" ? body.override : false, environmentIds: [environment.id], + // Pass parent environment ID so new variables can inherit isSecret from parent + parentEnvironmentId: environment.parentEnvironmentId ?? 
undefined, variables: Object.entries(body.variables).map(([key, value]) => ({ key, value, })), + lastUpdatedBy: body.source, }); // Only sync parent variables if this is a branch environment @@ -56,6 +59,7 @@ export async function action({ params, request }: ActionFunctionArgs) { key, value, })), + lastUpdatedBy: body.source, }); let childFailure = !result.success ? result : undefined; diff --git a/apps/webapp/app/routes/auth.github.callback.tsx b/apps/webapp/app/routes/auth.github.callback.tsx index 42473c64a49..2313b348f4a 100644 --- a/apps/webapp/app/routes/auth.github.callback.tsx +++ b/apps/webapp/app/routes/auth.github.callback.tsx @@ -5,6 +5,7 @@ import { getSession, redirectWithErrorMessage } from "~/models/message.server"; import { authenticator } from "~/services/auth.server"; import { setLastAuthMethodHeader } from "~/services/lastAuthMethod.server"; import { commitSession } from "~/services/sessionStorage.server"; +import { trackAndClearReferralSource } from "~/services/referralSource.server"; import { redirectCookie } from "./auth.github"; import { sanitizeRedirectPath } from "~/utils"; @@ -17,7 +18,6 @@ export let loader: LoaderFunction = async ({ request }) => { failureRedirect: "/login", // If auth fails, the failureRedirect will be thrown as a Response }); - // manually get the session const session = await getSession(request.headers.get("cookie")); const userRecord = await prisma.user.findFirst({ @@ -49,12 +49,13 @@ export let loader: LoaderFunction = async ({ request }) => { return redirect("/login/mfa", { headers }); } - // and store the user data session.set(authenticator.sessionKey, auth); const headers = new Headers(); headers.append("Set-Cookie", await commitSession(session)); headers.append("Set-Cookie", await setLastAuthMethodHeader("github")); + await trackAndClearReferralSource(request, auth.userId, headers); + return redirect(redirectTo, { headers }); }; diff --git a/apps/webapp/app/routes/auth.google.callback.tsx 
b/apps/webapp/app/routes/auth.google.callback.tsx index 783ddce3a3f..65dabd605ce 100644 --- a/apps/webapp/app/routes/auth.google.callback.tsx +++ b/apps/webapp/app/routes/auth.google.callback.tsx @@ -5,6 +5,7 @@ import { getSession, redirectWithErrorMessage } from "~/models/message.server"; import { authenticator } from "~/services/auth.server"; import { setLastAuthMethodHeader } from "~/services/lastAuthMethod.server"; import { commitSession } from "~/services/sessionStorage.server"; +import { trackAndClearReferralSource } from "~/services/referralSource.server"; import { redirectCookie } from "./auth.google"; import { sanitizeRedirectPath } from "~/utils"; @@ -17,7 +18,6 @@ export let loader: LoaderFunction = async ({ request }) => { failureRedirect: "/login", // If auth fails, the failureRedirect will be thrown as a Response }); - // manually get the session const session = await getSession(request.headers.get("cookie")); const userRecord = await prisma.user.findFirst({ @@ -49,13 +49,14 @@ export let loader: LoaderFunction = async ({ request }) => { return redirect("/login/mfa", { headers }); } - // and store the user data session.set(authenticator.sessionKey, auth); const headers = new Headers(); headers.append("Set-Cookie", await commitSession(session)); headers.append("Set-Cookie", await setLastAuthMethodHeader("google")); + await trackAndClearReferralSource(request, auth.userId, headers); + return redirect(redirectTo, { headers }); }; diff --git a/apps/webapp/app/routes/confirm-basic-details.tsx b/apps/webapp/app/routes/confirm-basic-details.tsx index 4187a2e9d0c..0596ee8b52a 100644 --- a/apps/webapp/app/routes/confirm-basic-details.tsx +++ b/apps/webapp/app/routes/confirm-basic-details.tsx @@ -25,6 +25,7 @@ import { redirectWithSuccessMessage } from "~/models/message.server"; import { updateUser } from "~/models/user.server"; import { requireUserId } from "~/services/session.server"; import { rootPath } from "~/utils/pathBuilder"; +import { 
getVercelInstallParams } from "~/v3/vercel"; function createSchema( constraints: { @@ -105,7 +106,24 @@ export const action: ActionFunction = async ({ request }) => { referralSource: submission.value.referralSource, }); - return redirectWithSuccessMessage(rootPath(), request, "Your details have been updated."); + // Preserve Vercel integration params if present + const vercelParams = getVercelInstallParams(request); + let redirectUrl = rootPath(); + + if (vercelParams) { + // Redirect to orgs/new with params preserved + const params = new URLSearchParams({ + code: vercelParams.code, + configurationId: vercelParams.configurationId, + integration: "vercel", + }); + if (vercelParams.next) { + params.set("next", vercelParams.next); + } + redirectUrl = `/orgs/new?${params.toString()}`; + } + + return redirectWithSuccessMessage(redirectUrl, request, "Your details have been updated."); } catch (error: any) { return json({ errors: { body: error.message } }, { status: 400 }); } diff --git a/apps/webapp/app/routes/login._index/route.tsx b/apps/webapp/app/routes/login._index/route.tsx index 40cea7905c8..8878ffc8889 100644 --- a/apps/webapp/app/routes/login._index/route.tsx +++ b/apps/webapp/app/routes/login._index/route.tsx @@ -167,7 +167,7 @@ export default function LoginPage() {
{data.lastAuthMethod === "email" && } { const parentMeta = matches @@ -160,11 +161,13 @@ async function completeLogin(request: Request, session: Session, userId: string) session.unset("pending-mfa-user-id"); session.unset("pending-mfa-redirect-to"); - return redirect(redirectTo, { - headers: { - "Set-Cookie": await sessionStorage.commitSession(authSession), - }, - }); + const headers = new Headers(); + headers.append("Set-Cookie", await sessionStorage.commitSession(authSession)); + headers.append("Set-Cookie", await commitSession(session)); + + await trackAndClearReferralSource(request, userId, headers); + + return redirect(redirectTo, { headers }); } export default function LoginMfaPage() { diff --git a/apps/webapp/app/routes/magic.tsx b/apps/webapp/app/routes/magic.tsx index c45b6882caf..682f0ef46e5 100644 --- a/apps/webapp/app/routes/magic.tsx +++ b/apps/webapp/app/routes/magic.tsx @@ -6,6 +6,7 @@ import { authenticator } from "~/services/auth.server"; import { setLastAuthMethodHeader } from "~/services/lastAuthMethod.server"; import { getRedirectTo } from "~/services/redirectTo.server"; import { commitSession, getSession } from "~/services/sessionStorage.server"; +import { trackAndClearReferralSource } from "~/services/referralSource.server"; export async function loader({ request }: LoaderFunctionArgs) { const redirectTo = await getRedirectTo(request); @@ -53,5 +54,7 @@ export async function loader({ request }: LoaderFunctionArgs) { headers.append("Set-Cookie", await commitSession(session)); headers.append("Set-Cookie", await setLastAuthMethodHeader("email")); + await trackAndClearReferralSource(request, auth.userId, headers); + return redirect(redirectTo ?? 
"/", { headers }); } diff --git a/apps/webapp/app/routes/resources.environments.$environmentId.regenerate-api-key.tsx b/apps/webapp/app/routes/resources.environments.$environmentId.regenerate-api-key.tsx index 7ad6b1c6c57..5efb69bc723 100644 --- a/apps/webapp/app/routes/resources.environments.$environmentId.regenerate-api-key.tsx +++ b/apps/webapp/app/routes/resources.environments.$environmentId.regenerate-api-key.tsx @@ -2,8 +2,10 @@ import type { ActionFunctionArgs } from "@remix-run/server-runtime"; import { z } from "zod"; import { environmentFullTitle } from "~/components/environments/EnvironmentLabel"; import { regenerateApiKey } from "~/models/api-key.server"; +import { VercelIntegrationRepository } from "~/models/vercelIntegration.server"; import { jsonWithErrorMessage, jsonWithSuccessMessage } from "~/models/message.server"; import { requireUserId } from "~/services/session.server"; +import { logger } from "~/services/logger.server"; const ParamsSchema = z.object({ environmentId: z.string(), @@ -19,9 +21,21 @@ export async function action({ request, params }: ActionFunctionArgs) { const { environmentId } = ParamsSchema.parse(params); + const formData = await request.formData(); + const syncToVercel = formData.get("syncToVercel") === "on"; + try { const updatedEnvironment = await regenerateApiKey({ userId, environmentId }); + // Sync the regenerated API key to Vercel only when requested and not for DEVELOPMENT + if (syncToVercel && updatedEnvironment.type !== "DEVELOPMENT") { + await syncApiKeyToVercel( + updatedEnvironment.projectId, + updatedEnvironment.type as "PRODUCTION" | "STAGING" | "PREVIEW", + updatedEnvironment.apiKey + ); + } + return jsonWithSuccessMessage( { ok: true }, request, @@ -37,3 +51,27 @@ export async function action({ request, params }: ActionFunctionArgs) { ); } } + +/** + * Sync the API key to Vercel. + * Errors are logged but won't fail the API key regeneration. 
+ */ +async function syncApiKeyToVercel( + projectId: string, + environmentType: "PRODUCTION" | "STAGING" | "PREVIEW" | "DEVELOPMENT", + apiKey: string +): Promise { + const result = await VercelIntegrationRepository.syncSingleApiKeyToVercel({ + projectId, + environmentType, + apiKey, + }); + + if (result.isErr()) { + logger.warn("syncSingleApiKeyToVercel returned failure", { + projectId, + environmentType, + error: result.error.message, + }); + } +} diff --git a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.github.tsx b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.github.tsx index bb7406ed440..afd89f33577 100644 --- a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.github.tsx +++ b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.github.tsx @@ -330,12 +330,15 @@ export function ConnectGitHubRepoModal({ projectSlug, environmentSlug, redirectUrl, + preventDismiss, }: { gitHubAppInstallations: GitHubAppInstallation[]; organizationSlug: string; projectSlug: string; environmentSlug: string; redirectUrl?: string; + /** When true, prevents closing the modal via Escape key or clicking outside */ + preventDismiss?: boolean; }) { const [isModalOpen, setIsModalOpen] = useState(false); const lastSubmission = useActionData() as any; @@ -385,13 +388,34 @@ export function ConnectGitHubRepoModal({ const actionUrl = gitHubResourcePath(organizationSlug, projectSlug, environmentSlug); return ( - + { + // When preventDismiss is true, only allow opening, not closing + if (preventDismiss && !open) { + return; + } + setIsModalOpen(open); + }} + > - + { + if (preventDismiss) { + e.preventDefault(); + } + }} + onEscapeKeyDown={(e) => { + if (preventDismiss) { + e.preventDefault(); + } + }} + > Connect GitHub repository
@@ -514,9 +538,11 @@ export function ConnectGitHubRepoModal({ } cancelButton={ - - - + preventDismiss ? undefined : ( + + + + ) } /> diff --git a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.vercel.tsx b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.vercel.tsx new file mode 100644 index 00000000000..c25f99b0554 --- /dev/null +++ b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.vercel.tsx @@ -0,0 +1,926 @@ +import { useForm } from "@conform-to/react"; +import { parse } from "@conform-to/zod"; +import { + CheckCircleIcon, + ExclamationTriangleIcon, +} from "@heroicons/react/20/solid"; +import { + Form, + useActionData, + useFetcher, + useNavigation, + useLocation, +} from "@remix-run/react"; +import { + type ActionFunctionArgs, + type LoaderFunctionArgs, + json, +} from "@remix-run/server-runtime"; +import { typedjson, useTypedFetcher } from "remix-typedjson"; +import { z } from "zod"; +import { Dialog, DialogContent, DialogHeader, DialogTrigger } from "~/components/primitives/Dialog"; +import { DialogClose } from "@radix-ui/react-dialog"; +import { Button, LinkButton } from "~/components/primitives/Buttons"; +import { Callout } from "~/components/primitives/Callout"; +import { Fieldset } from "~/components/primitives/Fieldset"; +import { FormButtons } from "~/components/primitives/FormButtons"; +import { FormError } from "~/components/primitives/FormError"; +import { Hint } from "~/components/primitives/Hint"; +import { InputGroup } from "~/components/primitives/InputGroup"; +import { Label } from "~/components/primitives/Label"; +import { Paragraph } from "~/components/primitives/Paragraph"; +import { Select, SelectItem } from "~/components/primitives/Select"; +import { SpinnerWhite } from "~/components/primitives/Spinner"; +import { DateTime } from "~/components/primitives/DateTime"; +import { VercelLogo } from 
"~/components/integrations/VercelLogo"; +import { BuildSettingsFields } from "~/components/integrations/VercelBuildSettings"; +import { + redirectBackWithErrorMessage, + redirectWithSuccessMessage, + redirectWithErrorMessage, +} from "~/models/message.server"; +import { findProjectBySlug } from "~/models/project.server"; +import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; +import { logger } from "~/services/logger.server"; +import { requireUserId } from "~/services/session.server"; +import { sanitizeVercelNextUrl } from "~/v3/vercel/vercelUrls.server"; +import { EnvironmentParamSchema, v3ProjectSettingsPath, vercelAppInstallPath, vercelResourcePath } from "~/utils/pathBuilder"; +import { + VercelSettingsPresenter, + type VercelOnboardingData, +} from "~/presenters/v3/VercelSettingsPresenter.server"; +import { VercelIntegrationService } from "~/services/vercelIntegration.server"; +import { VercelIntegrationRepository } from "~/models/vercelIntegration.server"; +import { + type VercelProjectIntegrationData, + type SyncEnvVarsMapping, + type EnvSlug, + envSlugArrayField, + envTypeToSlug, + getAvailableEnvSlugs, + getAvailableEnvSlugsForBuildSettings, +} from "~/v3/vercel/vercelProjectIntegrationSchema"; +import { Result, fromPromise } from "neverthrow"; +import { useEffect, useState } from "react"; + +export type ConnectedVercelProject = { + id: string; + vercelProjectId: string; + vercelProjectName: string; + vercelTeamId: string | null; + integrationData: VercelProjectIntegrationData; + createdAt: Date; +}; + +const safeJsonParse = Result.fromThrowable( + (val: string) => JSON.parse(val) as Record, + () => null +); + +function parseVercelStagingEnvironment( + value: string | null | undefined +): { environmentId: string; displayName: string } | null { + if (!value) return null; + return safeJsonParse(value).match( + (parsed) => { + if (typeof parsed?.environmentId === "string" && typeof parsed?.displayName === "string") { + return { 
environmentId: parsed.environmentId, displayName: parsed.displayName }; + } + return null; + }, + () => null + ); +} + +const UpdateVercelConfigFormSchema = z.object({ + action: z.literal("update-config"), + atomicBuilds: envSlugArrayField, + pullEnvVarsBeforeBuild: envSlugArrayField, + discoverEnvVars: envSlugArrayField, + vercelStagingEnvironment: z.string().nullable().optional(), +}); + +const DisconnectVercelFormSchema = z.object({ + action: z.literal("disconnect"), +}); + +const CompleteOnboardingFormSchema = z.object({ + action: z.literal("complete-onboarding"), + vercelStagingEnvironment: z.string().nullable().optional(), + pullEnvVarsBeforeBuild: envSlugArrayField, + atomicBuilds: envSlugArrayField, + discoverEnvVars: envSlugArrayField, + syncEnvVarsMapping: z.string().optional(), + next: z.string().optional(), + skipRedirect: z.string().optional().transform((val) => val === "true"), +}); + +const SkipOnboardingFormSchema = z.object({ + action: z.literal("skip-onboarding"), +}); + +const SelectVercelProjectFormSchema = z.object({ + action: z.literal("select-vercel-project"), + vercelProjectId: z.string().min(1, "Please select a Vercel project"), + vercelProjectName: z.string().min(1), +}); + +const UpdateEnvMappingFormSchema = z.object({ + action: z.literal("update-env-mapping"), + vercelStagingEnvironment: z.string().nullable().optional(), +}); + +const DisableAutoAssignFormSchema = z.object({ + action: z.literal("disable-auto-assign"), +}); + +const VercelActionSchema = z.discriminatedUnion("action", [ + UpdateVercelConfigFormSchema, + DisconnectVercelFormSchema, + CompleteOnboardingFormSchema, + SkipOnboardingFormSchema, + SelectVercelProjectFormSchema, + UpdateEnvMappingFormSchema, + DisableAutoAssignFormSchema, +]); + +export async function loader({ request, params }: LoaderFunctionArgs) { + const userId = await requireUserId(request); + const { organizationSlug, projectParam, envParam } = EnvironmentParamSchema.parse(params); + + const project = await 
findProjectBySlug(organizationSlug, projectParam, userId); + if (!project) { + throw new Response("Not Found", { status: 404 }); + } + + const environment = await findEnvironmentBySlug(project.id, envParam, userId); + if (!environment) { + throw new Response("Not Found", { status: 404 }); + } + + const presenter = new VercelSettingsPresenter(); + const resultOrFail = await presenter.call({ + projectId: project.id, + organizationId: project.organizationId, + }); + + if (resultOrFail.isErr()) { + logger.error("Failed to load Vercel settings", { + url: request.url, + params, + error: resultOrFail.error, + }); + throw new Response("Failed to load Vercel settings", { status: 500 }); + } + + const result = resultOrFail.value; + const url = new URL(request.url); + const needsOnboarding = url.searchParams.get("vercelOnboarding") === "true"; + const vercelEnvironmentId = url.searchParams.get("vercelEnvironmentId") || undefined; + + let onboardingData: VercelOnboardingData | null = null; + if (needsOnboarding) { + onboardingData = await presenter.getOnboardingData( + project.id, + project.organizationId, + vercelEnvironmentId + ); + } + + const authInvalid = onboardingData?.authInvalid || result.authInvalid || false; + + return typedjson({ + ...result, + authInvalid, + onboardingData, + organizationSlug, + projectSlug: projectParam, + environmentSlug: envParam, + projectId: project.id, + organizationId: project.organizationId, + }); +} + +export async function action({ request, params }: ActionFunctionArgs) { + const userId = await requireUserId(request); + const { organizationSlug, projectParam, envParam } = EnvironmentParamSchema.parse(params); + + const project = await findProjectBySlug(organizationSlug, projectParam, userId); + if (!project) { + throw new Response("Not Found", { status: 404 }); + } + + const environment = await findEnvironmentBySlug(project.id, envParam, userId); + if (!environment) { + throw new Response("Not Found", { status: 404 }); + } + + const 
formData = await request.formData(); + const submission = parse(formData, { schema: VercelActionSchema }); + + if (!submission.value || submission.intent !== "submit") { + return json(submission); + } + + const settingsPath = v3ProjectSettingsPath( + { slug: organizationSlug }, + { slug: projectParam }, + { slug: envParam } + ); + + const vercelService = new VercelIntegrationService(); + const { action: actionType } = submission.value; + + switch (actionType) { + case "update-config": { + const { + atomicBuilds, + pullEnvVarsBeforeBuild, + discoverEnvVars, + vercelStagingEnvironment, + } = submission.value; + + const parsedStagingEnv = parseVercelStagingEnvironment(vercelStagingEnvironment); + + const result = await vercelService.updateVercelIntegrationConfig(project.id, { + atomicBuilds, + pullEnvVarsBeforeBuild, + discoverEnvVars, + vercelStagingEnvironment: parsedStagingEnv, + }); + + if (result) { + return redirectWithSuccessMessage(settingsPath, request, "Vercel settings updated successfully"); + } + + return redirectWithErrorMessage(settingsPath, request, "Failed to update Vercel settings"); + } + + case "disconnect": { + const success = await vercelService.disconnectVercelProject(project.id); + + if (success) { + return redirectWithSuccessMessage(settingsPath, request, "Vercel project disconnected"); + } + + return redirectWithErrorMessage(settingsPath, request, "Failed to disconnect Vercel project"); + } + + case "complete-onboarding": { + const { + vercelStagingEnvironment, + pullEnvVarsBeforeBuild, + atomicBuilds, + discoverEnvVars, + syncEnvVarsMapping, + next, + skipRedirect, + } = submission.value; + + const parsedStagingEnv = parseVercelStagingEnvironment(vercelStagingEnvironment); + const parsedSyncEnvVarsMapping = syncEnvVarsMapping + ? 
safeJsonParse(syncEnvVarsMapping).unwrapOr(undefined) as SyncEnvVarsMapping | undefined + : undefined; + + const result = await vercelService.completeOnboarding(project.id, { + vercelStagingEnvironment: parsedStagingEnv, + pullEnvVarsBeforeBuild, + atomicBuilds, + discoverEnvVars, + syncEnvVarsMapping: parsedSyncEnvVarsMapping, + }); + + if (result) { + if (skipRedirect) { + return json({ success: true }); + } + + if (next) { + const sanitizedNext = sanitizeVercelNextUrl(next); + if (sanitizedNext) { + return json({ success: true, redirectTo: sanitizedNext }); + } + logger.warn("Rejected next URL - not same-origin or vercel.com", { next }); + } + + return json({ success: true, redirectTo: settingsPath }); + } + + return redirectWithErrorMessage(settingsPath, request, "Failed to complete Vercel setup"); + } + + case "update-env-mapping": { + const { vercelStagingEnvironment } = submission.value; + + const parsedStagingEnv = parseVercelStagingEnvironment(vercelStagingEnvironment); + + const result = await vercelService.updateVercelIntegrationConfig(project.id, { + vercelStagingEnvironment: parsedStagingEnv, + }); + + if (result) { + return json({ success: true }); + } + + return json({ success: false, error: "Failed to update environment mapping" }, { status: 400 }); + } + + case "skip-onboarding": { + return redirectWithSuccessMessage(settingsPath, request, "Vercel integration setup skipped"); + } + + case "select-vercel-project": { + const { vercelProjectId, vercelProjectName } = submission.value; + + const selectResult = await fromPromise( + vercelService.selectVercelProject({ + organizationId: project.organizationId, + projectId: project.id, + vercelProjectId, + vercelProjectName, + userId, + }), + (error) => error + ); + + if (selectResult.isErr()) { + logger.error("Failed to select Vercel project", { error: selectResult.error }); + return json({ + error: "Failed to connect Vercel project. 
Please try again.", + }); + } + + const { integration, syncResult } = selectResult.value; + + if (!syncResult.success && syncResult.errors.length > 0) { + logger.warn("Failed to send trigger secrets to Vercel", { + projectId: project.id, + vercelProjectId, + errors: syncResult.errors, + }); + } + + return json({ + success: true, + integrationId: integration.id, + syncErrors: syncResult.errors, + }); + } + + case "disable-auto-assign": { + const orgIntegration = await VercelIntegrationRepository.findVercelOrgIntegrationForProject( + project.id + ); + + if (!orgIntegration) { + return redirectWithErrorMessage(settingsPath, request, "No Vercel integration found"); + } + + const projectIntegration = await vercelService.getVercelProjectIntegration(project.id); + + if (!projectIntegration) { + return redirectWithErrorMessage(settingsPath, request, "No Vercel project connected"); + } + + const teamId = await VercelIntegrationRepository.getTeamIdFromIntegration(orgIntegration); + + const disableResult = await VercelIntegrationRepository.getVercelClient(orgIntegration) + .andThen((client) => + VercelIntegrationRepository.disableAutoAssignCustomDomains( + client, + projectIntegration.parsedIntegrationData.vercelProjectId, + teamId + ) + ); + + if (disableResult.isErr()) { + logger.error("Failed to disable auto-assign custom domains", { error: disableResult.error }); + return redirectWithErrorMessage(settingsPath, request, "Failed to disable auto-assign custom domains"); + } + + return redirectWithSuccessMessage(settingsPath, request, "Auto-assign custom domains disabled"); + } + + default: { + submission.value satisfies never; + return redirectBackWithErrorMessage(request, "Failed to process request"); + } + } +} + +function VercelConnectionPrompt({ + organizationSlug, + projectSlug, + environmentSlug, + hasOrgIntegration, + isGitHubConnected, + onOpenModal, + isLoading, +}: { + organizationSlug: string; + projectSlug: string; + environmentSlug: string; + hasOrgIntegration: 
boolean; + isGitHubConnected: boolean; + onOpenModal?: () => void; + isLoading?: boolean; +}) { + const installPath = vercelAppInstallPath(organizationSlug, projectSlug); + + const handleConnectProject = () => { + if (onOpenModal) { + onOpenModal(); + } + }; + + const isLoadingProjects = isLoading ?? false; + const isDisabled = isLoadingProjects || !onOpenModal; + + return ( +
+ +
+
+ {hasOrgIntegration ? ( + <> + + + Vercel app is installed + + {!onOpenModal && ( + + Please reconnect Vercel to continue + + )} + + ) : ( + <> + } + > + Install Vercel app + + + )} +
+
+
+
+ ); +} + +function VercelAuthInvalidBanner({ + organizationSlug, + projectSlug, +}: { + organizationSlug: string; + projectSlug: string; +}) { + const installUrl = vercelAppInstallPath(organizationSlug, projectSlug); + + return ( + +
+
+

+ Vercel connection expired +

+

+ Your Vercel access token has expired or been revoked. Please reconnect to restore functionality. +

+ + Reconnect Vercel + +
+
+
+ ); +} + +function VercelGitHubWarning() { + return ( + +

+ GitHub integration is not connected. Vercel integration cannot sync environment variables and + link deployments without a properly installed GitHub integration. +

+
+ ); +} + +function envSlugLabel(slug: EnvSlug): string { + switch (slug) { + case "prod": + return "Production"; + case "stg": + return "Staging"; + case "preview": + return "Preview"; + case "dev": + return "Development"; + } +} + +function ConnectedVercelProjectForm({ + connectedProject, + hasStagingEnvironment, + hasPreviewEnvironment, + customEnvironments, + autoAssignCustomDomains, + organizationSlug, + projectSlug, + environmentSlug, +}: { + connectedProject: ConnectedVercelProject; + hasStagingEnvironment: boolean; + hasPreviewEnvironment: boolean; + customEnvironments: Array<{ id: string; slug: string }>; + autoAssignCustomDomains: boolean | null; + organizationSlug: string; + projectSlug: string; + environmentSlug: string; +}) { + const lastSubmission = useActionData() as any; + const navigation = useNavigation(); + + const [hasConfigChanges, setHasConfigChanges] = useState(false); + const [configValues, setConfigValues] = useState({ + atomicBuilds: connectedProject.integrationData.config.atomicBuilds ?? [], + pullEnvVarsBeforeBuild: connectedProject.integrationData.config.pullEnvVarsBeforeBuild ?? [], + discoverEnvVars: connectedProject.integrationData.config.discoverEnvVars ?? [], + vercelStagingEnvironment: + connectedProject.integrationData.config.vercelStagingEnvironment ?? null, + }); + + const originalAtomicBuilds = connectedProject.integrationData.config.atomicBuilds ?? []; + const originalPullEnvVars = connectedProject.integrationData.config.pullEnvVarsBeforeBuild ?? []; + const originalDiscoverEnvVars = connectedProject.integrationData.config.discoverEnvVars ?? []; + const originalStagingEnv = connectedProject.integrationData.config.vercelStagingEnvironment ?? 
null; + + useEffect(() => { + const atomicBuildsChanged = + JSON.stringify([...configValues.atomicBuilds].sort()) !== + JSON.stringify([...originalAtomicBuilds].sort()); + const pullEnvVarsChanged = + JSON.stringify([...configValues.pullEnvVarsBeforeBuild].sort()) !== + JSON.stringify([...originalPullEnvVars].sort()); + const discoverEnvVarsChanged = + JSON.stringify([...configValues.discoverEnvVars].sort()) !== + JSON.stringify([...originalDiscoverEnvVars].sort()); + const stagingEnvChanged = configValues.vercelStagingEnvironment?.environmentId !== originalStagingEnv?.environmentId; + + setHasConfigChanges(atomicBuildsChanged || pullEnvVarsChanged || discoverEnvVarsChanged || stagingEnvChanged); + }, [configValues, originalAtomicBuilds, originalPullEnvVars, originalDiscoverEnvVars, originalStagingEnv]); + + const [configForm, fields] = useForm({ + id: "update-vercel-config", + lastSubmission: lastSubmission, + shouldRevalidate: "onSubmit", + onValidate({ formData }) { + return parse(formData, { + schema: UpdateVercelConfigFormSchema, + }); + }, + }); + + const isConfigLoading = + navigation.formData?.get("action") === "update-config" && + (navigation.state === "submitting" || navigation.state === "loading"); + + const actionUrl = vercelResourcePath(organizationSlug, projectSlug, environmentSlug); + + const availableEnvSlugs = getAvailableEnvSlugs(hasStagingEnvironment, hasPreviewEnvironment); + const availableEnvSlugsForBuildSettings = getAvailableEnvSlugsForBuildSettings(hasStagingEnvironment, hasPreviewEnvironment); + + const formatSelectedEnvs = (selected: EnvSlug[], availableSlugs: EnvSlug[] = availableEnvSlugs): string => { + if (selected.length === 0) return "None selected"; + if (selected.length === availableSlugs.length) return "All environments"; + return selected.map(envSlugLabel).join(", "); + }; + + return ( + <> +
+
+ + + {connectedProject.vercelProjectName} + + + + +
+ + + + + + Disconnect Vercel project +
+ + Are you sure you want to disconnect{" "} + {connectedProject.vercelProjectName}? + This will stop pulling environment variables and disable atomic deployments. + + + + + + } + cancelButton={ + + + + } + /> +
+
+
+
+ + {/* Configuration form */} +
+ + + + + +
+ +
+ {/* Staging environment mapping */} + {hasStagingEnvironment && customEnvironments && customEnvironments.length > 0 && ( +
+ + + Select which custom Vercel environment should map to Trigger.dev's Staging + environment. + + +
+ )} + + + setConfigValues((prev) => ({ ...prev, pullEnvVarsBeforeBuild: slugs })) + } + discoverEnvVars={configValues.discoverEnvVars} + onDiscoverEnvVarsChange={(slugs) => + setConfigValues((prev) => ({ ...prev, discoverEnvVars: slugs })) + } + atomicBuilds={configValues.atomicBuilds} + onAtomicBuildsChange={(slugs) => + setConfigValues((prev) => ({ ...prev, atomicBuilds: slugs })) + } + envVarsConfigLink={`/orgs/${organizationSlug}/projects/${projectSlug}/env/${environmentSlug}/environment-variables`} + /> + + {/* Warning: autoAssignCustomDomains must be disabled for atomic deployments */} + {autoAssignCustomDomains !== false && + configValues.atomicBuilds.includes("prod") && ( + +
+

+ Atomic deployments require the "Auto-assign Custom Domains" setting to be + disabled on your Vercel project. Without this, Vercel will promote + deployments before Trigger.dev is ready. +

+ + + + +
+
+ )} +
+ + {configForm.error} +
+ + + Save + + } + /> +
+ + + ); +} + +function VercelSettingsPanel({ + organizationSlug, + projectSlug, + environmentSlug, + onOpenVercelModal, + isLoadingVercelData, +}: { + organizationSlug: string; + projectSlug: string; + environmentSlug: string; + onOpenVercelModal?: () => void; + isLoadingVercelData?: boolean; +}) { + const fetcher = useTypedFetcher(); + const location = useLocation(); + const data = fetcher.data; + const [hasError, setHasError] = useState(false); + const [hasFetched, setHasFetched] = useState(false); + + useEffect(() => { + if (!data?.authInvalid && !hasError && !data && !hasFetched) { + fetcher.load(vercelResourcePath(organizationSlug, projectSlug, environmentSlug)); + setHasFetched(true); + } + }, [organizationSlug, projectSlug, environmentSlug, data?.authInvalid, hasError, data, hasFetched]); + + useEffect(() => { + if (hasFetched && fetcher.state === "idle" && fetcher.data === undefined && !hasError) { + setHasError(true); + } + }, [fetcher.state, fetcher.data, hasError, hasFetched]); + + if (hasError) { + return ( +
+
+ +
+

Failed to load Vercel settings

+

+ There was an error loading the Vercel integration settings. Please refresh the page to try again. +

+
+
+
+ ); + } + + if (fetcher.state === "loading" && !data) { + return ( +
+ + Loading Vercel settings... +
+ ); + } + + if (!data || !data.enabled) { + return null; + } + + const showGitHubWarning = data.connectedProject && !data.isGitHubConnected; + const showAuthInvalid = data.authInvalid || data.onboardingData?.authInvalid; + + if (data.connectedProject) { + return ( + <> + {showAuthInvalid && } + {showGitHubWarning && } + {!showAuthInvalid && ()} + + ); + } + + return ( +
+ {showAuthInvalid && } + {!showAuthInvalid && ( + <> + + + {data.hasOrgIntegration + ? "Connect your Vercel project to pull environment variables and trigger builds automatically." + : "Install the Vercel app to connect your projects and pull environment variables."} + + {!data.isGitHubConnected && ( + + GitHub integration is not connected. Vercel integration cannot sync environment variables and + link deployments without a properly installed GitHub integration. + + )} + + )} +
+ ); +} + + +import { VercelOnboardingModal } from "~/components/integrations/VercelOnboardingModal"; + +export { VercelSettingsPanel, VercelOnboardingModal }; diff --git a/apps/webapp/app/routes/vercel.callback.ts b/apps/webapp/app/routes/vercel.callback.ts new file mode 100644 index 00000000000..6a188acfa3f --- /dev/null +++ b/apps/webapp/app/routes/vercel.callback.ts @@ -0,0 +1,78 @@ +import type { LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { redirect } from "@remix-run/server-runtime"; +import { z } from "zod"; +import { logger } from "~/services/logger.server"; +import { getUserId } from "~/services/session.server"; +import { setReferralSourceCookie } from "~/services/referralSource.server"; +import { requestUrl } from "~/utils/requestUrl.server"; +import { sanitizeVercelNextUrl } from "~/v3/vercel/vercelUrls.server"; + +const VercelCallbackSchema = z + .object({ + code: z.string().optional(), + state: z.string().optional(), + error: z.string().optional(), + error_description: z.string().optional(), + configurationId: z.string().optional(), + next: z.string().optional() + }) + .passthrough(); + +export async function loader({ request }: LoaderFunctionArgs) { + if (request.method.toUpperCase() !== "GET") { + throw new Response("Method Not Allowed", { status: 405 }); + } + + const userId = await getUserId(request); + if (!userId) { + const currentUrl = new URL(request.url); + const redirectTo = `${currentUrl.pathname}${currentUrl.search}`; + const referralCookie = await setReferralSourceCookie("vercel"); + + const headers = new Headers(); + headers.append("Set-Cookie", referralCookie); + + throw redirect(`/login?redirectTo=${encodeURIComponent(redirectTo)}`, { headers }); + } + + const url = requestUrl(request); + const parsed = VercelCallbackSchema.safeParse(Object.fromEntries(url.searchParams)); + + if (!parsed.success) { + logger.error("Invalid Vercel callback params", { error: parsed.error }); + throw new Response("Invalid callback 
parameters", { status: 400 }); + } + + const { code, state, error, error_description, configurationId, next: rawNextUrl } = parsed.data; + + // Sanitize the `next` parameter to prevent open redirects + const nextUrl = sanitizeVercelNextUrl(rawNextUrl); + + if (error) { + logger.error("Vercel OAuth error", { error, error_description }); + throw new Response("Vercel OAuth error", { status: 500 }); + } + + if (!code) { + logger.error("Missing authorization code from Vercel callback"); + throw new Response("Missing authorization code", { status: 400 }); + } + + // Route with state: dashboard-invoked flow + if (state) { + const params = new URLSearchParams({ state, code, origin: "dashboard" }); + if (configurationId) params.set("configurationId", configurationId); + if (nextUrl) params.set("next", nextUrl); + return redirect(`/vercel/connect?${params.toString()}`); + } + + // Route without state but with configurationId: marketplace-invoked flow + if (configurationId) { + const params = new URLSearchParams({ code, configurationId, origin: "marketplace" }); + if (nextUrl) params.set("next", nextUrl); + return redirect(`/vercel/onboarding?${params.toString()}`); + } + + logger.error("Missing both state and configurationId from Vercel callback"); + throw new Response("Missing state or configurationId parameter", { status: 400 }); +} diff --git a/apps/webapp/app/routes/vercel.configure.tsx b/apps/webapp/app/routes/vercel.configure.tsx new file mode 100644 index 00000000000..25b9197176c --- /dev/null +++ b/apps/webapp/app/routes/vercel.configure.tsx @@ -0,0 +1,52 @@ +import type { LoaderFunctionArgs } from "@remix-run/node"; +import { redirect } from "@remix-run/node"; +import { z } from "zod"; +import { prisma } from "~/db.server"; +import { requireUserId } from "~/services/session.server"; +import { organizationVercelIntegrationPath } from "~/utils/pathBuilder"; + +const SearchParamsSchema = z.object({ + configurationId: z.string(), +}); + +/** + * Endpoint to handle 
Vercel integration configuration request coming from marketplace + */ +export const loader = async ({ request }: LoaderFunctionArgs) => { + await requireUserId(request); + const url = new URL(request.url); + const searchParams = Object.fromEntries(url.searchParams); + + const { configurationId } = SearchParamsSchema.parse(searchParams); + + // Find the organization integration by configurationId (installationId in integrationData) + const integration = await prisma.organizationIntegration.findFirst({ + where: { + service: "VERCEL", + deletedAt: null, + integrationData: { + path: ["installationId"], + equals: configurationId, + }, + }, + include: { + organization: { + select: { + slug: true, + }, + }, + }, + }); + + if (!integration) { + throw new Response("Integration not found", { status: 404 }); + } + + // Redirect to the organization's Vercel integration page + return redirect(organizationVercelIntegrationPath(integration.organization)); +}; + +// This route doesn't render anything, it just redirects +export default function VercelConfigurePage() { + return null; +} \ No newline at end of file diff --git a/apps/webapp/app/routes/vercel.connect.tsx b/apps/webapp/app/routes/vercel.connect.tsx new file mode 100644 index 00000000000..7c0701edfe3 --- /dev/null +++ b/apps/webapp/app/routes/vercel.connect.tsx @@ -0,0 +1,170 @@ +import type { LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { redirect } from "@remix-run/server-runtime"; +import { fromPromise } from "neverthrow"; +import { z } from "zod"; +import { prisma } from "~/db.server"; +import { VercelIntegrationRepository, type TokenResponse } from "~/models/vercelIntegration.server"; +import { logger } from "~/services/logger.server"; +import { requireUserId } from "~/services/session.server"; +import { requestUrl } from "~/utils/requestUrl.server"; +import { v3ProjectSettingsPath } from "~/utils/pathBuilder"; +import { validateVercelOAuthState } from "~/v3/vercel/vercelOAuthState.server"; + 
+const VercelConnectSchema = z.object({ + state: z.string(), + configurationId: z.string().optional(), + code: z.string(), + next: z.string().optional(), + origin: z.enum(["marketplace", "dashboard"]), +}); + +async function createOrFindVercelIntegration( + organizationId: string, + projectId: string, + tokenResponse: TokenResponse, + configurationId: string | undefined, + origin: 'marketplace' | 'dashboard' +): Promise<void> { + const project = await prisma.project.findUnique({ + where: { id: projectId }, + include: { organization: true }, + }); + + if (!project) { + throw new Error("Project not found"); + } + + let orgIntegration = await VercelIntegrationRepository.findVercelOrgIntegrationByTeamId( + organizationId, + tokenResponse.teamId ?? null + ); + + if (orgIntegration) { + await VercelIntegrationRepository.updateVercelOrgIntegrationToken({ + integrationId: orgIntegration.id, + accessToken: tokenResponse.accessToken, + tokenType: tokenResponse.tokenType, + teamId: tokenResponse.teamId ?? null, + userId: tokenResponse.userId, + installationId: configurationId, + raw: tokenResponse.raw + }); + } else { + await VercelIntegrationRepository.createVercelOrgIntegration({ + accessToken: tokenResponse.accessToken, + tokenType: tokenResponse.tokenType, + teamId: tokenResponse.teamId ??
null, + userId: tokenResponse.userId, + installationId: configurationId, + organization: project.organization, + raw: tokenResponse.raw, + origin, + }); + } +} + +export async function loader({ request }: LoaderFunctionArgs) { + const userId = await requireUserId(request); + const url = requestUrl(request); + + const parsed = VercelConnectSchema.safeParse(Object.fromEntries(url.searchParams)); + if (!parsed.success) { + logger.error("Invalid Vercel connect params", { error: parsed.error }); + throw new Response("Invalid parameters", { status: 400 }); + } + + const { state, configurationId, code, next, origin } = parsed.data; + + const validationResult = await validateVercelOAuthState(state); + if (!validationResult.ok) { + logger.error("Invalid Vercel OAuth state JWT", { error: validationResult.error }); + + if ( + validationResult.error?.includes("expired") || + validationResult.error?.includes("Token has expired") + ) { + const params = new URLSearchParams({ error: "expired" }); + return redirect(`/vercel/onboarding?${params.toString()}`); + } + + throw new Response("Invalid state", { status: 400 }); + } + + const stateData = validationResult.state; + + const project = await prisma.project.findFirst({ + where: { + id: stateData.projectId, + organizationId: stateData.organizationId, + deletedAt: null, + organization: { + members: { + some: { userId }, + }, + }, + }, + include: { + organization: true, + }, + }); + + if (!project) { + logger.error("Project not found or access denied", { + projectId: stateData.projectId, + userId, + }); + throw new Response("Project not found", { status: 404 }); + } + + const tokenResult = await VercelIntegrationRepository.exchangeCodeForToken(code); + if (tokenResult.isErr()) { + const params = new URLSearchParams({ error: "expired" }); + return redirect(`/vercel/onboarding?${params.toString()}`); + } + const tokenResponse = tokenResult.value; + + const environment = await prisma.runtimeEnvironment.findFirst({ + where: { + 
projectId: project.id, + slug: stateData.environmentSlug, + archivedAt: null, + }, + }); + + if (!environment) { + logger.error("Environment not found", { + projectId: project.id, + environmentSlug: stateData.environmentSlug, + }); + throw new Response("Environment not found", { status: 404 }); + } + + const settingsPath = v3ProjectSettingsPath( + { slug: stateData.organizationSlug }, + { slug: stateData.projectSlug }, + { slug: environment.slug } + ); + + const result = await fromPromise( + createOrFindVercelIntegration(stateData.organizationId, stateData.projectId, tokenResponse, configurationId, origin), + (error) => error + ); + + if (result.isErr()) { + logger.error("Failed to complete Vercel integration", { error: result.error }); + throw redirect(settingsPath); + } + + logger.info("Vercel organization integration created successfully", { + organizationId: stateData.organizationId, + projectId: stateData.projectId, + teamId: tokenResponse.teamId, + }); + + const params = new URLSearchParams({ vercelOnboarding: "true", origin }); + if (next) { + params.set("next", next); + } + + return redirect(`${settingsPath}?${params.toString()}`); +} diff --git a/apps/webapp/app/routes/vercel.install.tsx b/apps/webapp/app/routes/vercel.install.tsx new file mode 100644 index 00000000000..6a1ca4d7a64 --- /dev/null +++ b/apps/webapp/app/routes/vercel.install.tsx @@ -0,0 +1,73 @@ +import type { LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { redirect } from "@remix-run/server-runtime"; +import { z } from "zod"; +import { $replica } from "~/db.server"; +import { requireUser } from "~/services/session.server"; +import { logger } from "~/services/logger.server"; +import { OrgIntegrationRepository } from "~/models/orgIntegration.server"; +import { generateVercelOAuthState } from "~/v3/vercel/vercelOAuthState.server"; +import { findProjectBySlug } from "~/models/project.server"; + +const QuerySchema = z.object({ + org_slug: z.string(), + project_slug: z.string(), 
+}); + +export const loader = async ({ request }: LoaderFunctionArgs) => { + const searchParams = new URL(request.url).searchParams; + const parsed = QuerySchema.safeParse(Object.fromEntries(searchParams)); + + if (!parsed.success) { + logger.warn("Vercel App installation redirect with invalid params", { + searchParams, + error: parsed.error, + }); + throw redirect("/"); + } + + const { org_slug, project_slug } = parsed.data; + const user = await requireUser(request); + + // Find the organization + const org = await $replica.organization.findFirst({ + where: { slug: org_slug, members: { some: { userId: user.id } }, deletedAt: null }, + orderBy: { createdAt: "desc" }, + select: { + id: true, + }, + }); + + if (!org) { + throw redirect("/"); + } + + // Find the project + const project = await findProjectBySlug(org_slug, project_slug, user.id); + if (!project) { + logger.warn("Vercel App installation attempt for non-existent project", { + org_slug, + project_slug, + userId: user.id, + }); + throw redirect("/"); + } + + // Use "prod" as the default environment slug for the redirect + // The callback will redirect to the settings page for this environment + const environmentSlug = "prod"; + + // Generate JWT state token + const stateToken = await generateVercelOAuthState({ + organizationId: org.id, + projectId: project.id, + environmentSlug, + organizationSlug: org_slug, + projectSlug: project_slug, + }); + + // Generate Vercel install URL + const vercelInstallUrl = OrgIntegrationRepository.vercelInstallUrl(stateToken); + + return redirect(vercelInstallUrl); +}; + diff --git a/apps/webapp/app/routes/vercel.onboarding.tsx b/apps/webapp/app/routes/vercel.onboarding.tsx new file mode 100644 index 00000000000..bdd1c1d05ca --- /dev/null +++ b/apps/webapp/app/routes/vercel.onboarding.tsx @@ -0,0 +1,465 @@ +import type { ActionFunctionArgs, LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { json, redirect } from "@remix-run/server-runtime"; +import { fromPromise 
} from "neverthrow"; +import { useEffect, useState } from "react"; +import { Form, useNavigation } from "@remix-run/react"; +import { typedjson, useTypedLoaderData } from "remix-typedjson"; +import { z } from "zod"; +import { BuildingOfficeIcon, FolderIcon } from "@heroicons/react/20/solid"; +import { AppContainer, MainCenteredContainer } from "~/components/layout/AppLayout"; +import { BackgroundWrapper } from "~/components/BackgroundWrapper"; +import { Button, LinkButton } from "~/components/primitives/Buttons"; +import { Fieldset } from "~/components/primitives/Fieldset"; +import { FormButtons } from "~/components/primitives/FormButtons"; +import { FormTitle } from "~/components/primitives/FormTitle"; +import { InputGroup } from "~/components/primitives/InputGroup"; +import { Label } from "~/components/primitives/Label"; +import { Select, SelectItem } from "~/components/primitives/Select"; +import { ButtonSpinner } from "~/components/primitives/Spinner"; +import { prisma } from "~/db.server"; +import { logger } from "~/services/logger.server"; +import { requireUserId } from "~/services/session.server"; +import { confirmBasicDetailsPath, newProjectPath } from "~/utils/pathBuilder"; +import { redirectWithErrorMessage } from "~/models/message.server"; +import { generateVercelOAuthState } from "~/v3/vercel/vercelOAuthState.server"; + +const LoaderParamsSchema = z.object({ + organizationId: z.string().optional().nullable(), + code: z.string().optional().nullable(), + configurationId: z.string().optional().nullable(), + next: z.string().optional().nullable(), + error: z.string().optional().nullable(), +}); + +const SelectOrgActionSchema = z.object({ + action: z.literal("select-org"), + organizationId: z.string(), + code: z.string(), + configurationId: z.string().optional().nullable(), + next: z.string().optional(), +}); + +const SelectProjectActionSchema = z.object({ + action: z.literal("select-project"), + projectId: z.string(), + organizationId: z.string(), + code: 
z.string(), + configurationId: z.string().optional().nullable(), + next: z.string().optional().nullable(), +}); + +const ActionSchema = z.discriminatedUnion("action", [ + SelectOrgActionSchema, + SelectProjectActionSchema, +]); + +export async function loader({ request }: LoaderFunctionArgs) { + const userId = await requireUserId(request); + const url = new URL(request.url); + + const params = LoaderParamsSchema.safeParse({ + organizationId: url.searchParams.get("organizationId"), + code: url.searchParams.get("code"), + configurationId: url.searchParams.get("configurationId"), + next: url.searchParams.get("next"), + error: url.searchParams.get("error"), + }); + + if (!params.success) { + logger.error("Invalid params for Vercel onboarding", { error: params.error }); + throw redirectWithErrorMessage( + "/", + request, + "Invalid installation parameters. Please try again from Vercel." + ); + } + + const { error } = params.data; + if (error === "expired") { + return typedjson({ + step: "error" as const, + error: "Your installation session has expired. Please start the installation again.", + code: params.data.code ?? null, + configurationId: params.data.configurationId ?? null, + next: params.data.next ?? null, + }); + } + + if (!params.data.code) { + logger.error("Missing code parameter for Vercel onboarding"); + throw redirectWithErrorMessage( + "/", + request, + "Invalid installation parameters. Please try again from Vercel." 
+ ); + } + + const code = params.data.code; + + const organizations = await prisma.organization.findMany({ + where: { + members: { + some: { userId }, + }, + deletedAt: null, + }, + select: { + id: true, + title: true, + slug: true, + projects: { + where: { + deletedAt: null, + }, + select: { + id: true, + name: true, + slug: true, + }, + orderBy: { + createdAt: "asc", + }, + }, + }, + orderBy: { + createdAt: "asc", + }, + }); + + // New user: no organizations + if (organizations.length === 0) { + const onboardingParams = new URLSearchParams(); + onboardingParams.set("code", code); + if (params.data.configurationId) { + onboardingParams.set("configurationId", params.data.configurationId); + } + onboardingParams.set("integration", "vercel"); + if (params.data.next) { + onboardingParams.set("next", params.data.next); + } + throw redirect(`${confirmBasicDetailsPath()}?${onboardingParams.toString()}`); + } + + // If organizationId is provided, show project selection + if (params.data.organizationId) { + const organization = organizations.find((org) => org.id === params.data.organizationId); + + if (!organization) { + logger.error("Organization not found or access denied", { + organizationId: params.data.organizationId, + userId, + }); + throw redirectWithErrorMessage( + "/", + request, + "Organization not found. Please try again." + ); + } + + return typedjson({ + step: "project" as const, + organization, + organizations, + code: code, + configurationId: params.data.configurationId ?? null, + next: params.data.next ?? null, + }); + } + + return typedjson({ + step: "org" as const, + organizations, + code: code, + configurationId: params.data.configurationId ?? null, + next: params.data.next ?? 
null, + }); +} + +export async function action({ request }: ActionFunctionArgs) { + const userId = await requireUserId(request); + const formData = await request.formData(); + + const submission = ActionSchema.safeParse({ + action: formData.get("action"), + organizationId: formData.get("organizationId"), + projectId: formData.get("projectId"), + code: formData.get("code"), + configurationId: formData.get("configurationId"), + next: formData.get("next"), + }); + + if (!submission.success) { + return json({ error: "Invalid submission" }, { status: 400 }); + } + + const { code, configurationId, next } = submission.data; + + // Handle org selection + if (submission.data.action === "select-org") { + const { organizationId } = submission.data; + + const projectParams = new URLSearchParams(); + projectParams.set("organizationId", organizationId); + projectParams.set("code", code); + if (configurationId) { + projectParams.set("configurationId", configurationId); + } + if (next) { + projectParams.set("next", next); + } + + return redirect(`/vercel/onboarding?${projectParams.toString()}`); + } + + // Handle project selection + const { projectId, organizationId } = submission.data; + + const project = await prisma.project.findFirst({ + where: { + id: projectId, + organizationId, + deletedAt: null, + organization: { + members: { some: { userId } }, + }, + }, + include: { + organization: true, + }, + }); + + if (!project) { + logger.error("Project not found or access denied", { projectId, userId }); + return json({ error: "Project not found" }, { status: 404 }); + } + + const environment = await prisma.runtimeEnvironment.findFirst({ + where: { + projectId: project.id, + slug: "prod", + archivedAt: null, + }, + }); + + if (!environment) { + logger.error("Environment not found", { projectId: project.id }); + return json({ error: "Environment not found" }, { status: 404 }); + } + + const stateResult = await fromPromise( + generateVercelOAuthState({ + organizationId: 
project.organizationId, + projectId: project.id, + environmentSlug: environment.slug, + organizationSlug: project.organization.slug, + projectSlug: project.slug, + }), + (error) => error + ); + + if (stateResult.isErr()) { + logger.error("Failed to generate Vercel OAuth state", { error: stateResult.error }); + return json({ error: "Failed to generate installation state" }, { status: 500 }); + } + + const params = new URLSearchParams(); + params.set("state", stateResult.value); + params.set("code", code); + if (configurationId) { + params.set("configurationId", configurationId); + } + params.set("origin", "marketplace"); + if (next) { + params.set("next", next); + } + + return redirect(`/vercel/connect?${params.toString()}`, 303); +} + +export default function VercelOnboardingPage() { + const data = useTypedLoaderData(); + const navigation = useNavigation(); + const isSubmitting = navigation.state === "submitting"; + const [isInstalling, setIsInstalling] = useState(false); + + // Reset isInstalling when navigation returns to idle (e.g. on error) + useEffect(() => { + if (navigation.state === "idle" && isInstalling) { + setIsInstalling(false); + } + }, [navigation.state, isInstalling]); + + if (data.step === "error") { + return ( + + + + + + + + + ); + } + + if (data.step === "org") { + const newOrgUrl = (() => { + const params = new URLSearchParams(); + params.set("code", data.code); + if (data.configurationId) { + params.set("configurationId", data.configurationId); + } + params.set("integration", "vercel"); + if (data.next) { + params.set("next", data.next); + } + return `/orgs/new?${params.toString()}`; + })(); + + return ( + + + + } + title="Select Organization" + description="Choose which organization to install the Vercel integration into." + /> +
+ + + {data.configurationId && ( + + )} + {data.next && } + +
+ + + + + + + + New Organization + + +
+ } + /> + + + + + + ); + } + + const newProjectUrl = (() => { + const params = new URLSearchParams(); + params.set("code", data.code); + if (data.configurationId) { + params.set("configurationId", data.configurationId); + } + params.set("integration", "vercel"); + params.set("organizationId", data.organization.id); + if (data.next) { + params.set("next", data.next); + } + return `${newProjectPath({ slug: data.organization.slug })}?${params.toString()}`; + })(); + + const isLoading = isSubmitting || isInstalling; + + return ( + + + + } + title="Select Project" + description={`Choose which project in "${data.organization.title}" to install the Vercel integration into.`} + /> +
setIsInstalling(true)}> + + + + {data.configurationId && ( + + )} + {data.next && } + +
+ + + + + + + + New Project + + +
+ } + /> + + + + + + ); +} diff --git a/apps/webapp/app/services/org.server.ts b/apps/webapp/app/services/org.server.ts new file mode 100644 index 00000000000..75c1467ab24 --- /dev/null +++ b/apps/webapp/app/services/org.server.ts @@ -0,0 +1,20 @@ +import { prisma } from "~/db.server"; +import { requireUserId } from "./session.server"; + +export async function requireOrganization(request: Request, organizationSlug: string) { + const userId = await requireUserId(request); + + const organization = await prisma.organization.findFirst({ + where: { + slug: organizationSlug, + members: { some: { userId } }, + deletedAt: null, + }, + }); + + if (!organization) { + throw new Response("Organization not found", { status: 404 }); + } + + return { organization, userId }; +} diff --git a/apps/webapp/app/services/postAuth.server.ts b/apps/webapp/app/services/postAuth.server.ts index 39e914129a1..feb42ccaef8 100644 --- a/apps/webapp/app/services/postAuth.server.ts +++ b/apps/webapp/app/services/postAuth.server.ts @@ -10,5 +10,8 @@ export async function postAuthentication({ loginMethod: User["authenticationMethod"]; isNewUser: boolean; }) { - telemetry.user.identify({ user, isNewUser }); + telemetry.user.identify({ + user, + isNewUser, + }); } diff --git a/apps/webapp/app/services/referralSource.server.ts b/apps/webapp/app/services/referralSource.server.ts new file mode 100644 index 00000000000..e98c8ebcb2c --- /dev/null +++ b/apps/webapp/app/services/referralSource.server.ts @@ -0,0 +1,53 @@ +import { createCookie } from "@remix-run/node"; +import { z } from "zod"; +import { prisma } from "~/db.server"; +import { env } from "~/env.server"; +import { telemetry } from "~/services/telemetry.server"; + +const ReferralSourceSchema = z.enum(["vercel"]); + +export type ReferralSource = z.infer; + +// Cookie that persists for 1 hour to track referral source during login flow +export const referralSourceCookie = createCookie("referral-source", { + maxAge: 60 * 60, // 1 hour + httpOnly: 
true, + sameSite: "lax", + secure: env.NODE_ENV === "production", +}); + +export async function getReferralSource(request: Request): Promise { + const cookie = request.headers.get("Cookie"); + const value = await referralSourceCookie.parse(cookie); + const parsed = ReferralSourceSchema.safeParse(value); + return parsed.success ? parsed.data : null; +} + +export async function setReferralSourceCookie(source: ReferralSource): Promise { + return referralSourceCookie.serialize(source); +} + +export async function clearReferralSourceCookie(): Promise { + return referralSourceCookie.serialize("", { + maxAge: 0, + }); +} + +export async function trackAndClearReferralSource( + request: Request, + userId: string, + headers: Headers +): Promise { + const referralSource = await getReferralSource(request); + if (!referralSource) return; + + headers.append("Set-Cookie", await clearReferralSourceCookie()); + + const user = await prisma.user.findUnique({ where: { id: userId } }); + if (!user) return; + + const userAge = Date.now() - user.createdAt.getTime(); + if (userAge >= 30 * 1000) return; + + telemetry.user.identify({ user, isNewUser: true, referralSource }); +} diff --git a/apps/webapp/app/services/telemetry.server.ts b/apps/webapp/app/services/telemetry.server.ts index 98ca11ed908..f8bd3d3d993 100644 --- a/apps/webapp/app/services/telemetry.server.ts +++ b/apps/webapp/app/services/telemetry.server.ts @@ -28,18 +28,32 @@ class Telemetry { } user = { - identify: ({ user, isNewUser }: { user: User; isNewUser: boolean }) => { + identify: ({ + user, + isNewUser, + referralSource, + }: { + user: User; + isNewUser: boolean; + referralSource?: string; + }) => { if (this.#posthogClient) { + const properties: Record = { + email: user.email, + name: user.name, + authenticationMethod: user.authenticationMethod, + admin: user.admin, + createdAt: user.createdAt, + isNewUser, + }; + + if (referralSource) { + properties.referralSource = referralSource; + } + this.#posthogClient.identify({ 
distinctId: user.id, - properties: { - email: user.email, - name: user.name, - authenticationMethod: user.authenticationMethod, - admin: user.admin, - createdAt: user.createdAt, - isNewUser, - }, + properties, }); } if (isNewUser) { diff --git a/apps/webapp/app/services/vercelIntegration.server.ts b/apps/webapp/app/services/vercelIntegration.server.ts new file mode 100644 index 00000000000..d9b70eae3a5 --- /dev/null +++ b/apps/webapp/app/services/vercelIntegration.server.ts @@ -0,0 +1,656 @@ +import type { + PrismaClient, + OrganizationProjectIntegration, + OrganizationIntegration, + SecretReference, +} from "@trigger.dev/database"; +import { ResultAsync } from "neverthrow"; +import { prisma, $transaction } from "~/db.server"; +import { logger } from "~/services/logger.server"; +import { VercelIntegrationRepository } from "~/models/vercelIntegration.server"; +import { findCurrentWorkerDeployment } from "~/v3/models/workerDeployment.server"; +import { + VercelProjectIntegrationDataSchema, + VercelProjectIntegrationData, + VercelIntegrationConfig, + SyncEnvVarsMapping, + TriggerEnvironmentType, + EnvSlug, + envTypeToSlug, + createDefaultVercelIntegrationData, +} from "~/v3/vercel/vercelProjectIntegrationSchema"; + +export type VercelProjectIntegrationWithParsedData = OrganizationProjectIntegration & { + parsedIntegrationData: VercelProjectIntegrationData; +}; + +export type VercelProjectIntegrationWithData = VercelProjectIntegrationWithParsedData & { + organizationIntegration: OrganizationIntegration; +}; + +export type VercelProjectIntegrationWithProject = VercelProjectIntegrationWithData & { + project: { + id: string; + name: string; + slug: string; + }; +}; + +export class VercelIntegrationService { + #prismaClient: PrismaClient; + + constructor(prismaClient: PrismaClient = prisma) { + this.#prismaClient = prismaClient; + } + + async getVercelProjectIntegration( + projectId: string, + ): Promise { + const integration = await 
this.#prismaClient.organizationProjectIntegration.findFirst({ + where: { + projectId, + deletedAt: null, + organizationIntegration: { + service: "VERCEL", + deletedAt: null, + }, + }, + include: { + organizationIntegration: true, + }, + }); + + if (!integration) { + return null; + } + + const parsedData = VercelProjectIntegrationDataSchema.safeParse(integration.integrationData); + + if (!parsedData.success) { + logger.error("Failed to parse Vercel integration data", { + projectId, + integrationId: integration.id, + error: parsedData.error, + }); + return null; + } + + return { + ...integration, + parsedIntegrationData: parsedData.data, + }; + } + + async getConnectedVercelProjects( + organizationId: string + ): Promise { + const integrations = await this.#prismaClient.organizationProjectIntegration.findMany({ + where: { + deletedAt: null, + organizationIntegration: { + organizationId, + service: "VERCEL", + deletedAt: null, + }, + }, + include: { + organizationIntegration: true, + project: { + select: { + id: true, + name: true, + slug: true, + }, + }, + }, + }); + + return integrations + .map((integration) => { + const parsedData = VercelProjectIntegrationDataSchema.safeParse(integration.integrationData); + if (!parsedData.success) { + logger.error("Failed to parse Vercel integration data", { + integrationId: integration.id, + error: parsedData.error, + }); + return null; + } + + return { + ...integration, + parsedIntegrationData: parsedData.data, + }; + }) + .filter((i): i is VercelProjectIntegrationWithProject => i !== null); + } + + async createVercelProjectIntegration(params: { + organizationIntegrationId: string; + projectId: string; + vercelProjectId: string; + vercelProjectName: string; + vercelTeamId: string | null; + vercelTeamSlug?: string; + installedByUserId?: string; + }): Promise { + const integrationData = createDefaultVercelIntegrationData( + params.vercelProjectId, + params.vercelProjectName, + params.vercelTeamId, + params.vercelTeamSlug + ); + + 
return this.#prismaClient.organizationProjectIntegration.create({ + data: { + organizationIntegrationId: params.organizationIntegrationId, + projectId: params.projectId, + externalEntityId: params.vercelProjectId, + integrationData: integrationData, + installedBy: params.installedByUserId, + }, + }); + } + + async selectVercelProject(params: { + organizationId: string; + projectId: string; + vercelProjectId: string; + vercelProjectName: string; + userId: string; + }): Promise<{ + integration: OrganizationProjectIntegration; + syncResult: { success: boolean; errors: string[] }; + }> { + const orgIntegration = await VercelIntegrationRepository.findVercelOrgIntegrationByOrganization( + params.organizationId + ); + + if (!orgIntegration) { + throw new Error("No Vercel organization integration found"); + } + + const teamId = await VercelIntegrationRepository.getTeamIdFromIntegration(orgIntegration); + + const vercelTeamSlug = await VercelIntegrationRepository.getVercelClient(orgIntegration) + .andThen((client) => VercelIntegrationRepository.getTeamSlug(client, teamId)) + .match( + (slug) => slug, + () => undefined + ); + + // Use a serializable transaction to prevent duplicate project integrations + // from concurrent selectVercelProject calls (read-then-write race condition). + const txResult = await $transaction( + this.#prismaClient, + "selectVercelProject", + async (tx) => { + const existing = await tx.organizationProjectIntegration.findFirst({ + where: { + projectId: params.projectId, + deletedAt: null, + organizationIntegration: { + service: "VERCEL", + deletedAt: null, + }, + }, + include: { + organizationIntegration: true, + }, + }); + + if (existing) { + const parsedData = VercelProjectIntegrationDataSchema.safeParse( + existing.integrationData + ); + + const updated = await tx.organizationProjectIntegration.update({ + where: { id: existing.id }, + data: { + externalEntityId: params.vercelProjectId, + integrationData: { + ...(parsedData.success ? 
parsedData.data : {}), + vercelProjectId: params.vercelProjectId, + vercelProjectName: params.vercelProjectName, + vercelTeamId: teamId, + vercelTeamSlug, + }, + }, + }); + + return { + integration: updated, + wasCreated: false, + vercelStagingEnvironment: parsedData.success + ? parsedData.data.config.vercelStagingEnvironment + : null, + }; + } + + const integrationData = createDefaultVercelIntegrationData( + params.vercelProjectId, + params.vercelProjectName, + teamId, + vercelTeamSlug + ); + + const created = await tx.organizationProjectIntegration.create({ + data: { + organizationIntegrationId: orgIntegration.id, + projectId: params.projectId, + externalEntityId: params.vercelProjectId, + integrationData: integrationData, + installedBy: params.userId, + }, + }); + + return { + integration: created, + wasCreated: true, + vercelStagingEnvironment: null, + }; + }, + { isolationLevel: "Serializable" } + ); + + if (!txResult) { + throw new Error("Failed to select Vercel project: transaction returned undefined"); + } + + const { integration, wasCreated, vercelStagingEnvironment } = txResult; + + const syncResultAsync = await VercelIntegrationRepository.syncApiKeysToVercel({ + projectId: params.projectId, + vercelProjectId: params.vercelProjectId, + teamId, + vercelStagingEnvironment, + orgIntegration, + }); + const syncResult = syncResultAsync.isOk() + ? 
{ success: syncResultAsync.value.errors.length === 0, errors: syncResultAsync.value.errors } + : { success: false, errors: [syncResultAsync.error.message] }; + + if (wasCreated) { + const disableResult = await VercelIntegrationRepository.getVercelClient(orgIntegration) + .andThen((client) => + VercelIntegrationRepository.disableAutoAssignCustomDomains( + client, + params.vercelProjectId, + teamId + ) + ); + + if (disableResult.isErr()) { + logger.warn("Failed to disable autoAssignCustomDomains during project selection", { + projectId: params.projectId, + vercelProjectId: params.vercelProjectId, + error: disableResult.error.message, + }); + } + + logger.info("Vercel project selected and API keys synced", { + projectId: params.projectId, + vercelProjectId: params.vercelProjectId, + vercelProjectName: params.vercelProjectName, + syncSuccess: syncResult.success, + syncErrors: syncResult.errors, + }); + } + + return { integration, syncResult }; + } + + async updateVercelIntegrationConfig( + projectId: string, + configUpdates: Partial + ): Promise { + const existing = await this.getVercelProjectIntegration(projectId); + if (!existing) { + return null; + } + + const updatedConfig = { + ...existing.parsedIntegrationData.config, + ...configUpdates, + }; + + const updatedData: VercelProjectIntegrationData = { + ...existing.parsedIntegrationData, + config: updatedConfig, + }; + + const updated = await this.#prismaClient.organizationProjectIntegration.update({ + where: { id: existing.id }, + data: { + integrationData: updatedData, + }, + }); + + if (!updatedConfig.atomicBuilds?.includes("prod")) { + return { ...updated, parsedIntegrationData: updatedData }; + } + + const orgIntegration = await VercelIntegrationRepository.findVercelOrgIntegrationForProject( + projectId + ); + + if (orgIntegration) { + await this.#syncTriggerVersionToVercelProduction( + projectId, + updatedConfig.atomicBuilds, + orgIntegration + ); + } + + return { + ...updated, + parsedIntegrationData: 
updatedData, + }; + } + + async updateSyncEnvVarsMapping( + projectId: string, + syncEnvVarsMapping: SyncEnvVarsMapping + ): Promise { + const existing = await this.getVercelProjectIntegration(projectId); + if (!existing) { + return null; + } + + const updatedData: VercelProjectIntegrationData = { + ...existing.parsedIntegrationData, + syncEnvVarsMapping, + }; + + const updated = await this.#prismaClient.organizationProjectIntegration.update({ + where: { id: existing.id }, + data: { + integrationData: updatedData, + }, + }); + + return { + ...updated, + parsedIntegrationData: updatedData, + }; + } + + async updateSyncEnvVarForEnvironment( + projectId: string, + envVarKey: string, + environmentType: TriggerEnvironmentType, + syncEnabled: boolean + ): Promise { + const existing = await this.getVercelProjectIntegration(projectId); + if (!existing) { + return null; + } + + const currentMapping = existing.parsedIntegrationData.syncEnvVarsMapping || {}; + const envSlug = envTypeToSlug(environmentType); + + const currentEnvSettings = currentMapping[envSlug] || {}; + + const updatedMapping: SyncEnvVarsMapping = { + ...currentMapping, + [envSlug]: { + ...currentEnvSettings, + [envVarKey]: syncEnabled, + }, + }; + + const updatedData: VercelProjectIntegrationData = { + ...existing.parsedIntegrationData, + syncEnvVarsMapping: updatedMapping, + }; + + const updated = await this.#prismaClient.organizationProjectIntegration.update({ + where: { id: existing.id }, + data: { + integrationData: updatedData, + }, + }); + + return { + ...updated, + parsedIntegrationData: updatedData, + }; + } + + async removeSyncEnvVarForEnvironment( + projectId: string, + envVarKey: string, + environmentType: TriggerEnvironmentType + ): Promise { + const existing = await this.getVercelProjectIntegration(projectId); + if (!existing) return; + + const currentMapping = existing.parsedIntegrationData.syncEnvVarsMapping || {}; + const envSlug = envTypeToSlug(environmentType); + const currentEnvSettings = 
currentMapping[envSlug]; + if (!currentEnvSettings || !(envVarKey in currentEnvSettings)) return; + + const { [envVarKey]: _, ...rest } = currentEnvSettings; + const updatedMapping = { ...currentMapping, [envSlug]: rest }; + + await this.#prismaClient.organizationProjectIntegration.update({ + where: { id: existing.id }, + data: { + integrationData: { + ...existing.parsedIntegrationData, + syncEnvVarsMapping: updatedMapping, + }, + }, + }); + } + + async completeOnboarding( + projectId: string, + params: { + vercelStagingEnvironment?: { environmentId: string; displayName: string } | null; + pullEnvVarsBeforeBuild?: EnvSlug[] | null; + atomicBuilds?: EnvSlug[] | null; + discoverEnvVars?: EnvSlug[] | null; + syncEnvVarsMapping?: SyncEnvVarsMapping; + } + ): Promise { + const existing = await this.getVercelProjectIntegration(projectId); + if (!existing) { + return null; + } + + const syncEnvVarsMapping = params.syncEnvVarsMapping ?? { "dev":{}, "stg":{}, "prod":{}, "preview":{} }; + const updatedData: VercelProjectIntegrationData = { + ...existing.parsedIntegrationData, + config: { + ...existing.parsedIntegrationData.config, + pullEnvVarsBeforeBuild: params.pullEnvVarsBeforeBuild ?? null, + atomicBuilds: params.atomicBuilds ?? null, + discoverEnvVars: params.discoverEnvVars ?? null, + vercelStagingEnvironment: params.vercelStagingEnvironment ?? 
null, + }, + //This is intentionally not updated here, in case of resetting the onboarding it should not override the existing mapping with an empty one + syncEnvVarsMapping: existing.parsedIntegrationData.syncEnvVarsMapping, + onboardingCompleted: true, + }; + + const updated = await this.#prismaClient.organizationProjectIntegration.update({ + where: { id: existing.id }, + data: { + integrationData: updatedData, + }, + }); + + const orgIntegration = await VercelIntegrationRepository.findVercelOrgIntegrationForProject( + projectId + ); + + if (orgIntegration) { + const teamId = await VercelIntegrationRepository.getTeamIdFromIntegration(orgIntegration); + + const pullResult = await VercelIntegrationRepository.pullEnvVarsFromVercel({ + projectId, + vercelProjectId: updatedData.vercelProjectId, + teamId, + vercelStagingEnvironment: params.vercelStagingEnvironment, + syncEnvVarsMapping, + orgIntegration, + }); + + if (pullResult.isErr()) { + logger.error("Failed to pull env vars from Vercel during onboarding", { + projectId, + error: pullResult.error.message, + }); + } else if (pullResult.value.errors.length > 0) { + logger.warn("Errors pulling env vars from Vercel during onboarding", { + projectId, + errors: pullResult.value.errors, + }); + } + + await this.#syncTriggerVersionToVercelProduction( + projectId, + updatedData.config.atomicBuilds, + orgIntegration + ); + } + + return { + ...updated, + parsedIntegrationData: updatedData, + }; + } + + async #syncTriggerVersionToVercelProduction( + projectId: string, + atomicBuilds: string[] | null | undefined, + orgIntegration: OrganizationIntegration & { tokenReference: SecretReference } + ): Promise { + if (!atomicBuilds?.includes("prod")) { + return; + } + + const prodEnvironment = await this.#prismaClient.runtimeEnvironment.findFirst({ + where: { + projectId, + type: "PRODUCTION", + }, + select: { + id: true, + }, + }); + + if (!prodEnvironment) { + return; + } + + const currentDeployment = await 
findCurrentWorkerDeployment({ + environmentId: prodEnvironment.id, + }); + + if (!currentDeployment?.version) { + return; + } + + const clientResult = await VercelIntegrationRepository.getVercelClient(orgIntegration); + if (clientResult.isErr()) { + logger.error("Failed to get Vercel client for TRIGGER_VERSION sync", { + projectId, + error: clientResult.error.message, + }); + return; + } + const client = clientResult.value; + const teamId = await VercelIntegrationRepository.getTeamIdFromIntegration(orgIntegration); + + // Get the Vercel project ID from the project integration + const projectIntegration = await this.#prismaClient.organizationProjectIntegration.findFirst({ + where: { + projectId, + organizationIntegrationId: orgIntegration.id, + deletedAt: null, + }, + select: { + externalEntityId: true, + }, + }); + + if (!projectIntegration) { + return; + } + + const vercelProjectId = projectIntegration.externalEntityId; + + // Check if TRIGGER_VERSION already exists targeting production + const envVarsResult = await VercelIntegrationRepository.getVercelEnvironmentVariables( + client, + vercelProjectId, + teamId + ); + + if (envVarsResult.isErr()) { + logger.warn("Failed to fetch Vercel env vars for TRIGGER_VERSION sync", { + projectId, + vercelProjectId, + error: envVarsResult.error.message, + }); + return; + } + + const existingTriggerVersion = envVarsResult.value.find( + (env) => env.key === "TRIGGER_VERSION" && env.target.includes("production") + ); + + if (existingTriggerVersion) { + return; + } + + // Push TRIGGER_VERSION to Vercel production + const createResult = await ResultAsync.fromPromise( + client.projects.createProjectEnv({ + idOrName: vercelProjectId, + ...(teamId && { teamId }), + upsert: "true", + requestBody: { + key: "TRIGGER_VERSION", + value: currentDeployment.version, + target: ["production"] as any, + type: "encrypted", + }, + }), + (error) => error + ); + + if (createResult.isErr()) { + logger.error("Failed to sync TRIGGER_VERSION to Vercel 
production", { + projectId, + vercelProjectId, + error: createResult.error instanceof Error ? createResult.error.message : String(createResult.error), + }); + return; + } + + logger.info("Synced TRIGGER_VERSION to Vercel production", { + projectId, + vercelProjectId, + version: currentDeployment.version, + }); + } + + async disconnectVercelProject(projectId: string): Promise { + const existing = await this.getVercelProjectIntegration(projectId); + if (!existing) { + return false; + } + + await this.#prismaClient.organizationProjectIntegration.update({ + where: { id: existing.id }, + data: { + deletedAt: new Date(), + }, + }); + + return true; + } +} + diff --git a/apps/webapp/app/utils/pathBuilder.ts b/apps/webapp/app/utils/pathBuilder.ts index 639f2f72947..d030243f2dd 100644 --- a/apps/webapp/app/utils/pathBuilder.ts +++ b/apps/webapp/app/utils/pathBuilder.ts @@ -121,6 +121,14 @@ export function organizationSettingsPath(organization: OrgForPath) { return `${organizationPath(organization)}/settings`; } +export function organizationIntegrationsPath(organization: OrgForPath) { + return `${organizationPath(organization)}/settings/integrations`; +} + +export function organizationVercelIntegrationPath(organization: OrgForPath) { + return `${organizationIntegrationsPath(organization)}/vercel`; +} + function organizationParam(organization: OrgForPath) { return organization.slug; } @@ -151,6 +159,22 @@ export function githubAppInstallPath(organizationSlug: string, redirectTo: strin )}`; } +export function vercelAppInstallPath(organizationSlug: string, projectSlug: string) { + return `/vercel/install?org_slug=${organizationSlug}&project_slug=${projectSlug}`; +} + +export function vercelCallbackPath() { + return `/vercel/callback`; +} + +export function vercelResourcePath( + organizationSlug: string, + projectSlug: string, + environmentSlug: string +) { + return `/resources/orgs/${organizationSlug}/projects/${projectSlug}/env/${environmentSlug}/vercel`; +} + export function 
v3EnvironmentPath( organization: OrgForPath, project: ProjectForPath, diff --git a/apps/webapp/app/v3/environmentVariables/environmentVariablesRepository.server.ts b/apps/webapp/app/v3/environmentVariables/environmentVariablesRepository.server.ts index 0ade9436d47..39d0c863cbc 100644 --- a/apps/webapp/app/v3/environmentVariables/environmentVariablesRepository.server.ts +++ b/apps/webapp/app/v3/environmentVariables/environmentVariablesRepository.server.ts @@ -6,9 +6,12 @@ import { env } from "~/env.server"; import { getSecretStore } from "~/services/secrets/secretStore.server"; import { generateFriendlyId } from "../friendlyIdentifiers"; import { + type CreateEnvironmentVariables, type CreateResult, type DeleteEnvironmentVariable, type DeleteEnvironmentVariableValue, + type EditEnvironmentVariable, + type EditEnvironmentVariableValue, type EnvironmentVariable, type EnvironmentVariableWithSecret, type ProjectEnvironmentVariable, @@ -45,18 +48,7 @@ const SecretValue = z.object({ secret: z.string() }); export class EnvironmentVariablesRepository implements Repository { constructor(private prismaClient: PrismaClient = prisma) {} - async create( - projectId: string, - options: { - override: boolean; - environmentIds: string[]; - isSecret?: boolean; - variables: { - key: string; - value: string; - }[]; - } - ): Promise { + async create(projectId: string, options: CreateEnvironmentVariables): Promise { const project = await this.prismaClient.project.findFirst({ where: { id: projectId, @@ -164,10 +156,49 @@ export class EnvironmentVariablesRepository implements Repository { prismaClient: tx, }); + // If parentEnvironmentId is provided and isSecret is not explicitly set, + // look up if the parent has this variable marked as secret + let inheritedIsSecret: boolean | undefined = undefined; + if (options.isSecret === undefined && options.parentEnvironmentId) { + const parentVariableValue = await tx.environmentVariableValue.findFirst({ + where: { + variableId: 
environmentVariable.id, + environmentId: options.parentEnvironmentId, + }, + select: { + isSecret: true, + }, + }); + if (parentVariableValue?.isSecret) { + inheritedIsSecret = true; + } + } + + const effectiveIsSecret = options.isSecret ?? inheritedIsSecret; + //set the secret values and references for (const environmentId of options.environmentIds) { const key = secretKey(projectId, environmentId, variable.key); + const existingValueRecord = await tx.environmentVariableValue.findFirst({ + where: { + variableId: environmentVariable.id, + environmentId, + }, + }); + + // Check if value already exists and is the same, and no metadata change (e.g. isSecret toggle) + const existingSecret = await secretStore.getSecret(SecretValue, key); + const canSkip = + existingSecret && + existingSecret.secret === variable.value && + existingValueRecord && + (options.isSecret === undefined || + existingValueRecord.isSecret === options.isSecret); + if (canSkip) { + continue; + } + //create the secret reference const secretReference = await tx.secretReference.upsert({ where: { @@ -180,23 +211,36 @@ export class EnvironmentVariablesRepository implements Repository { update: {}, }); - const variableValue = await tx.environmentVariableValue.upsert({ - where: { - variableId_environmentId: { + if (existingValueRecord) { + await tx.environmentVariableValue.update({ + where: { + id: existingValueRecord.id, + }, + data: { + version: { + increment: 1, + }, + ...(options.lastUpdatedBy ? { lastUpdatedBy: options.lastUpdatedBy } : {}), + valueReferenceId: secretReference.id, + ...(options.isSecret !== undefined + ? { + isSecret: options.isSecret, + } + : {}), + }, + }); + } else { + await tx.environmentVariableValue.create({ + data: { variableId: environmentVariable.id, - environmentId, + environmentId: environmentId, + valueReferenceId: secretReference.id, + isSecret: effectiveIsSecret, + version: 1, + lastUpdatedBy: options.lastUpdatedBy ? 
options.lastUpdatedBy : Prisma.JsonNull, }, - }, - create: { - variableId: environmentVariable.id, - environmentId: environmentId, - valueReferenceId: secretReference.id, - isSecret: options.isSecret, - }, - update: { - isSecret: options.isSecret, - }, - }); + }); + } await secretStore.setSecret<{ secret: string }>(key, { secret: variable.value, @@ -226,14 +270,7 @@ export class EnvironmentVariablesRepository implements Repository { } } - async edit( - projectId: string, - options: { - values: { value: string; environmentId: string }[]; - id: string; - keepEmptyValues?: boolean; - } - ): Promise { + async edit(projectId: string, options: EditEnvironmentVariable): Promise { const project = await this.prismaClient.project.findFirst({ where: { id: projectId, @@ -323,6 +360,20 @@ export class EnvironmentVariablesRepository implements Repository { await secretStore.setSecret<{ secret: string }>(key, { secret: value.value, }); + await tx.environmentVariableValue.update({ + where: { + variableId_environmentId: { + variableId: environmentVariable.id, + environmentId: value.environmentId, + }, + }, + data: { + version: { + increment: 1, + }, + lastUpdatedBy: options.lastUpdatedBy ? options.lastUpdatedBy : undefined, + }, + }); } continue; } @@ -340,6 +391,8 @@ export class EnvironmentVariablesRepository implements Repository { variableId: environmentVariable.id, environmentId: value.environmentId, valueReferenceId: secretReference.id, + version: 1, + lastUpdatedBy: options.lastUpdatedBy ? 
options.lastUpdatedBy : Prisma.JsonNull, }, }); @@ -360,14 +413,7 @@ export class EnvironmentVariablesRepository implements Repository { } } - async editValue( - projectId: string, - options: { - id: string; - environmentId: string; - value: string; - } - ): Promise { + async editValue(projectId: string, options: EditEnvironmentVariableValue): Promise { const project = await this.prismaClient.project.findFirst({ where: { id: projectId, @@ -426,6 +472,21 @@ export class EnvironmentVariablesRepository implements Repository { await secretStore.setSecret<{ secret: string }>(key, { secret: options.value, }); + + await tx.environmentVariableValue.update({ + where: { + variableId_environmentId: { + variableId: environmentVariable.id, + environmentId: options.environmentId, + }, + }, + data: { + version: { + increment: 1, + }, + lastUpdatedBy: options.lastUpdatedBy ? options.lastUpdatedBy : undefined, + }, + }); }); return { diff --git a/apps/webapp/app/v3/environmentVariables/repository.ts b/apps/webapp/app/v3/environmentVariables/repository.ts index 521e22f7a28..ea027bc2ca8 100644 --- a/apps/webapp/app/v3/environmentVariables/repository.ts +++ b/apps/webapp/app/v3/environmentVariables/repository.ts @@ -6,9 +6,25 @@ export const EnvironmentVariableKey = z .nonempty("Key is required") .regex(/^\w+$/, "Keys can only use alphanumeric characters and underscores"); +export const EnvironmentVariableUpdaterSchema = z.discriminatedUnion("type", [ + z.object({ + type: z.literal("user"), + userId: z.string(), + }), + z.object({ + type: z.literal("integration"), + integration: z.string(), + }), +]); +export type EnvironmentVariableUpdater = z.infer; + export const CreateEnvironmentVariables = z.object({ + override: z.boolean(), environmentIds: z.array(z.string()), + isSecret: z.boolean().optional(), + parentEnvironmentId: z.string().optional(), variables: z.array(z.object({ key: EnvironmentVariableKey, value: z.string() })), + lastUpdatedBy: 
EnvironmentVariableUpdaterSchema.optional(), }); export type CreateEnvironmentVariables = z.infer; @@ -32,6 +48,7 @@ export const EditEnvironmentVariable = z.object({ }) ), keepEmptyValues: z.boolean().optional(), + lastUpdatedBy: EnvironmentVariableUpdaterSchema.optional(), }); export type EditEnvironmentVariable = z.infer; @@ -51,6 +68,7 @@ export const EditEnvironmentVariableValue = z.object({ id: z.string(), environmentId: z.string(), value: z.string(), + lastUpdatedBy: EnvironmentVariableUpdaterSchema.optional(), }); export type EditEnvironmentVariableValue = z.infer; diff --git a/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts b/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts index a27d7380942..debb176da57 100644 --- a/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts +++ b/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts @@ -22,6 +22,7 @@ import { environmentTitle } from "~/components/environments/EnvironmentLabel"; import { type Prisma, type prisma, type PrismaClientOrTransaction } from "~/db.server"; import { env } from "~/env.server"; import { + isIntegrationForService, type OrganizationIntegrationForService, OrgIntegrationRepository, } from "~/models/orgIntegration.server"; @@ -644,7 +645,7 @@ export class DeliverAlertService extends BaseService { }, }); - if (!integration) { + if (!integration || !isIntegrationForService(integration, "SLACK")) { logger.error("[DeliverAlert] Slack integration not found", { alert, }); diff --git a/apps/webapp/app/v3/services/initializeDeployment.server.ts b/apps/webapp/app/v3/services/initializeDeployment.server.ts index 52a968792c5..96439d94d61 100644 --- a/apps/webapp/app/v3/services/initializeDeployment.server.ts +++ b/apps/webapp/app/v3/services/initializeDeployment.server.ts @@ -221,6 +221,7 @@ export class InitializeDeploymentService extends BaseService { imageReference: imageRef, imagePlatform: env.DEPLOY_IMAGE_PLATFORM, git: payload.gitMeta ?? 
undefined, + commitSHA: payload.gitMeta?.commitSha ?? undefined, runtime: payload.runtime ?? undefined, triggeredVia: payload.triggeredVia ?? undefined, startedAt: initialStatus === "BUILDING" ? new Date() : undefined, diff --git a/apps/webapp/app/v3/vercel/index.ts b/apps/webapp/app/v3/vercel/index.ts new file mode 100644 index 00000000000..f34f0b64c6b --- /dev/null +++ b/apps/webapp/app/v3/vercel/index.ts @@ -0,0 +1,17 @@ +export * from "./vercelProjectIntegrationSchema"; + +export function getVercelInstallParams(request: Request) { + const url = new URL(request.url); + const code = url.searchParams.get("code"); + const configurationId = url.searchParams.get("configurationId"); + const integration = url.searchParams.get("integration"); + const next = url.searchParams.get("next"); + + if (code && configurationId && (integration === "vercel" || !integration)) { + return { code, configurationId, next }; + } + + return null; +} + + diff --git a/apps/webapp/app/v3/vercel/vercelOAuthState.server.ts b/apps/webapp/app/v3/vercel/vercelOAuthState.server.ts new file mode 100644 index 00000000000..31f42acc879 --- /dev/null +++ b/apps/webapp/app/v3/vercel/vercelOAuthState.server.ts @@ -0,0 +1,40 @@ +import { generateJWT, validateJWT } from "@trigger.dev/core/v3/jwt"; +import { z } from "zod"; +import { env } from "~/env.server"; + +export const VercelOAuthStateSchema = z.object({ + organizationId: z.string(), + projectId: z.string(), + environmentSlug: z.string(), + organizationSlug: z.string(), + projectSlug: z.string(), +}); + +export type VercelOAuthState = z.infer; + +export async function generateVercelOAuthState( + params: VercelOAuthState +): Promise { + return generateJWT({ + secretKey: env.ENCRYPTION_KEY, + payload: params, + expirationTime: "15m", + }); +} + +export async function validateVercelOAuthState( + token: string +): Promise<{ ok: true; state: VercelOAuthState } | { ok: false; error: string }> { + const result = await validateJWT(token, env.ENCRYPTION_KEY); 
+ + if (!result.ok) { + return { ok: false, error: result.error }; + } + + const parseResult = VercelOAuthStateSchema.safeParse(result.payload); + if (!parseResult.success) { + return { ok: false, error: "Invalid state payload" }; + } + + return { ok: true, state: parseResult.data }; +} diff --git a/apps/webapp/app/v3/vercel/vercelProjectIntegrationSchema.ts b/apps/webapp/app/v3/vercel/vercelProjectIntegrationSchema.ts new file mode 100644 index 00000000000..213e730c643 --- /dev/null +++ b/apps/webapp/app/v3/vercel/vercelProjectIntegrationSchema.ts @@ -0,0 +1,225 @@ +import { Result } from "neverthrow"; +import { z } from "zod"; + +export const EnvSlugSchema = z.enum(["dev", "stg", "prod", "preview"]); +export type EnvSlug = z.infer; + +export const ALL_ENV_SLUGS: EnvSlug[] = ["dev", "stg", "prod", "preview"]; + +const safeJsonParse = Result.fromThrowable( + (val: string) => JSON.parse(val) as unknown, + () => null +); + +/** + * Zod transform for form fields that submit JSON-encoded arrays. + * Parses the string as JSON and returns the array, or null if invalid. + */ +export const jsonArrayField = z.string().optional().transform((val) => { + if (!val) return null; + return safeJsonParse(val).match( + (parsed) => (Array.isArray(parsed) ? parsed : null), + () => null + ); +}); + +/** + * Zod transform for form fields that submit JSON-encoded EnvSlug arrays. + * Parses the string as JSON and validates each element is a valid EnvSlug. + * Invalid elements are filtered out rather than rejecting the whole array. 
+ */ +export const envSlugArrayField = z.string().optional().transform((val): EnvSlug[] | null => { + if (!val) return null; + return safeJsonParse(val).match( + (parsed) => { + if (!Array.isArray(parsed)) return null; + return parsed.filter((item): item is EnvSlug => EnvSlugSchema.safeParse(item).success); + }, + () => null + ); +}); + +export const VercelIntegrationConfigSchema = z.object({ + atomicBuilds: z.array(EnvSlugSchema).nullable().optional(), + pullEnvVarsBeforeBuild: z.array(EnvSlugSchema).nullable().optional(), + /** Maps a custom Vercel environment to Trigger.dev's staging environment. */ + vercelStagingEnvironment: z.object({ + environmentId: z.string(), + displayName: z.string(), + }).nullable().optional(), + discoverEnvVars: z.array(EnvSlugSchema).nullable().optional(), +}); + +export type VercelIntegrationConfig = z.infer; + +export const TriggerEnvironmentType = z.enum(["PRODUCTION", "STAGING", "PREVIEW", "DEVELOPMENT"]); +export type TriggerEnvironmentType = z.infer; + +/** + * Per-environment, per-variable sync settings. + * Missing env slug = sync all vars. Missing var in env = sync by default. + * Only explicitly `false` entries disable sync. 
+ */ +export const SyncEnvVarsMappingSchema = z.record(EnvSlugSchema, z.record(z.string(), z.boolean())).default({}); + +export type SyncEnvVarsMapping = z.infer; + +export const VercelProjectIntegrationDataSchema = z.object({ + config: VercelIntegrationConfigSchema, + syncEnvVarsMapping: SyncEnvVarsMappingSchema, + vercelProjectName: z.string(), + vercelTeamId: z.string().nullable(), + vercelTeamSlug: z.string().optional(), + vercelProjectId: z.string(), + onboardingCompleted: z.boolean().optional(), +}); + +export type VercelProjectIntegrationData = z.infer; + +export function createDefaultVercelIntegrationData( + vercelProjectId: string, + vercelProjectName: string, + vercelTeamId: string | null, + vercelTeamSlug?: string +): VercelProjectIntegrationData { + return { + config: { + atomicBuilds: ["prod"], + pullEnvVarsBeforeBuild: ["prod", "stg", "preview"], + discoverEnvVars: ["prod", "stg", "preview"], + vercelStagingEnvironment: null, + }, + syncEnvVarsMapping: {}, + vercelProjectId, + vercelProjectName, + vercelTeamId, + vercelTeamSlug, + }; +} + +/** + * Maps a Trigger.dev environment type to its Vercel target identifier(s). + * Returns null for STAGING when no custom environment is configured. + */ +export function envTypeToVercelTarget( + envType: TriggerEnvironmentType, + stagingEnvironmentId?: string | null +): string[] | null { + switch (envType) { + case "PRODUCTION": + return ["production"]; + case "STAGING": + return stagingEnvironmentId ? 
[stagingEnvironmentId] : null; + case "PREVIEW": + return ["preview"]; + case "DEVELOPMENT": + return ["development"]; + } +} + +export function getAvailableEnvSlugs( + hasStagingEnvironment: boolean, + hasPreviewEnvironment: boolean +): EnvSlug[] { + return ALL_ENV_SLUGS.filter((s) => { + if (s === "stg" && !hasStagingEnvironment) return false; + if (s === "preview" && !hasPreviewEnvironment) return false; + return true; + }); +} + +export function getAvailableEnvSlugsForBuildSettings( + hasStagingEnvironment: boolean, + hasPreviewEnvironment: boolean +): EnvSlug[] { + return getAvailableEnvSlugs(hasStagingEnvironment, hasPreviewEnvironment).filter((s) => s !== "dev"); +} + +export function isDiscoverEnvVarsEnabledForEnvironment( + discoverEnvVars: EnvSlug[] | null | undefined, + environmentType: TriggerEnvironmentType +): boolean { + if (!discoverEnvVars || discoverEnvVars.length === 0) { + return false; + } + const envSlug = envTypeToSlug(environmentType); + return discoverEnvVars.includes(envSlug); +} + +export function envTypeToSlug(environmentType: TriggerEnvironmentType): EnvSlug { + switch (environmentType) { + case "DEVELOPMENT": + return "dev"; + case "STAGING": + return "stg"; + case "PRODUCTION": + return "prod"; + case "PREVIEW": + return "preview"; + } +} + +export function envSlugToType(slug: EnvSlug): TriggerEnvironmentType { + switch (slug) { + case "dev": + return "DEVELOPMENT"; + case "stg": + return "STAGING"; + case "prod": + return "PRODUCTION"; + case "preview": + return "PREVIEW"; + } +} + +export function shouldSyncEnvVar( + mapping: SyncEnvVarsMapping, + envVarName: string, + environmentType: TriggerEnvironmentType +): boolean { + const envSlug = envTypeToSlug(environmentType); + const envSettings = mapping[envSlug]; + if (!envSettings) { + return true; + } + return envSettings[envVarName] !== false; +} + +export function shouldSyncEnvVarForAnyEnvironment( + mapping: SyncEnvVarsMapping, + envVarName: string +): boolean { + for (const slug 
of ALL_ENV_SLUGS) { + const envSettings = mapping[slug]; + if (!envSettings) { + return true; + } + if (envSettings[envVarName] !== false) { + return true; + } + } + + return false; +} + +export function isPullEnvVarsEnabledForEnvironment( + pullEnvVarsBeforeBuild: EnvSlug[] | null | undefined, + environmentType: TriggerEnvironmentType +): boolean { + if (!pullEnvVarsBeforeBuild || pullEnvVarsBeforeBuild.length === 0) { + return false; + } + const envSlug = envTypeToSlug(environmentType); + return pullEnvVarsBeforeBuild.includes(envSlug); +} + +export function isAtomicBuildsEnabledForEnvironment( + atomicBuilds: EnvSlug[] | null | undefined, + environmentType: TriggerEnvironmentType +): boolean { + if (!atomicBuilds || atomicBuilds.length === 0) { + return false; + } + const envSlug = envTypeToSlug(environmentType); + return atomicBuilds.includes(envSlug); +} diff --git a/apps/webapp/app/v3/vercel/vercelUrls.server.ts b/apps/webapp/app/v3/vercel/vercelUrls.server.ts new file mode 100644 index 00000000000..957e0d2907b --- /dev/null +++ b/apps/webapp/app/v3/vercel/vercelUrls.server.ts @@ -0,0 +1,26 @@ +/** + * Validates `next` parameter from Vercel callbacks. + * Only allows vercel.com subdomains (the expected source) and same-origin relative paths. 
+ */ +export function sanitizeVercelNextUrl(url: string | undefined | null): string | undefined { + if (!url) return undefined; + + // Allow relative paths (same-origin) but reject protocol-relative URLs + if (url.startsWith("/") && !url.startsWith("//")) { + return url; + } + + try { + const parsed = new URL(url); + if ( + parsed.protocol === "https:" && + /^([a-z0-9-]+\.)*vercel\.com$/i.test(parsed.hostname) + ) { + return parsed.toString(); + } + } catch { + // Invalid URL + } + + return undefined; +} diff --git a/apps/webapp/package.json b/apps/webapp/package.json index 51a468b50c0..e2ea2cd5e24 100644 --- a/apps/webapp/package.json +++ b/apps/webapp/package.json @@ -129,6 +129,7 @@ "@unkey/cache": "^1.5.0", "@unkey/error": "^0.2.0", "@upstash/ratelimit": "^1.1.3", + "@vercel/sdk": "^1.19.1", "@whatwg-node/fetch": "^0.9.14", "ai": "^4.3.19", "assert-never": "^1.2.1", diff --git a/apps/webapp/test/vercelUrls.test.ts b/apps/webapp/test/vercelUrls.test.ts new file mode 100644 index 00000000000..9e3d81630f1 --- /dev/null +++ b/apps/webapp/test/vercelUrls.test.ts @@ -0,0 +1,56 @@ +import { describe, it, expect } from "vitest"; +import { sanitizeVercelNextUrl } from "../app/v3/vercel/vercelUrls.server"; + +describe("sanitizeVercelNextUrl", () => { + it("returns undefined for null/undefined/empty", () => { + expect(sanitizeVercelNextUrl(null)).toBeUndefined(); + expect(sanitizeVercelNextUrl(undefined)).toBeUndefined(); + expect(sanitizeVercelNextUrl("")).toBeUndefined(); + }); + + it("allows relative paths", () => { + expect(sanitizeVercelNextUrl("/dashboard")).toBe("/dashboard"); + expect(sanitizeVercelNextUrl("/some/path?query=1")).toBe("/some/path?query=1"); + }); + + it("rejects protocol-relative URLs", () => { + expect(sanitizeVercelNextUrl("//evil.com/path")).toBeUndefined(); + }); + + it("allows vercel.com URLs", () => { + expect(sanitizeVercelNextUrl("https://vercel.com/dashboard")).toBe( + "https://vercel.com/dashboard" + ); + 
expect(sanitizeVercelNextUrl("https://app.vercel.com/settings")).toBe( + "https://app.vercel.com/settings" + ); + }); + + it("allows vercel.com subdomains", () => { + expect(sanitizeVercelNextUrl("https://my-team.vercel.com/project")).toBe( + "https://my-team.vercel.com/project" + ); + }); + + it("rejects non-vercel HTTPS URLs", () => { + expect(sanitizeVercelNextUrl("https://evil.com/path")).toBeUndefined(); + expect(sanitizeVercelNextUrl("https://not-vercel.com")).toBeUndefined(); + expect(sanitizeVercelNextUrl("https://vercel.com.evil.com")).toBeUndefined(); + }); + + it("rejects HTTP vercel.com URLs", () => { + expect(sanitizeVercelNextUrl("http://vercel.com/dashboard")).toBeUndefined(); + }); + + it("rejects javascript: URLs", () => { + expect(sanitizeVercelNextUrl("javascript:alert(1)")).toBeUndefined(); + }); + + it("rejects data: URLs", () => { + expect(sanitizeVercelNextUrl("data:text/html,")).toBeUndefined(); + }); + + it("rejects invalid URLs", () => { + expect(sanitizeVercelNextUrl("not a url at all")).toBeUndefined(); + }); +}); diff --git a/internal-packages/database/prisma/migrations/20260126175159_add_environment_variable_versioning/migration.sql b/internal-packages/database/prisma/migrations/20260126175159_add_environment_variable_versioning/migration.sql new file mode 100644 index 00000000000..17f013f388b --- /dev/null +++ b/internal-packages/database/prisma/migrations/20260126175159_add_environment_variable_versioning/migration.sql @@ -0,0 +1,3 @@ +-- AlterTable +ALTER TABLE "public"."EnvironmentVariableValue" ADD COLUMN "lastUpdatedBy" JSONB, +ADD COLUMN "version" INTEGER NOT NULL DEFAULT 1; diff --git a/internal-packages/database/prisma/migrations/20260129162621_add_organization_project_integration/migration.sql b/internal-packages/database/prisma/migrations/20260129162621_add_organization_project_integration/migration.sql new file mode 100644 index 00000000000..2c18bd2e1da --- /dev/null +++ 
b/internal-packages/database/prisma/migrations/20260129162621_add_organization_project_integration/migration.sql @@ -0,0 +1,29 @@ +-- CreateTable +CREATE TABLE "public"."OrganizationProjectIntegration" ( + "id" TEXT NOT NULL, + "organizationIntegrationId" TEXT NOT NULL, + "projectId" TEXT NOT NULL, + "externalEntityId" TEXT NOT NULL, + "integrationData" JSONB NOT NULL, + "installedBy" TEXT, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL, + "deletedAt" TIMESTAMP(3), + + CONSTRAINT "OrganizationProjectIntegration_pkey" PRIMARY KEY ("id") +); + +-- CreateIndex +CREATE INDEX "OrganizationProjectIntegration_projectId_idx" ON "public"."OrganizationProjectIntegration"("projectId"); + +-- CreateIndex +CREATE INDEX "OrganizationProjectIntegration_projectId_organizationIntegr_idx" ON "public"."OrganizationProjectIntegration"("projectId", "organizationIntegrationId"); + +-- CreateIndex +CREATE INDEX "OrganizationProjectIntegration_externalEntityId_idx" ON "public"."OrganizationProjectIntegration"("externalEntityId"); + +-- AddForeignKey +ALTER TABLE "public"."OrganizationProjectIntegration" ADD CONSTRAINT "OrganizationProjectIntegration_organizationIntegrationId_fkey" FOREIGN KEY ("organizationIntegrationId") REFERENCES "public"."OrganizationIntegration"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "public"."OrganizationProjectIntegration" ADD CONSTRAINT "OrganizationProjectIntegration_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "public"."Project"("id") ON DELETE CASCADE ON UPDATE CASCADE; diff --git a/internal-packages/database/prisma/migrations/20260129162810_add_integration_deployment/migration.sql b/internal-packages/database/prisma/migrations/20260129162810_add_integration_deployment/migration.sql new file mode 100644 index 00000000000..987d643810c --- /dev/null +++ b/internal-packages/database/prisma/migrations/20260129162810_add_integration_deployment/migration.sql @@ -0,0 +1,22 
@@ +-- CreateTable +CREATE TABLE "public"."IntegrationDeployment" ( + "id" TEXT NOT NULL, + "integrationName" TEXT NOT NULL, + "integrationDeploymentId" TEXT NOT NULL, + "commitSHA" TEXT NOT NULL, + "deploymentId" TEXT, + "status" TEXT, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + + CONSTRAINT "IntegrationDeployment_pkey" PRIMARY KEY ("id") +); + +-- CreateIndex +CREATE INDEX "IntegrationDeployment_deploymentId_idx" ON "public"."IntegrationDeployment"("deploymentId"); + +-- CreateIndex +CREATE INDEX "IntegrationDeployment_commitSHA_idx" ON "public"."IntegrationDeployment"("commitSHA"); + +-- AddForeignKey +ALTER TABLE "public"."IntegrationDeployment" ADD CONSTRAINT "IntegrationDeployment_deploymentId_fkey" FOREIGN KEY ("deploymentId") REFERENCES "public"."WorkerDeployment"("id") ON DELETE SET NULL ON UPDATE CASCADE; \ No newline at end of file diff --git a/internal-packages/database/prisma/migrations/20260129162946_alter_tables_for_integrations_data/migration.sql b/internal-packages/database/prisma/migrations/20260129162946_alter_tables_for_integrations_data/migration.sql new file mode 100644 index 00000000000..345d337f187 --- /dev/null +++ b/internal-packages/database/prisma/migrations/20260129162946_alter_tables_for_integrations_data/migration.sql @@ -0,0 +1,9 @@ +-- AlterEnum +ALTER TYPE "public"."IntegrationService" ADD VALUE 'VERCEL'; + +-- AlterTable +ALTER TABLE "public"."OrganizationIntegration" ADD COLUMN "deletedAt" TIMESTAMP(3), +ADD COLUMN "externalOrganizationId" TEXT; + +-- AlterTable +ALTER TABLE "public"."WorkerDeployment" ADD COLUMN "commitSHA" TEXT; diff --git a/internal-packages/database/prisma/migrations/20260129165555_add_organization_integration_idx/migration.sql b/internal-packages/database/prisma/migrations/20260129165555_add_organization_integration_idx/migration.sql new file mode 100644 index 00000000000..ac24fc4bdb0 --- /dev/null +++ 
b/internal-packages/database/prisma/migrations/20260129165555_add_organization_integration_idx/migration.sql @@ -0,0 +1,3 @@ +-- CreateIndex +CREATE INDEX CONCURRENTLY IF NOT EXISTS "OrganizationIntegration_externalOrganizationId_idx" ON "public"."OrganizationIntegration"("externalOrganizationId"); + diff --git a/internal-packages/database/prisma/migrations/20260129165809_add_worker_deployment_idx/migration.sql b/internal-packages/database/prisma/migrations/20260129165809_add_worker_deployment_idx/migration.sql new file mode 100644 index 00000000000..fcf74c0d978 --- /dev/null +++ b/internal-packages/database/prisma/migrations/20260129165809_add_worker_deployment_idx/migration.sql @@ -0,0 +1,3 @@ + +-- CreateIndex +CREATE INDEX CONCURRENTLY IF NOT EXISTS "WorkerDeployment_commitSHA_idx" ON "public"."WorkerDeployment"("commitSHA"); \ No newline at end of file diff --git a/internal-packages/database/prisma/schema.prisma b/internal-packages/database/prisma/schema.prisma index c76b411412c..a62980cde9b 100644 --- a/internal-packages/database/prisma/schema.prisma +++ b/internal-packages/database/prisma/schema.prisma @@ -384,28 +384,29 @@ model Project { /// The master queues they are allowed to use (impacts what they can set as default and trigger runs with) allowedWorkerQueues String[] @default([]) @map("allowedMasterQueues") - environments RuntimeEnvironment[] - backgroundWorkers BackgroundWorker[] - backgroundWorkerTasks BackgroundWorkerTask[] - taskRuns TaskRun[] - runTags TaskRunTag[] - taskQueues TaskQueue[] - environmentVariables EnvironmentVariable[] - checkpoints Checkpoint[] - WorkerDeployment WorkerDeployment[] - CheckpointRestoreEvent CheckpointRestoreEvent[] - taskSchedules TaskSchedule[] - alertChannels ProjectAlertChannel[] - alerts ProjectAlert[] - alertStorages ProjectAlertStorage[] - bulkActionGroups BulkActionGroup[] - BackgroundWorkerFile BackgroundWorkerFile[] - waitpoints Waitpoint[] - taskRunWaitpoints TaskRunWaitpoint[] - taskRunCheckpoints 
TaskRunCheckpoint[] - waitpointTags WaitpointTag[] - connectedGithubRepository ConnectedGithubRepository? - customerQueries CustomerQuery[] + environments RuntimeEnvironment[] + backgroundWorkers BackgroundWorker[] + backgroundWorkerTasks BackgroundWorkerTask[] + taskRuns TaskRun[] + runTags TaskRunTag[] + taskQueues TaskQueue[] + environmentVariables EnvironmentVariable[] + checkpoints Checkpoint[] + WorkerDeployment WorkerDeployment[] + CheckpointRestoreEvent CheckpointRestoreEvent[] + taskSchedules TaskSchedule[] + alertChannels ProjectAlertChannel[] + alerts ProjectAlert[] + alertStorages ProjectAlertStorage[] + bulkActionGroups BulkActionGroup[] + BackgroundWorkerFile BackgroundWorkerFile[] + waitpoints Waitpoint[] + taskRunWaitpoints TaskRunWaitpoint[] + taskRunCheckpoints TaskRunCheckpoint[] + waitpointTags WaitpointTag[] + connectedGithubRepository ConnectedGithubRepository? + organizationProjectIntegration OrganizationProjectIntegration[] + customerQueries CustomerQuery[] buildSettings Json? taskScheduleInstances TaskScheduleInstance[] @@ -1712,6 +1713,9 @@ model EnvironmentVariableValue { createdAt DateTime @default(now()) updatedAt DateTime @updatedAt + version Int @default(1) + lastUpdatedBy Json? + @@unique([variableId, environmentId]) } @@ -1825,9 +1829,10 @@ model WorkerDeployment { worker BackgroundWorker? @relation(fields: [workerId], references: [id], onDelete: Cascade, onUpdate: Cascade) workerId String? @unique - triggeredBy User? @relation(fields: [triggeredById], references: [id], onDelete: SetNull, onUpdate: Cascade) - triggeredById String? - triggeredVia String? + triggeredBy User? @relation(fields: [triggeredById], references: [id], onDelete: SetNull, onUpdate: Cascade) + triggeredById String? + triggeredVia String? + commitSHA String? startedAt DateTime? installedAt DateTime? 
@@ -1846,12 +1851,14 @@ model WorkerDeployment { createdAt DateTime @default(now()) updatedAt DateTime @updatedAt - promotions WorkerDeploymentPromotion[] - alerts ProjectAlert[] - workerInstance WorkerInstance[] + promotions WorkerDeploymentPromotion[] + alerts ProjectAlert[] + workerInstance WorkerInstance[] + integrationDeployments IntegrationDeployment[] @@unique([projectId, shortCode]) @@unique([environmentId, version]) + @@index([commitSHA]) } enum WorkerDeploymentStatus { @@ -2088,7 +2095,8 @@ model OrganizationIntegration { friendlyId String @unique - service IntegrationService + service IntegrationService + externalOrganizationId String? /// Identifier for external, integration's organization (e.g. Vercel's team) integrationData Json @@ -2100,12 +2108,39 @@ model OrganizationIntegration { createdAt DateTime @default(now()) updatedAt DateTime @updatedAt + deletedAt DateTime? - alertChannels ProjectAlertChannel[] + alertChannels ProjectAlertChannel[] + organizationProjectIntegration OrganizationProjectIntegration[] + + @@index([externalOrganizationId]) +} + +model OrganizationProjectIntegration { + id String @id @default(cuid()) + + organizationIntegration OrganizationIntegration @relation(fields: [organizationIntegrationId], references: [id], onDelete: Cascade, onUpdate: Cascade) + organizationIntegrationId String + + project Project @relation(fields: [projectId], references: [id], onDelete: Cascade, onUpdate: Cascade) + projectId String + + externalEntityId String /// Identifier for webhooks, for example Vercel's projectId + integrationData Json /// Save useful data like config or external entity name + installedBy String? /// UserId who installed the integration + + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + deletedAt DateTime? 
+ + @@index([projectId]) + @@index([projectId, organizationIntegrationId]) + @@index([externalEntityId]) } enum IntegrationService { SLACK + VERCEL } /// Bulk actions, like canceling and replaying runs @@ -2486,3 +2521,21 @@ model CustomerQuery { /// For Stripe metering job - find unprocessed queries @@index([createdAt]) } + +model IntegrationDeployment { + id String @id @default(cuid()) + + integrationName String /// For example Vercel + integrationDeploymentId String /// External ID + commitSHA String + deploymentId String? + status String? /// External deployment status + + workerDeployment WorkerDeployment? @relation(fields: [deploymentId], references: [id], onDelete: SetNull, onUpdate: Cascade) + + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + @@index([commitSHA]) + @@index([deploymentId]) +} diff --git a/packages/core/src/v3/schemas/api.ts b/packages/core/src/v3/schemas/api.ts index 0291d2a05c2..4cb5c965039 100644 --- a/packages/core/src/v3/schemas/api.ts +++ b/packages/core/src/v3/schemas/api.ts @@ -694,6 +694,7 @@ export const GetDeploymentResponseBody = z.object({ version: z.string(), imageReference: z.string().nullish(), imagePlatform: z.string(), + commitSHA: z.string().nullish(), externalBuildData: ExternalBuildData.optional().nullable(), errorData: DeploymentErrorData.nullish(), worker: z @@ -710,6 +711,17 @@ export const GetDeploymentResponseBody = z.object({ ), }) .optional(), + integrationDeployments: z + .array( + z.object({ + id: z.string(), + integrationName: z.string(), + integrationDeploymentId: z.string(), + commitSHA: z.string(), + createdAt: z.coerce.date(), + }) + ) + .nullish(), }); export type GetDeploymentResponseBody = z.infer; @@ -1139,6 +1151,12 @@ export const ImportEnvironmentVariablesRequestBody = z.object({ variables: z.record(z.string()), parentVariables: z.record(z.string()).optional(), override: z.boolean().optional(), + source: z + .discriminatedUnion("type", [ + z.object({ type: z.literal("user"), 
userId: z.string() }), + z.object({ type: z.literal("integration"), integration: z.string() }), + ]) + .optional(), }); export type ImportEnvironmentVariablesRequestBody = z.infer< diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 99024a016bb..7c88884a549 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -521,6 +521,9 @@ importers: '@upstash/ratelimit': specifier: ^1.1.3 version: 1.1.3(patch_hash=e5922e50fbefb7b2b24950c4b1c5c9ddc4cd25464439c9548d2298c432debe74) + '@vercel/sdk': + specifier: ^1.19.1 + version: 1.19.1 '@whatwg-node/fetch': specifier: ^0.9.14 version: 0.9.14 @@ -1417,7 +1420,7 @@ importers: version: 0.0.1-cli.2.80.0 '@modelcontextprotocol/sdk': specifier: ^1.25.2 - version: 1.25.2(hono@4.5.11)(supports-color@10.0.0)(zod@3.25.76) + version: 1.25.2(hono@4.11.8)(supports-color@10.0.0)(zod@3.25.76) '@opentelemetry/api': specifier: 1.9.0 version: 1.9.0 @@ -1785,7 +1788,7 @@ importers: version: 4.0.14 ai: specifier: ^6.0.0 - version: 6.0.39(zod@3.25.76) + version: 6.0.3(zod@3.25.76) defu: specifier: ^6.1.4 version: 6.1.4 @@ -2070,7 +2073,7 @@ importers: version: 8.5.4 ai: specifier: ^6.0.0 - version: 6.0.39(zod@3.25.76) + version: 6.0.3(zod@3.25.76) encoding: specifier: ^0.1.13 version: 0.1.13 @@ -2436,7 +2439,7 @@ importers: version: link:../../packages/trigger-sdk '@uploadthing/react': specifier: ^7.0.3 - version: 7.0.3(next@14.2.21(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@18.2.0(react@18.3.1))(react@18.3.1))(react@18.3.1)(uploadthing@7.1.0(express@5.0.1)(fastify@5.4.0)(next@14.2.21(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@18.2.0(react@18.3.1))(react@18.3.1))(tailwindcss@3.4.1)) + version: 7.0.3(next@14.2.21(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@18.2.0(react@18.3.1))(react@18.3.1))(react@18.3.1)(uploadthing@7.1.0(express@5.2.1)(fastify@5.4.0)(next@14.2.21(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@18.2.0(react@18.3.1))(react@18.3.1))(tailwindcss@3.4.1)) ai: specifier: 
^4.0.0 version: 4.0.0(react@18.3.1)(zod@3.25.76) @@ -2475,7 +2478,7 @@ importers: version: 1.0.7(tailwindcss@3.4.1) uploadthing: specifier: ^7.1.0 - version: 7.1.0(express@5.0.1)(fastify@5.4.0)(next@14.2.21(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@18.2.0(react@18.3.1))(react@18.3.1))(tailwindcss@3.4.1) + version: 7.1.0(express@5.2.1)(fastify@5.4.0)(next@14.2.21(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@18.2.0(react@18.3.1))(react@18.3.1))(tailwindcss@3.4.1) zod: specifier: 3.25.76 version: 3.25.76 @@ -2843,8 +2846,8 @@ packages: peerDependencies: zod: ^3.25.76 || ^4.1.8 - '@ai-sdk/gateway@3.0.16': - resolution: {integrity: sha512-OOY5CfRJiHvh/8np2vs1RQaCZ5hWv2qOeEmmeiABXK3gLQHUVnCO+1hhoLsZdHM5iElu6M407dAOfyvTsKJqcQ==} + '@ai-sdk/gateway@3.0.2': + resolution: {integrity: sha512-giJEg9ob45htbu3iautK+2kvplY2JnTj7ir4wZzYSQWvqGatWfBBfDuNCU5wSJt9BCGjymM5ZS9ziD42JGCZBw==} engines: {node: '>=18'} peerDependencies: zod: ^3.25.76 || ^4.1.8 @@ -2921,8 +2924,8 @@ packages: peerDependencies: zod: ^3.25.76 || ^4 - '@ai-sdk/provider-utils@4.0.8': - resolution: {integrity: sha512-ns9gN7MmpI8vTRandzgz+KK/zNMLzhrriiKECMt4euLtQFSBgNfydtagPOX4j4pS1/3KvHF6RivhT3gNQgBZsg==} + '@ai-sdk/provider-utils@4.0.1': + resolution: {integrity: sha512-de2v8gH9zj47tRI38oSxhQIewmNc+OZjYIOOaMoVWKL65ERSav2PYYZHPSPCrfOeLMkv+Dyh8Y0QGwkO29wMWQ==} engines: {node: '>=18'} peerDependencies: zod: ^3.25.76 || ^4.1.8 @@ -2947,8 +2950,8 @@ packages: resolution: {integrity: sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA==} engines: {node: '>=18'} - '@ai-sdk/provider@3.0.4': - resolution: {integrity: sha512-5KXyBOSEX+l67elrEa+wqo/LSsSTtrPj9Uoh3zMbe/ceQX4ucHI3b9nUEfNkGF3Ry1svv90widAt+aiKdIJasQ==} + '@ai-sdk/provider@3.0.0': + resolution: {integrity: sha512-m9ka3ptkPQbaHHZHqDXDF9C9B5/Mav0KTdky1k2HZ3/nrW2t1AgObxIVPyGDWQNS9FXT/FS6PIoSjpcP/No8rQ==} engines: {node: '>=18'} '@ai-sdk/react@1.0.0': @@ -5872,6 +5875,16 @@ packages: 
'@cfworker/json-schema': optional: true + '@modelcontextprotocol/sdk@1.26.0': + resolution: {integrity: sha512-Y5RmPncpiDtTXDbLKswIJzTqu2hyBKxTNsgKqKclDbhIgg1wgtf1fRuvxgTnRfcnxtvvgbIEcqUOzZrJ6iSReg==} + engines: {node: '>=18'} + peerDependencies: + '@cfworker/json-schema': ^4.1.1 + zod: ^3.25 || ^4.0 + peerDependenciesMeta: + '@cfworker/json-schema': + optional: true + '@msgpack/msgpack@3.0.0-beta2': resolution: {integrity: sha512-y+l1PNV0XDyY8sM3YtuMLK5vE3/hkfId+Do8pLo/OPxfxuFAUwcGz3oiiUuV46/aBpwTzZ+mRWVMtlSKbradhw==} engines: {node: '>= 14'} @@ -11094,8 +11107,8 @@ packages: resolution: {integrity: sha512-yNEQvPcVrK9sIe637+I0jD6leluPxzwJKx/Haw6F4H77CdDsszUn5V3o96LPziXkSNE2B83+Z3mjqGKBK/R6Gg==} engines: {node: '>= 20'} - '@vercel/oidc@3.1.0': - resolution: {integrity: sha512-Fw28YZpRnA3cAHHDlkt7xQHiJ0fcL+NRcIqsocZQUSmbzeIKRpwttJjik5ZGanXP+vlA4SbTg+AbA3bP363l+w==} + '@vercel/oidc@3.0.5': + resolution: {integrity: sha512-fnYhv671l+eTTp48gB4zEsTW/YtRgRPnkI2nT7x6qw5rkI1Lq2hTmQIpHPgyThI0znLK+vX2n9XxKdXZ7BUbbw==} engines: {node: '>= 20'} '@vercel/otel@1.13.0': @@ -11115,6 +11128,10 @@ packages: engines: {node: '>=18.14'} deprecated: '@vercel/postgres is deprecated. You can either choose an alternate storage solution from the Vercel Marketplace if you want to set up a new database. 
Or you can follow this guide to migrate your existing Vercel Postgres db: https://neon.com/docs/guides/vercel-postgres-transition-guide' + '@vercel/sdk@1.19.1': + resolution: {integrity: sha512-K4rmtUT6t1vX06tiY44ot8A7W1FKN7g/tMkE7yZghCgNQ8b30SzljBd4ni8RNp2pJzM/HrZmphRDeIArO7oZuw==} + hasBin: true + '@vitest/coverage-v8@3.1.4': resolution: {integrity: sha512-G4p6OtioySL+hPV7Y6JHlhpsODbJzt1ndwHAFkyk6vVjpK03PFsKnauZIzcd0PrK4zAbc5lc+jeZ+eNGiMA+iw==} peerDependencies: @@ -11435,8 +11452,8 @@ packages: peerDependencies: zod: ^3.25.76 || ^4.1.8 - ai@6.0.39: - resolution: {integrity: sha512-hF05gF4H+IxuilA8kNANVVHQXduTJsJaH74jmlmy8mcQt3NZgPYe2zZNyGBV4DPDYTUDt1h31hbLgQqJTn5LGA==} + ai@6.0.3: + resolution: {integrity: sha512-OOo+/C+sEyscoLnbY3w42vjQDICioVNyS+F+ogwq6O5RJL/vgWGuiLzFwuP7oHTeni/MkmX8tIge48GTdaV7QQ==} engines: {node: '>=18'} peerDependencies: zod: ^3.25.76 || ^4.1.8 @@ -11806,6 +11823,10 @@ packages: resolution: {integrity: sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==} engines: {node: '>=18'} + body-parser@2.2.2: + resolution: {integrity: sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA==} + engines: {node: '>=18'} + bottleneck@2.19.5: resolution: {integrity: sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==} @@ -12764,6 +12785,15 @@ packages: supports-color: optional: true + debug@4.4.3: + resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + decamelize-keys@1.1.1: resolution: {integrity: sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==} engines: {node: '>=0.10.0'} @@ -13720,6 +13750,12 @@ packages: peerDependencies: express: ^4.11 || 5 || ^5.0.0-beta.1 + express-rate-limit@8.2.1: + 
resolution: {integrity: sha512-PCZEIEIxqwhzw4KF0n7QF4QqruVTcF73O5kFKUnGOyjbCCgizBBiFaYpd/fnBLUMPw/BWw9OsiN7GgrNYr7j6g==} + engines: {node: '>= 16'} + peerDependencies: + express: '>= 4.11' + express@4.20.0: resolution: {integrity: sha512-pLdae7I6QqShF5PnNTCVn4hI91Dx0Grkn2+IAsMTgMIKuQVte2dN9PeGSSAME2FR8anOhVA62QDIUaWVfEXVLw==} engines: {node: '>= 0.10.0'} @@ -13728,6 +13764,10 @@ packages: resolution: {integrity: sha512-ORF7g6qGnD+YtUG9yx4DFoqCShNMmUKiXuT5oWMHiOvt/4WFbHC6yCwQMTSBMno7AqntNCAzzcnnjowRkTL9eQ==} engines: {node: '>= 18'} + express@5.2.1: + resolution: {integrity: sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw==} + engines: {node: '>= 18'} + exsolve@1.0.7: resolution: {integrity: sha512-VO5fQUzZtI6C+vx4w/4BWJpg3s/5l+6pRQEHzFRM8WFi4XffSP1Z+4qi7GbjWbvRQEbdIco5mIMq+zX4rPuLrw==} @@ -14381,6 +14421,10 @@ packages: hoist-non-react-statics@3.3.2: resolution: {integrity: sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==} + hono@4.11.8: + resolution: {integrity: sha512-eVkB/CYCCei7K2WElZW9yYQFWssG0DhaDhVvr7wy5jJ22K+ck8fWW0EsLpB0sITUTvPnc97+rrbQqIr5iqiy9Q==} + engines: {node: '>=16.9.0'} + hono@4.5.11: resolution: {integrity: sha512-62FcjLPtjAFwISVBUshryl+vbHOjg8rE4uIK/dxyR8GpLztunZpwFmfEvmJCUI7xoGh/Sr3CGCDPCmYxVw7wUQ==} engines: {node: '>=16.0.0'} @@ -14419,6 +14463,10 @@ packages: resolution: {integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==} engines: {node: '>= 0.8'} + http-errors@2.0.1: + resolution: {integrity: sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==} + engines: {node: '>= 0.8'} + http-proxy-agent@7.0.2: resolution: {integrity: sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==} engines: {node: '>= 14'} @@ -14467,6 +14515,10 @@ packages: resolution: {integrity: 
sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} engines: {node: '>=0.10.0'} + iconv-lite@0.7.2: + resolution: {integrity: sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==} + engines: {node: '>=0.10.0'} + icss-utils@5.1.0: resolution: {integrity: sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==} engines: {node: ^10 || ^12 || >= 14} @@ -14579,6 +14631,10 @@ packages: resolution: {integrity: sha512-1DKMMzlIHM02eBBVOFQ1+AolGjs6+xEcM4PDL7NqOS6szq7H9jSaEkIUH6/a5Hl241LzW6JLSiAbNvTQjUupUA==} engines: {node: '>=12.22.0'} + ip-address@10.0.1: + resolution: {integrity: sha512-NWv9YLW4PoW2B7xtzaS3NCot75m6nK7Icdv0o3lfMceJVRfSoQwqD4wEH5rLwoKJwUiZ/rfpiVBhnaF0FK4HoA==} + engines: {node: '>= 12'} + ip-address@9.0.5: resolution: {integrity: sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==} engines: {node: '>= 12'} @@ -15841,6 +15897,10 @@ packages: resolution: {integrity: sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg==} engines: {node: '>= 0.6'} + mime-db@1.54.0: + resolution: {integrity: sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==} + engines: {node: '>= 0.6'} + mime-types@2.1.35: resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} engines: {node: '>= 0.6'} @@ -15849,6 +15909,10 @@ packages: resolution: {integrity: sha512-XqoSHeCGjVClAmoGFG3lVFqQFRIrTVw2OH3axRqAcfaw+gHWIfnASS92AV+Rl/mk0MupgZTRHQOjxY6YVnzK5w==} engines: {node: '>= 0.6'} + mime-types@3.0.2: + resolution: {integrity: sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==} + engines: {node: '>=18'} + mime@1.6.0: resolution: {integrity: 
sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==} engines: {node: '>=4'} @@ -17498,6 +17562,10 @@ packages: resolution: {integrity: sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==} engines: {node: '>= 0.8'} + raw-body@3.0.2: + resolution: {integrity: sha512-K5zQjDllxWkf7Z5xJdV0/B0WTNqx6vxG70zJE4N0kBs4LovmEYWJzQGxC9bS9RAKu3bgM40lrd5zoLJ12MQ5BA==} + engines: {node: '>= 0.10'} + rc9@2.1.2: resolution: {integrity: sha512-btXCnMmRIBINM2LDZoEmOogIZU7Qe7zn4BpomSKZ/ykbLObuBdvG+mFq11DL6fjH1DRwHhrlgtYWG96bJiC7Cg==} @@ -18073,6 +18141,10 @@ packages: resolution: {integrity: sha512-/m/NSLxeYEgWNtyC+WtNHCF7jbGxOibVWKnn+1Psff4dJGOfoXP+MuC/f2CwSmyiHdOIzYnYFp4W6GxWfekaLA==} engines: {node: '>= 18'} + router@2.2.0: + resolution: {integrity: sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==} + engines: {node: '>= 18'} + rtl-css-js@1.16.1: resolution: {integrity: sha512-lRQgou1mu19e+Ya0LsTvKrVJ5TYUbqCVPAiImX3UfLTenarvPUl1QFdvu5Z3PYmHT9RCcwIfbjRQBntExyj3Zg==} @@ -18200,6 +18272,10 @@ packages: resolution: {integrity: sha512-v67WcEouB5GxbTWL/4NeToqcZiAWEq90N888fczVArY8A79J0L4FD7vj5hm3eUMua5EpoQ59wa/oovY6TLvRUA==} engines: {node: '>= 18'} + send@1.2.1: + resolution: {integrity: sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ==} + engines: {node: '>= 18'} + serialize-javascript@6.0.1: resolution: {integrity: sha512-owoXEFjWRllis8/M1Q+Cw5k8ZH40e3zhp/ovX+Xr/vi1qj6QesbyXXViFbpNvWvPNAD62SutwEXavefrLJWj7w==} @@ -18214,6 +18290,10 @@ packages: resolution: {integrity: sha512-A3We5UfEjG8Z7VkDv6uItWw6HY2bBSBJT1KtVESn6EOoOr2jAxNhxWCLY3jDE2WcuHXByWju74ck3ZgLwL8xmA==} engines: {node: '>= 18'} + serve-static@2.2.1: + resolution: {integrity: sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw==} + engines: {node: '>= 18'} + set-blocking@2.0.0: resolution: {integrity: 
sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==} @@ -18513,6 +18593,10 @@ packages: resolution: {integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==} engines: {node: '>= 0.8'} + statuses@2.0.2: + resolution: {integrity: sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==} + engines: {node: '>= 0.8'} + std-env@3.7.0: resolution: {integrity: sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg==} @@ -18824,10 +18908,6 @@ packages: resolution: {integrity: sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==} engines: {node: '>=6'} - tapable@2.2.2: - resolution: {integrity: sha512-Re10+NauLTMCudc7T5WLFLAwDhQ0JWdrMK+9B2M8zR5hRExKmsRDCBA7/aV/pNJFltmBFO5BAMlQFi/vq3nKOg==} - engines: {node: '>=6'} - tapable@2.3.0: resolution: {integrity: sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==} engines: {node: '>=6'} @@ -19311,6 +19391,10 @@ packages: resolution: {integrity: sha512-gd0sGezQYCbWSbkZr75mln4YBidWUN60+devscpLF5mtRDUpiaTvKpBNrdaCvel1NdR2k6vclXybU5fBd2i+nw==} engines: {node: '>= 0.6'} + type-is@2.0.1: + resolution: {integrity: sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==} + engines: {node: '>= 0.6'} + typed-array-buffer@1.0.2: resolution: {integrity: sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==} engines: {node: '>= 0.4'} @@ -20079,6 +20163,11 @@ packages: peerDependencies: zod: ^3.25 || ^4 + zod-to-json-schema@3.25.1: + resolution: {integrity: sha512-pM/SU9d3YAggzi6MtR4h7ruuQlqKtad8e9S0fmxcMi+ueAK5Korys/aWcV9LIIHTVbj01NdzxcnXSN+O74ZIVA==} + peerDependencies: + zod: ^3.25 || ^4 + zod-validation-error@1.5.0: resolution: {integrity: 
sha512-/7eFkAI4qV0tcxMBB/3+d2c1P6jzzZYdYSlBuAklzMuCrJu5bzJfHS0yVAS87dRHVlhftd6RFJDIvv03JgkSbw==} engines: {node: '>=16.0.0'} @@ -20132,11 +20221,11 @@ snapshots: '@vercel/oidc': 3.0.3 zod: 3.25.76 - '@ai-sdk/gateway@3.0.16(zod@3.25.76)': + '@ai-sdk/gateway@3.0.2(zod@3.25.76)': dependencies: - '@ai-sdk/provider': 3.0.4 - '@ai-sdk/provider-utils': 4.0.8(zod@3.25.76) - '@vercel/oidc': 3.1.0 + '@ai-sdk/provider': 3.0.0 + '@ai-sdk/provider-utils': 4.0.1(zod@3.25.76) + '@vercel/oidc': 3.0.5 zod: 3.25.76 '@ai-sdk/openai@1.0.1(zod@3.25.76)': @@ -20216,9 +20305,9 @@ snapshots: zod: 3.25.76 zod-to-json-schema: 3.24.6(zod@3.25.76) - '@ai-sdk/provider-utils@4.0.8(zod@3.25.76)': + '@ai-sdk/provider-utils@4.0.1(zod@3.25.76)': dependencies: - '@ai-sdk/provider': 3.0.4 + '@ai-sdk/provider': 3.0.0 '@standard-schema/spec': 1.1.0 eventsource-parser: 3.0.6 zod: 3.25.76 @@ -20243,7 +20332,7 @@ snapshots: dependencies: json-schema: 0.4.0 - '@ai-sdk/provider@3.0.4': + '@ai-sdk/provider@3.0.0': dependencies: json-schema: 0.4.0 @@ -23756,9 +23845,9 @@ snapshots: dependencies: hono: 4.5.11 - '@hono/node-server@1.19.9(hono@4.5.11)': + '@hono/node-server@1.19.9(hono@4.11.8)': dependencies: - hono: 4.5.11 + hono: 4.11.8 '@hono/node-ws@1.0.4(@hono/node-server@1.12.2(hono@4.5.11))(bufferutil@4.0.9)': dependencies: @@ -24039,7 +24128,7 @@ snapshots: '@jridgewell/source-map@0.3.3': dependencies: '@jridgewell/gen-mapping': 0.3.8 - '@jridgewell/trace-mapping': 0.3.25 + '@jridgewell/trace-mapping': 0.3.31 '@jridgewell/sourcemap-codec@1.5.0': {} @@ -24220,9 +24309,9 @@ snapshots: '@microsoft/fetch-event-source@2.0.1': {} - '@modelcontextprotocol/sdk@1.25.2(hono@4.5.11)(supports-color@10.0.0)(zod@3.25.76)': + '@modelcontextprotocol/sdk@1.25.2(hono@4.11.8)(supports-color@10.0.0)(zod@3.25.76)': dependencies: - '@hono/node-server': 1.19.9(hono@4.5.11) + '@hono/node-server': 1.19.9(hono@4.11.8) ajv: 8.17.1 ajv-formats: 3.0.1(ajv@8.17.1) content-type: 1.0.5 @@ -24242,6 +24331,28 @@ snapshots: - hono - 
supports-color + '@modelcontextprotocol/sdk@1.26.0(zod@3.25.76)': + dependencies: + '@hono/node-server': 1.19.9(hono@4.11.8) + ajv: 8.17.1 + ajv-formats: 3.0.1(ajv@8.17.1) + content-type: 1.0.5 + cors: 2.8.5 + cross-spawn: 7.0.6 + eventsource: 3.0.5 + eventsource-parser: 3.0.6 + express: 5.2.1 + express-rate-limit: 8.2.1(express@5.2.1) + hono: 4.11.8 + jose: 6.1.3 + json-schema-typed: 8.0.2 + pkce-challenge: 5.0.0 + raw-body: 3.0.0 + zod: 3.25.76 + zod-to-json-schema: 3.25.1(zod@3.25.76) + transitivePeerDependencies: + - supports-color + '@msgpack/msgpack@3.0.0-beta2': {} '@neondatabase/serverless@0.9.5': @@ -31234,12 +31345,12 @@ snapshots: '@uploadthing/mime-types@0.3.0': {} - '@uploadthing/react@7.0.3(next@14.2.21(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@18.2.0(react@18.3.1))(react@18.3.1))(react@18.3.1)(uploadthing@7.1.0(express@5.0.1)(fastify@5.4.0)(next@14.2.21(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@18.2.0(react@18.3.1))(react@18.3.1))(tailwindcss@3.4.1))': + '@uploadthing/react@7.0.3(next@14.2.21(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@18.2.0(react@18.3.1))(react@18.3.1))(react@18.3.1)(uploadthing@7.1.0(express@5.2.1)(fastify@5.4.0)(next@14.2.21(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@18.2.0(react@18.3.1))(react@18.3.1))(tailwindcss@3.4.1))': dependencies: '@uploadthing/shared': 7.0.3 file-selector: 0.6.0 react: 18.3.1 - uploadthing: 7.1.0(express@5.0.1)(fastify@5.4.0)(next@14.2.21(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@18.2.0(react@18.3.1))(react@18.3.1))(tailwindcss@3.4.1) + uploadthing: 7.1.0(express@5.2.1)(fastify@5.4.0)(next@14.2.21(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@18.2.0(react@18.3.1))(react@18.3.1))(tailwindcss@3.4.1) optionalDependencies: next: 14.2.21(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@18.2.0(react@18.3.1))(react@18.3.1) @@ -31310,7 +31421,7 @@ snapshots: '@vercel/oidc@3.0.3': {} - 
'@vercel/oidc@3.1.0': {} + '@vercel/oidc@3.0.5': {} '@vercel/otel@1.13.0(@opentelemetry/api-logs@0.203.0)(@opentelemetry/api@1.9.0)(@opentelemetry/instrumentation@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/resources@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-logs@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-metrics@2.0.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))': dependencies: @@ -31330,6 +31441,14 @@ snapshots: transitivePeerDependencies: - utf-8-validate + '@vercel/sdk@1.19.1': + dependencies: + '@modelcontextprotocol/sdk': 1.26.0(zod@3.25.76) + zod: 3.25.76 + transitivePeerDependencies: + - '@cfworker/json-schema' + - supports-color + '@vitest/coverage-v8@3.1.4(vitest@3.1.4(@types/debug@4.1.12)(@types/node@20.14.14)(lightningcss@1.29.2)(terser@5.44.1))': dependencies: '@ampproject/remapping': 2.3.0 @@ -31739,11 +31858,11 @@ snapshots: '@opentelemetry/api': 1.9.0 zod: 3.25.76 - ai@6.0.39(zod@3.25.76): + ai@6.0.3(zod@3.25.76): dependencies: - '@ai-sdk/gateway': 3.0.16(zod@3.25.76) - '@ai-sdk/provider': 3.0.4 - '@ai-sdk/provider-utils': 4.0.8(zod@3.25.76) + '@ai-sdk/gateway': 3.0.2(zod@3.25.76) + '@ai-sdk/provider': 3.0.0 + '@ai-sdk/provider-utils': 4.0.1(zod@3.25.76) '@opentelemetry/api': 1.9.0 zod: 3.25.76 @@ -32170,6 +32289,20 @@ snapshots: transitivePeerDependencies: - supports-color + body-parser@2.2.2: + dependencies: + bytes: 3.1.2 + content-type: 1.0.5 + debug: 4.4.3 + http-errors: 2.0.0 + iconv-lite: 0.7.2 + on-finished: 2.4.1 + qs: 6.14.1 + raw-body: 3.0.2 + type-is: 2.0.1 + transitivePeerDependencies: + - supports-color + bottleneck@2.19.5: {} bowser@2.11.0: {} @@ -33158,6 +33291,10 @@ snapshots: optionalDependencies: supports-color: 10.0.0 + debug@4.4.3: + dependencies: + ms: 2.1.3 + decamelize-keys@1.1.1: dependencies: decamelize: 1.2.0 @@ -33515,7 +33652,7 @@ snapshots: enhanced-resolve@5.18.3: dependencies: graceful-fs: 4.2.11 - tapable: 2.2.2 + tapable: 2.3.0 enquirer@2.3.6: 
dependencies: @@ -34393,6 +34530,11 @@ snapshots: dependencies: express: 5.0.1(supports-color@10.0.0) + express-rate-limit@8.2.1(express@5.2.1): + dependencies: + express: 5.2.1 + ip-address: 10.0.1 + express@4.20.0: dependencies: accepts: 1.3.8 @@ -34466,6 +34608,39 @@ snapshots: transitivePeerDependencies: - supports-color + express@5.2.1: + dependencies: + accepts: 2.0.0 + body-parser: 2.2.2 + content-disposition: 1.0.0 + content-type: 1.0.5 + cookie: 0.7.1 + cookie-signature: 1.2.2 + debug: 4.4.1(supports-color@10.0.0) + depd: 2.0.0 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + finalhandler: 2.1.0(supports-color@10.0.0) + fresh: 2.0.0 + http-errors: 2.0.0 + merge-descriptors: 2.0.0 + mime-types: 3.0.0 + on-finished: 2.4.1 + once: 1.4.0 + parseurl: 1.3.3 + proxy-addr: 2.0.7 + qs: 6.14.1 + range-parser: 1.2.1 + router: 2.2.0 + send: 1.1.0(supports-color@10.0.0) + serve-static: 2.2.1 + statuses: 2.0.1 + type-is: 2.0.1 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + exsolve@1.0.7: {} extend@3.0.2: {} @@ -35301,6 +35476,8 @@ snapshots: dependencies: react-is: 16.13.1 + hono@4.11.8: {} + hono@4.5.11: {} hosted-git-info@2.8.9: {} @@ -35342,6 +35519,14 @@ snapshots: statuses: 2.0.1 toidentifier: 1.0.1 + http-errors@2.0.1: + dependencies: + depd: 2.0.0 + inherits: 2.0.4 + setprototypeof: 1.2.0 + statuses: 2.0.2 + toidentifier: 1.0.1 + http-proxy-agent@7.0.2: dependencies: agent-base: 7.1.4 @@ -35393,6 +35578,10 @@ snapshots: dependencies: safer-buffer: 2.1.2 + iconv-lite@0.7.2: + dependencies: + safer-buffer: 2.1.2 + icss-utils@5.1.0(postcss@8.4.35): dependencies: postcss: 8.4.35 @@ -35507,6 +35696,8 @@ snapshots: transitivePeerDependencies: - supports-color + ip-address@10.0.1: {} + ip-address@9.0.5: dependencies: jsbn: 1.1.0 @@ -37070,6 +37261,8 @@ snapshots: mime-db@1.53.0: {} + mime-db@1.54.0: {} + mime-types@2.1.35: dependencies: mime-db: 1.52.0 @@ -37078,6 +37271,10 @@ snapshots: dependencies: mime-db: 1.53.0 + mime-types@3.0.2: + 
dependencies: + mime-db: 1.54.0 + mime@1.6.0: {} mime@2.6.0: {} @@ -38790,6 +38987,13 @@ snapshots: iconv-lite: 0.6.3 unpipe: 1.0.0 + raw-body@3.0.2: + dependencies: + bytes: 3.1.2 + http-errors: 2.0.1 + iconv-lite: 0.7.2 + unpipe: 1.0.0 + rc9@2.1.2: dependencies: defu: 6.1.4 @@ -39705,6 +39909,16 @@ snapshots: parseurl: 1.3.3 path-to-regexp: 8.2.0 + router@2.2.0: + dependencies: + debug: 4.4.1(supports-color@10.0.0) + depd: 2.0.0 + is-promise: 4.0.0 + parseurl: 1.3.3 + path-to-regexp: 8.2.0 + transitivePeerDependencies: + - supports-color + rtl-css-js@1.16.1: dependencies: '@babel/runtime': 7.28.4 @@ -39866,6 +40080,22 @@ snapshots: transitivePeerDependencies: - supports-color + send@1.2.1: + dependencies: + debug: 4.4.3 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + fresh: 2.0.0 + http-errors: 2.0.1 + mime-types: 3.0.2 + ms: 2.1.3 + on-finished: 2.4.1 + range-parser: 1.2.1 + statuses: 2.0.2 + transitivePeerDependencies: + - supports-color + serialize-javascript@6.0.1: dependencies: randombytes: 2.1.0 @@ -39892,6 +40122,15 @@ snapshots: transitivePeerDependencies: - supports-color + serve-static@2.2.1: + dependencies: + encodeurl: 2.0.0 + escape-html: 1.0.3 + parseurl: 1.3.3 + send: 1.2.1 + transitivePeerDependencies: + - supports-color + set-blocking@2.0.0: {} set-cookie-parser@2.6.0: {} @@ -40313,6 +40552,8 @@ snapshots: statuses@2.0.1: {} + statuses@2.0.2: {} + std-env@3.7.0: {} std-env@3.8.1: {} @@ -40743,8 +40984,6 @@ snapshots: tapable@2.2.1: {} - tapable@2.2.2: {} - tapable@2.3.0: {} tar-fs@2.1.3: @@ -41262,6 +41501,12 @@ snapshots: media-typer: 1.1.0 mime-types: 3.0.0 + type-is@2.0.1: + dependencies: + content-type: 1.0.5 + media-typer: 1.1.0 + mime-types: 3.0.0 + typed-array-buffer@1.0.2: dependencies: call-bind: 1.0.8 @@ -41472,7 +41717,7 @@ snapshots: escalade: 3.2.0 picocolors: 1.1.1 - 
uploadthing@7.1.0(express@5.0.1)(fastify@5.4.0)(next@14.2.21(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@18.2.0(react@18.3.1))(react@18.3.1))(tailwindcss@3.4.1): + uploadthing@7.1.0(express@5.2.1)(fastify@5.4.0)(next@14.2.21(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@18.2.0(react@18.3.1))(react@18.3.1))(tailwindcss@3.4.1): dependencies: '@effect/platform': 0.63.2(@effect/schema@0.72.2(effect@3.7.2))(effect@3.7.2) '@effect/schema': 0.72.2(effect@3.7.2) @@ -41480,7 +41725,7 @@ snapshots: '@uploadthing/shared': 7.0.3 effect: 3.7.2 optionalDependencies: - express: 5.0.1(supports-color@10.0.0) + express: 5.2.1 fastify: 5.4.0 next: 14.2.21(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@18.2.0(react@18.3.1))(react@18.3.1) tailwindcss: 3.4.1 @@ -42118,6 +42363,10 @@ snapshots: dependencies: zod: 3.25.76 + zod-to-json-schema@3.25.1(zod@3.25.76): + dependencies: + zod: 3.25.76 + zod-validation-error@1.5.0(zod@3.25.76): dependencies: zod: 3.25.76 From eaed7d0ba4a6f5302218ae829fb208db402e9a7a Mon Sep 17 00:00:00 2001 From: Mihai Popescu Date: Tue, 10 Feb 2026 12:19:26 +0200 Subject: [PATCH 008/225] fix(webapp): UI/UX improvements for logs, query, and shortcuts (#2997) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## ✅ Checklist - [X] I have followed every step in the [contributing guide](https://github.com/triggerdotdev/trigger.dev/blob/main/CONTRIBUTING.md) - [X] The PR title follows the convention. - [X] I ran and tested the code works --- ## Testing Manually tested each implementation. 
--- ## Changelog * Updated Logs Page with the new implementation in time filter component * In TRQL editor users can now click on empty/blank spaces in the editor and the cursor will appear * Added CMD + / for line commenting in TRQL * Activated proper undo/redo functionality in CodeMirror (TRQL editor) * Added a check for new logs button, previously once the user got to the end of the logs he could not check for newer logs * Added showing MS in logs page Dates * Removed LOG_INFO internal logs, they are available with Admin Debug flag * Added support for correct timezone render on server side. * Increased CLICKHOUSE_LOGS_LIST_MAX_MEMORY_USAGE to 1GB * Changed Previous run/ Next run to J/K, consistent with previous/next page in Runs list --- apps/webapp/app/components/Shortcuts.tsx | 4 +- apps/webapp/app/components/TimezoneSetter.tsx | 30 +++++ .../webapp/app/components/code/TSQLEditor.tsx | 61 +++++++++- .../app/components/code/codeMirrorSetup.ts | 13 ++- .../app/components/logs/LogDetailView.tsx | 4 +- apps/webapp/app/components/logs/LogsTable.tsx | 20 ++-- .../app/components/primitives/DateTime.tsx | 49 ++++++--- apps/webapp/app/env.server.ts | 2 +- apps/webapp/app/hooks/useShortcutKeys.tsx | 6 +- .../presenters/v3/LogsListPresenter.server.ts | 2 + apps/webapp/app/root.tsx | 5 + .../route.tsx | 104 ++++++++++-------- .../route.tsx | 4 +- apps/webapp/app/routes/resources.timezone.ts | 43 ++++++++ .../preferences/uiPreferences.server.ts | 12 ++ 15 files changed, 270 insertions(+), 89 deletions(-) create mode 100644 apps/webapp/app/components/TimezoneSetter.tsx create mode 100644 apps/webapp/app/routes/resources.timezone.ts diff --git a/apps/webapp/app/components/Shortcuts.tsx b/apps/webapp/app/components/Shortcuts.tsx index a3fcd074988..df76bdc5223 100644 --- a/apps/webapp/app/components/Shortcuts.tsx +++ b/apps/webapp/app/components/Shortcuts.tsx @@ -139,8 +139,8 @@ function ShortcutContent() {
- - + + diff --git a/apps/webapp/app/components/TimezoneSetter.tsx b/apps/webapp/app/components/TimezoneSetter.tsx new file mode 100644 index 00000000000..3481af6571d --- /dev/null +++ b/apps/webapp/app/components/TimezoneSetter.tsx @@ -0,0 +1,30 @@ +import { useFetcher } from "@remix-run/react"; +import { useEffect, useRef } from "react"; +import { useTypedLoaderData } from "remix-typedjson"; +import type { loader } from "~/root"; + +export function TimezoneSetter() { + const { timezone: storedTimezone } = useTypedLoaderData(); + const fetcher = useFetcher(); + const hasSetTimezone = useRef(false); + + useEffect(() => { + if (hasSetTimezone.current) return; + + const browserTimezone = Intl.DateTimeFormat().resolvedOptions().timeZone; + + if (browserTimezone && browserTimezone !== storedTimezone) { + hasSetTimezone.current = true; + fetcher.submit( + { timezone: browserTimezone }, + { + method: "POST", + action: "/resources/timezone", + encType: "application/json", + } + ); + } + }, [storedTimezone, fetcher]); + + return null; +} diff --git a/apps/webapp/app/components/code/TSQLEditor.tsx b/apps/webapp/app/components/code/TSQLEditor.tsx index 998fd2da714..1641d9c3db5 100644 --- a/apps/webapp/app/components/code/TSQLEditor.tsx +++ b/apps/webapp/app/components/code/TSQLEditor.tsx @@ -1,7 +1,7 @@ import { sql, StandardSQL } from "@codemirror/lang-sql"; import { autocompletion, startCompletion } from "@codemirror/autocomplete"; import { linter, lintGutter } from "@codemirror/lint"; -import { EditorView } from "@codemirror/view"; +import { EditorView, keymap } from "@codemirror/view"; import type { ViewUpdate } from "@codemirror/view"; import { CheckIcon, ClipboardIcon, SparklesIcon, TrashIcon } from "@heroicons/react/20/solid"; import { @@ -60,6 +60,54 @@ const defaultProps: TSQLEditorDefaultProps = { schema: [], }; +// Toggle comment on current line or selected lines with -- comment symbol +const toggleLineComment = (view: EditorView): boolean => { + const { from, to 
} = view.state.selection.main; + const startLine = view.state.doc.lineAt(from); + // When `to` is exactly at the start of a line and there's an actual selection, + // the caret sits before that line — so exclude it by stepping back one position. + const adjustedTo = to > from && view.state.doc.lineAt(to).from === to ? to - 1 : to; + const endLine = view.state.doc.lineAt(adjustedTo); + + // Collect all lines in the selection + const lines: { from: number; to: number; text: string }[] = []; + for (let i = startLine.number; i <= endLine.number; i++) { + const line = view.state.doc.line(i); + lines.push({ from: line.from, to: line.to, text: line.text }); + } + + // Determine action: if all non-empty lines are commented, uncomment; otherwise comment + const allCommented = lines.every((line) => { + const trimmed = line.text.trimStart(); + return trimmed.length === 0 || trimmed.startsWith("--"); + }); + + const changes = lines + .map((line) => { + const trimmed = line.text.trimStart(); + if (trimmed.length === 0) return null; // skip empty lines + const indent = line.text.length - trimmed.length; + + if (allCommented) { + // Remove comment: strip "-- " or just "--" + const afterComment = trimmed.slice(2); + const newText = line.text.slice(0, indent) + afterComment.replace(/^\s/, ""); + return { from: line.from, to: line.to, insert: newText }; + } else { + // Add comment: prepend "-- " to the line content + const newText = line.text.slice(0, indent) + "-- " + trimmed; + return { from: line.from, to: line.to, insert: newText }; + } + }) + .filter((c): c is { from: number; to: number; insert: string } => c !== null); + + if (changes.length > 0) { + view.dispatch({ changes }); + } + + return true; +}; + export function TSQLEditor(opts: TSQLEditorProps) { const { defaultValue = "", @@ -133,6 +181,14 @@ export function TSQLEditor(opts: TSQLEditorProps) { ); } + // Add keyboard shortcut for toggling comments + exts.push( + keymap.of([ + { key: "Cmd-/", run: toggleLineComment }, 
+ { key: "Ctrl-/", run: toggleLineComment }, + ]) + ); + return exts; }, [schema, linterEnabled]); @@ -218,6 +274,9 @@ export function TSQLEditor(opts: TSQLEditorProps) { "min-h-0 flex-1 overflow-auto scrollbar-thin scrollbar-track-transparent scrollbar-thumb-charcoal-600" )} ref={editor} + onClick={() => { + view?.focus(); + }} onBlur={() => { if (!onBlur) return; if (!view) return; diff --git a/apps/webapp/app/components/code/codeMirrorSetup.ts b/apps/webapp/app/components/code/codeMirrorSetup.ts index 811a6ebc298..52a8e12a4d8 100644 --- a/apps/webapp/app/components/code/codeMirrorSetup.ts +++ b/apps/webapp/app/components/code/codeMirrorSetup.ts @@ -1,5 +1,5 @@ import { closeBrackets } from "@codemirror/autocomplete"; -import { indentWithTab } from "@codemirror/commands"; +import { indentWithTab, history, historyKeymap, undo, redo } from "@codemirror/commands"; import { bracketMatching } from "@codemirror/language"; import { lintKeymap } from "@codemirror/lint"; import { highlightSelectionMatches } from "@codemirror/search"; @@ -18,6 +18,7 @@ export function getEditorSetup(showLineNumbers = true, showHighlights = true): A const options = [ drawSelection(), dropCursor(), + history(), bracketMatching(), closeBrackets(), Prec.highest( @@ -31,7 +32,15 @@ export function getEditorSetup(showLineNumbers = true, showHighlights = true): A }, ]) ), - keymap.of([indentWithTab, ...lintKeymap]), + // Explicit undo/redo keybindings with high precedence + Prec.high( + keymap.of([ + { key: "Mod-z", run: undo }, + { key: "Mod-Shift-z", run: redo }, + { key: "Mod-y", run: redo }, + ]) + ), + keymap.of([indentWithTab, ...historyKeymap, ...lintKeymap]), ]; if (showLineNumbers) { diff --git a/apps/webapp/app/components/logs/LogDetailView.tsx b/apps/webapp/app/components/logs/LogDetailView.tsx index 22e2e288ac4..6b3a76b8a83 100644 --- a/apps/webapp/app/components/logs/LogDetailView.tsx +++ b/apps/webapp/app/components/logs/LogDetailView.tsx @@ -8,7 +8,7 @@ import { useEffect, useState 
} from "react"; import { useTypedFetcher } from "remix-typedjson"; import { cn } from "~/utils/cn"; import { Button } from "~/components/primitives/Buttons"; -import { DateTime } from "~/components/primitives/DateTime"; +import { DateTimeAccurate } from "~/components/primitives/DateTime"; import { Header2, Header3 } from "~/components/primitives/Headers"; import { Paragraph } from "~/components/primitives/Paragraph"; import { Spinner } from "~/components/primitives/Spinner"; @@ -234,7 +234,7 @@ function DetailsTab({ log, runPath, searchTerm }: { log: LogEntry; runPath: stri
Timestamp
- +
diff --git a/apps/webapp/app/components/logs/LogsTable.tsx b/apps/webapp/app/components/logs/LogsTable.tsx index e8e785ae791..a361d95c5e6 100644 --- a/apps/webapp/app/components/logs/LogsTable.tsx +++ b/apps/webapp/app/components/logs/LogsTable.tsx @@ -1,4 +1,5 @@ import { ArrowPathIcon, ArrowTopRightOnSquareIcon } from "@heroicons/react/20/solid"; +import { Link } from "@remix-run/react"; import { useEffect, useRef, useState } from "react"; import { cn } from "~/utils/cn"; import { Button } from "~/components/primitives/Buttons"; @@ -8,7 +9,7 @@ import { useProject } from "~/hooks/useProject"; import type { LogEntry } from "~/presenters/v3/LogsListPresenter.server"; import { getLevelColor, highlightSearchText } from "~/utils/logUtils"; import { v3RunSpanPath } from "~/utils/pathBuilder"; -import { DateTime } from "../primitives/DateTime"; +import { DateTimeAccurate } from "../primitives/DateTime"; import { Paragraph } from "../primitives/Paragraph"; import { Spinner } from "../primitives/Spinner"; import { TruncatedCopyableValue } from "../primitives/TruncatedCopyableValue"; @@ -24,8 +25,6 @@ import { TableRow, type TableVariant, } from "../primitives/Table"; -import { PopoverMenuItem } from "~/components/primitives/Popover"; -import { Link } from "@remix-run/react"; type LogsTableProps = { logs: LogEntry[]; @@ -34,6 +33,7 @@ type LogsTableProps = { isLoadingMore?: boolean; hasMore?: boolean; onLoadMore?: () => void; + onCheckForMore?: () => void; variant?: TableVariant; selectedLogId?: string; onLogSelect?: (logId: string) => void; @@ -63,6 +63,7 @@ export function LogsTable({ isLoadingMore = false, hasMore = false, onLoadMore, + onCheckForMore, selectedLogId, onLogSelect, }: LogsTableProps) { @@ -161,7 +162,7 @@ export function LogsTable({ boxShadow: getLevelBoxShadow(log.level), }} > - + @@ -203,20 +204,15 @@ export function LogsTable({ {/* Infinite scroll trigger */} {hasMore && logs.length > 0 && (
-
+
Loading more…
)} - {/* Show all logs message */} + {/* Show all logs message with check for more button */} {!hasMore && logs.length > 0 && (
-
+
Showing all {logs.length} logs
diff --git a/apps/webapp/app/components/primitives/DateTime.tsx b/apps/webapp/app/components/primitives/DateTime.tsx index d1bbbffb4a0..906bbf8b214 100644 --- a/apps/webapp/app/components/primitives/DateTime.tsx +++ b/apps/webapp/app/components/primitives/DateTime.tsx @@ -1,4 +1,5 @@ import { GlobeAltIcon, GlobeAmericasIcon } from "@heroicons/react/20/solid"; +import { useRouteLoaderData } from "@remix-run/react"; import { Laptop } from "lucide-react"; import { memo, type ReactNode, useMemo, useSyncExternalStore } from "react"; import { CopyButton } from "./CopyButton"; @@ -19,7 +20,7 @@ function getLocalTimeZone(): string { // For SSR compatibility: returns "UTC" on server, actual timezone on client function subscribeToTimeZone() { // No-op - timezone doesn't change - return () => { }; + return () => {}; } function getTimeZoneSnapshot(): string { @@ -39,6 +40,18 @@ export function useLocalTimeZone(): string { return useSyncExternalStore(subscribeToTimeZone, getTimeZoneSnapshot, getServerTimeZoneSnapshot); } +/** + * Hook to get the user's preferred timezone. + * Returns the timezone stored in the user's preferences cookie (from root loader), + * falling back to the browser's local timezone if not set. + */ +export function useUserTimeZone(): string { + const rootData = useRouteLoaderData("root") as { timezone?: string } | undefined; + const localTimeZone = useLocalTimeZone(); + // Use stored timezone from cookie, or fall back to browser's local timezone + return rootData?.timezone && rootData.timezone !== "UTC" ? rootData.timezone : localTimeZone; +} + type DateTimeProps = { date: Date | string; timeZone?: string; @@ -63,7 +76,7 @@ export const DateTime = ({ hour12 = true, }: DateTimeProps) => { const locales = useLocales(); - const localTimeZone = useLocalTimeZone(); + const userTimeZone = useUserTimeZone(); const realDate = useMemo(() => (typeof date === "string" ? 
new Date(date) : date), [date]); @@ -71,7 +84,7 @@ export const DateTime = ({ {formatDateTime( realDate, - timeZone ?? localTimeZone, + timeZone ?? userTimeZone, locales, includeSeconds, includeTime, @@ -91,7 +104,7 @@ export const DateTime = ({ } @@ -167,7 +180,7 @@ export function formatDateTimeISO(date: Date, timeZone: string): string { // New component that only shows date when it changes export const SmartDateTime = ({ date, previousDate = null, hour12 = true }: DateTimeProps) => { const locales = useLocales(); - const localTimeZone = useLocalTimeZone(); + const userTimeZone = useUserTimeZone(); const realDate = typeof date === "string" ? new Date(date) : date; const realPrevDate = previousDate ? typeof previousDate === "string" @@ -180,8 +193,8 @@ export const SmartDateTime = ({ date, previousDate = null, hour12 = true }: Date // Format with appropriate function const formattedDateTime = showDatePart - ? formatSmartDateTime(realDate, localTimeZone, locales, hour12) - : formatTimeOnly(realDate, localTimeZone, locales, hour12); + ? formatSmartDateTime(realDate, userTimeZone, locales, hour12) + : formatTimeOnly(realDate, userTimeZone, locales, hour12); return {formattedDateTime.replace(/\s/g, String.fromCharCode(32))}; }; @@ -235,14 +248,16 @@ function formatTimeOnly( const DateTimeAccurateInner = ({ date, - timeZone = "UTC", + timeZone, previousDate = null, showTooltip = true, hideDate = false, hour12 = true, }: DateTimeProps) => { const locales = useLocales(); - const localTimeZone = useLocalTimeZone(); + const userTimeZone = useUserTimeZone(); + // Use provided timeZone prop if available, otherwise fall back to user's preferred timezone + const displayTimeZone = timeZone ?? userTimeZone; const realDate = typeof date === "string" ? new Date(date) : date; const realPrevDate = previousDate ? 
typeof previousDate === "string" @@ -253,13 +268,13 @@ const DateTimeAccurateInner = ({ // Smart formatting based on whether date changed const formattedDateTime = useMemo(() => { return hideDate - ? formatTimeOnly(realDate, localTimeZone, locales, hour12) + ? formatTimeOnly(realDate, displayTimeZone, locales, hour12) : realPrevDate ? isSameDay(realDate, realPrevDate) - ? formatTimeOnly(realDate, localTimeZone, locales, hour12) - : formatDateTimeAccurate(realDate, localTimeZone, locales, hour12) - : formatDateTimeAccurate(realDate, localTimeZone, locales, hour12); - }, [realDate, localTimeZone, locales, hour12, hideDate, previousDate]); + ? formatTimeOnly(realDate, displayTimeZone, locales, hour12) + : formatDateTimeAccurate(realDate, displayTimeZone, locales, hour12) + : formatDateTimeAccurate(realDate, displayTimeZone, locales, hour12); + }, [realDate, displayTimeZone, locales, hour12, hideDate, previousDate]); if (!showTooltip) return {formattedDateTime.replace(/\s/g, String.fromCharCode(32))}; @@ -268,7 +283,7 @@ const DateTimeAccurateInner = ({ ); @@ -328,9 +343,9 @@ function formatDateTimeAccurate( export const DateTimeShort = ({ date, hour12 = true }: DateTimeProps) => { const locales = useLocales(); - const localTimeZone = useLocalTimeZone(); + const userTimeZone = useUserTimeZone(); const realDate = typeof date === "string" ? 
new Date(date) : date; - const formattedDateTime = formatDateTimeShort(realDate, localTimeZone, locales, hour12); + const formattedDateTime = formatDateTimeShort(realDate, userTimeZone, locales, hour12); return {formattedDateTime.replace(/\s/g, String.fromCharCode(32))}; }; diff --git a/apps/webapp/app/env.server.ts b/apps/webapp/app/env.server.ts index 6733af0addb..829cf3c6847 100644 --- a/apps/webapp/app/env.server.ts +++ b/apps/webapp/app/env.server.ts @@ -1181,7 +1181,7 @@ const EnvironmentSchema = z CLICKHOUSE_COMPRESSION_REQUEST: z.string().default("1"), // Logs List Query Settings (for paginated log views) - CLICKHOUSE_LOGS_LIST_MAX_MEMORY_USAGE: z.coerce.number().int().default(256_000_000), + CLICKHOUSE_LOGS_LIST_MAX_MEMORY_USAGE: z.coerce.number().int().default(1_000_000_000), CLICKHOUSE_LOGS_LIST_MAX_BYTES_BEFORE_EXTERNAL_SORT: z.coerce .number() .int() diff --git a/apps/webapp/app/hooks/useShortcutKeys.tsx b/apps/webapp/app/hooks/useShortcutKeys.tsx index 0674b5bc0b4..319a91cad84 100644 --- a/apps/webapp/app/hooks/useShortcutKeys.tsx +++ b/apps/webapp/app/hooks/useShortcutKeys.tsx @@ -43,8 +43,10 @@ export function useShortcutKeys({ useHotkeys( keys, - (event, hotkeysEvent) => { - action(event); + (event) => { + if (!event.repeat) { + action(event); + } }, { enabled: isEnabled, diff --git a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts index 69a84932a3c..b1c03f8b74c 100644 --- a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts @@ -354,6 +354,8 @@ export class LogsListPresenter extends BasePresenter { queryBuilder.where("kind NOT IN {debugKinds: Array(String)}", { debugKinds: ["DEBUG_EVENT"], }); + + queryBuilder.where("NOT ((kind = 'LOG_INFO') AND (attributes_text = '{}'))"); } queryBuilder.where("kind NOT IN {debugSpans: Array(String)}", { diff --git a/apps/webapp/app/root.tsx b/apps/webapp/app/root.tsx index 
fb5fef9c846..c6027b1a6d3 100644 --- a/apps/webapp/app/root.tsx +++ b/apps/webapp/app/root.tsx @@ -10,10 +10,12 @@ import { RouteErrorDisplay } from "./components/ErrorDisplay"; import { AppContainer, MainCenteredContainer } from "./components/layout/AppLayout"; import { ShortcutsProvider } from "./components/primitives/ShortcutsProvider"; import { Toast } from "./components/primitives/Toast"; +import { TimezoneSetter } from "./components/TimezoneSetter"; import { env } from "./env.server"; import { featuresForRequest } from "./features.server"; import { usePostHog } from "./hooks/usePostHog"; import { getUser } from "./services/session.server"; +import { getTimezonePreference } from "./services/preferences/uiPreferences.server"; import { appEnvTitleTag } from "./utils"; export const links: LinksFunction = () => { @@ -50,6 +52,7 @@ export const loader = async ({ request }: LoaderFunctionArgs) => { const toastMessage = session.get("toastMessage") as ToastMessage; const posthogProjectKey = env.POSTHOG_PROJECT_KEY; const features = featuresForRequest(request); + const timezone = await getTimezonePreference(request); const kapa = { websiteId: env.KAPA_AI_WEBSITE_ID, @@ -65,6 +68,7 @@ export const loader = async ({ request }: LoaderFunctionArgs) => { appOrigin: env.APP_ORIGIN, triggerCliTag: env.TRIGGER_CLI_TAG, kapa, + timezone, }, { headers: { "Set-Cookie": await commitSession(session) } } ); @@ -118,6 +122,7 @@ export default function App() { + diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs/route.tsx index 6237d699b3e..84dbc2deda5 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs/route.tsx @@ -10,11 +10,10 @@ import { } from "remix-typedjson"; 
import { requireUser } from "~/services/session.server"; import { getCurrentPlan } from "~/services/platform.v3.server"; - import { EnvironmentParamSchema } from "~/utils/pathBuilder"; import { findProjectBySlug } from "~/models/project.server"; import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; -import { LogsListPresenter } from "~/presenters/v3/LogsListPresenter.server"; +import { LogsListPresenter, LogEntry } from "~/presenters/v3/LogsListPresenter.server"; import type { LogLevel } from "~/utils/logUtils"; import { $replica, prisma } from "~/db.server"; import { clickhouseClient } from "~/services/clickhouseInstance.server"; @@ -26,7 +25,6 @@ import { Spinner } from "~/components/primitives/Spinner"; import { Paragraph } from "~/components/primitives/Paragraph"; import { Callout } from "~/components/primitives/Callout"; import { LogsTable } from "~/components/logs/LogsTable"; -import type { LogEntry } from "~/presenters/v3/LogsListPresenter.server"; import { LogDetailView } from "~/components/logs/LogDetailView"; import { LogsSearchInput } from "~/components/logs/LogsSearchInput"; import { LogsLevelFilter } from "~/components/logs/LogsLevelFilter"; @@ -154,7 +152,7 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { to, includeDebugLogs: isAdmin && showDebug, defaultPeriod: "1h", - retentionLimitDays, + retentionLimitDays }) .catch((error) => { if (error instanceof ServiceValidationError) { @@ -168,11 +166,12 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { isAdmin, showDebug, defaultPeriod: "1h", + retentionLimitDays, }); }; export default function Page() { - const { data, isAdmin, showDebug, defaultPeriod } = + const { data, isAdmin, showDebug, defaultPeriod, retentionLimitDays } = useTypedLoaderData(); return ( @@ -203,6 +202,7 @@ export default function Page() { isAdmin={isAdmin} showDebug={showDebug} defaultPeriod={defaultPeriod} + retentionLimitDays={retentionLimitDays} />
@@ -221,6 +221,7 @@ export default function Page() { isAdmin={isAdmin} showDebug={showDebug} defaultPeriod={defaultPeriod} + retentionLimitDays={retentionLimitDays} />
@@ -237,6 +238,7 @@ export default function Page() { isAdmin={isAdmin} showDebug={showDebug} defaultPeriod={defaultPeriod} + retentionLimitDays={retentionLimitDays} /> - - Showing last {retentionDays} {retentionDays === 1 ? 'day' : 'days'} - - - Upgrade - - - ); -} - function FiltersBar({ list, isAdmin, showDebug, defaultPeriod, + retentionLimitDays, }: { list?: Exclude["data"]>, { error: string }>; isAdmin: boolean; showDebug: boolean; defaultPeriod?: string; + retentionLimitDays: number; }) { const location = useOptimisticLocation(); const searchParams = new URLSearchParams(location.search); @@ -317,12 +297,16 @@ function FiltersBar({ <> - - + + {hasFilters && (
-
- {list?.retention?.wasClamped && ( - - )} {isAdmin && ( (location.search); + // Track whether the current fetch is a "check for new" request vs "load more" + const isCheckingForNewRef = useRef(false); // Clear accumulated logs immediately when filters change (for instant visual feedback) useEffect(() => { @@ -410,7 +394,7 @@ function LogsList({ } }, [selectedLogId]); - // Append new logs when fetcher completes (with deduplication) + // Append/prepend new logs when fetcher completes (with deduplication) useEffect(() => { if (fetcher.data && fetcher.state === "idle") { // Ignore fetcher data if it was loaded for a different filter state @@ -418,14 +402,25 @@ function LogsList({ return; } - const existingIds = new Set(accumulatedLogs.map((log) => log.id)); - const newLogs = fetcher.data.logs.filter((log) => !existingIds.has(log.id)); - if (newLogs.length > 0) { - setAccumulatedLogs((prev) => [...prev, ...newLogs]); + if (isCheckingForNewRef.current) { + // "Check for new" - prepend new logs, don't update cursor + setAccumulatedLogs((prev) => { + const existingIds = new Set(prev.map((log) => log.id)); + const newLogs = fetcher.data!.logs.filter((log) => !existingIds.has(log.id)); + return newLogs.length > 0 ? [...newLogs, ...prev] : prev; + }); + isCheckingForNewRef.current = false; + } else { + // "Load more" - append logs and update cursor + setAccumulatedLogs((prev) => { + const existingIds = new Set(prev.map((log) => log.id)); + const newLogs = fetcher.data!.logs.filter((log) => !existingIds.has(log.id)); + return newLogs.length > 0 ? 
[...prev, ...newLogs] : prev; + }); + setNextCursor(fetcher.data.pagination.next); } - setNextCursor(fetcher.data.pagination.next); } - }, [fetcher.data, fetcher.state, accumulatedLogs, location.search]); + }, [fetcher.data, fetcher.state, location.search]); // Build resource URL for loading more const loadMoreUrl = useMemo(() => { @@ -477,6 +472,18 @@ function LogsList({ updateUrlWithLog(undefined); }, [updateUrlWithLog, startTransition]); + const handleCheckForMore = useCallback(() => { + if (fetcher.state !== "idle") return; + // Fetch without cursor to check for new logs + const resourcePath = `/resources${location.pathname}`; + const params = new URLSearchParams(location.search); + params.delete("cursor"); + params.delete("log"); + fetcherFilterStateRef.current = location.search; + isCheckingForNewRef.current = true; + fetcher.load(`${resourcePath}?${params.toString()}`); + }, [fetcher, location.pathname, location.search]); + return ( @@ -488,6 +495,7 @@ function LogsList({ isLoadingMore={fetcher.state === "loading"} hasMore={!!nextCursor} onLoadMore={handleLoadMore} + onCheckForMore={handleCheckForMore} selectedLogId={selectedLogId} onLogSelect={handleLogSelect} /> diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.$runParam/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.$runParam/route.tsx index 1ffd128b308..e02d29b95b5 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.$runParam/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.$runParam/route.tsx @@ -1822,7 +1822,7 @@ function PreviousRunButton({ to }: { to: string | null }) { leadingIconClassName="size-3 group-hover/button:text-text-bright transition-colors" className={cn("flex size-6 max-w-6 items-center", !to && "cursor-not-allowed opacity-50")} onClick={(e) => !to && 
e.preventDefault()} - shortcut={{ key: "[" }} + shortcut={{ key: "j" }} tooltip="Previous Run" disabled={!to} replace @@ -1841,7 +1841,7 @@ function NextRunButton({ to }: { to: string | null }) { leadingIconClassName="size-3 group-hover/button:text-text-bright transition-colors" className={cn("flex size-6 max-w-6 items-center", !to && "cursor-not-allowed opacity-50")} onClick={(e) => !to && e.preventDefault()} - shortcut={{ key: "]" }} + shortcut={{ key: "k" }} tooltip="Next Run" disabled={!to} replace diff --git a/apps/webapp/app/routes/resources.timezone.ts b/apps/webapp/app/routes/resources.timezone.ts new file mode 100644 index 00000000000..f06b44e6149 --- /dev/null +++ b/apps/webapp/app/routes/resources.timezone.ts @@ -0,0 +1,43 @@ +import { type ActionFunctionArgs, json } from "@remix-run/server-runtime"; +import { z } from "zod"; +import { + setTimezonePreference, + uiPreferencesStorage, +} from "~/services/preferences/uiPreferences.server"; + +const schema = z.object({ + timezone: z.string().min(1).max(100), +}); + +// Cache the supported timezones to avoid repeated calls +const supportedTimezones = new Set(Intl.supportedValuesOf("timeZone")); + +export async function action({ request }: ActionFunctionArgs) { + let data: unknown; + try { + data = await request.json(); + } catch { + return json({ success: false, error: "Invalid JSON" }, { status: 400 }); + } + + const result = schema.safeParse(data); + + if (!result.success) { + return json({ success: false, error: "Invalid timezone" }, { status: 400 }); + } + + if (!supportedTimezones.has(result.data.timezone)) { + return json({ success: false, error: "Invalid timezone" }, { status: 400 }); + } + + const session = await setTimezonePreference(result.data.timezone, request); + + return json( + { success: true }, + { + headers: { + "Set-Cookie": await uiPreferencesStorage.commitSession(session), + }, + } + ); +} diff --git a/apps/webapp/app/services/preferences/uiPreferences.server.ts 
b/apps/webapp/app/services/preferences/uiPreferences.server.ts index 0d23a546c2d..44282499db3 100644 --- a/apps/webapp/app/services/preferences/uiPreferences.server.ts +++ b/apps/webapp/app/services/preferences/uiPreferences.server.ts @@ -42,3 +42,15 @@ export async function setRootOnlyFilterPreference(rootOnly: boolean, request: Re session.set("rootOnly", rootOnly); return session; } + +export async function getTimezonePreference(request: Request): Promise { + const session = await getUiPreferencesSession(request); + const timezone = session.get("timezone"); + return typeof timezone === "string" ? timezone : "UTC"; +} + +export async function setTimezonePreference(timezone: string, request: Request) { + const session = await getUiPreferencesSession(request); + session.set("timezone", timezone); + return session; +} From bc63edd6bf4e142c5fa677cb7fd2fb4e9fe786db Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Tue, 10 Feb 2026 12:03:24 +0000 Subject: [PATCH 009/225] chore(repo): adopt vouch with issue based workflow and require for PRs (#3022) Adopting [vouch](https://github.com/mitchellh/vouch) so we can help potential contributors by requiring a conversation before they can submit a PR.
Too many contributors have been skipping the conversation part of contributing to an OSS repo and skipping right ahead to submitting PRs --- Open with Devin --- .github/ISSUE_TEMPLATE/vouch-request.yml | 28 +++++++++++++++++++++ .github/VOUCHED.td | 13 ++++++++++ .github/workflows/vouch-check-pr.yml | 23 +++++++++++++++++ .github/workflows/vouch-manage-by-issue.yml | 25 ++++++++++++++++++ CONTRIBUTING.md | 13 ++++++++++ 5 files changed, 102 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/vouch-request.yml create mode 100644 .github/VOUCHED.td create mode 100644 .github/workflows/vouch-check-pr.yml create mode 100644 .github/workflows/vouch-manage-by-issue.yml diff --git a/.github/ISSUE_TEMPLATE/vouch-request.yml b/.github/ISSUE_TEMPLATE/vouch-request.yml new file mode 100644 index 00000000000..9ffe04a8984 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/vouch-request.yml @@ -0,0 +1,28 @@ +name: Vouch Request +description: Request to be vouched as a contributor +labels: ["vouch-request"] +body: + - type: markdown + attributes: + value: | + ## Vouch Request + + We use [vouch](https://github.com/mitchellh/vouch) to manage contributor trust. PRs from unvouched users are automatically closed. + + To get vouched, fill out this form. A maintainer will review your request and vouch for you by commenting on this issue. + - type: textarea + id: context + attributes: + label: Why do you want to contribute? + description: Tell us a bit about yourself and what you'd like to work on. + placeholder: "I'd like to fix a bug I found in..." + validations: + required: true + - type: textarea + id: prior-work + attributes: + label: Prior contributions or relevant experience + description: Links to previous open source work, relevant projects, or anything that helps us understand your background. + placeholder: "https://github.com/..." 
+ validations: + required: false diff --git a/.github/VOUCHED.td b/.github/VOUCHED.td new file mode 100644 index 00000000000..a9f276737e9 --- /dev/null +++ b/.github/VOUCHED.td @@ -0,0 +1,13 @@ +# Vouched contributors for Trigger.dev +# See: https://github.com/mitchellh/vouch +# +# Org members +0ski +D-K-P +ericallam +matt-aitken +mpcgrid +myftija +nicktrn +samejr +isshaddad \ No newline at end of file diff --git a/.github/workflows/vouch-check-pr.yml b/.github/workflows/vouch-check-pr.yml new file mode 100644 index 00000000000..a2f4c6d1b6b --- /dev/null +++ b/.github/workflows/vouch-check-pr.yml @@ -0,0 +1,23 @@ +name: Vouch - Check PR + +on: + pull_request_target: + types: [opened, reopened] + +permissions: + contents: read + pull-requests: write + issues: read + +jobs: + check-pr: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: mitchellh/vouch/action/check-pr@main + with: + pr-number: ${{ github.event.pull_request.number }} + auto-close: true + require-vouch: true + env: + GH_TOKEN: ${{ github.token }} diff --git a/.github/workflows/vouch-manage-by-issue.yml b/.github/workflows/vouch-manage-by-issue.yml new file mode 100644 index 00000000000..36de055752f --- /dev/null +++ b/.github/workflows/vouch-manage-by-issue.yml @@ -0,0 +1,25 @@ +name: Vouch - Manage by Issue + +on: + issue_comment: + types: [created] + +permissions: + contents: write + issues: write + +jobs: + manage: + runs-on: ubuntu-latest + if: >- + contains(github.event.comment.body, 'vouch') || + contains(github.event.comment.body, 'denounce') || + contains(github.event.comment.body, 'unvouch') + steps: + - uses: actions/checkout@v4 + - uses: mitchellh/vouch/action/manage-by-issue@main + with: + comment-id: ${{ github.event.comment.id }} + issue-id: ${{ github.event.issue.number }} + env: + GH_TOKEN: ${{ github.token }} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 0162350ffc1..fbd290f0a1d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -223,6 +223,19 @@ See the 
[Job Catalog](./references/job-catalog/README.md) file for more. 4. Navigate to your trigger.dev instance ([http://localhost:3030](http://localhost:3030/)), to see the jobs. You can use the test feature to trigger them. +## Getting vouched (required before opening a PR) + +We use [vouch](https://github.com/mitchellh/vouch) to manage contributor trust. **PRs from unvouched users are automatically closed.** + +Before you open your first pull request, you need to be vouched by a maintainer. Here's how: + +1. Open a [Vouch Request](https://github.com/triggerdotdev/trigger.dev/issues/new?template=vouch-request.yml) issue. +2. Tell us what you'd like to work on and share any relevant background. +3. A maintainer will review your request and vouch for you by commenting on the issue. +4. Once vouched, your PRs will be accepted normally. + +If you're unsure whether you're already vouched, go ahead and open a PR — the check will tell you. + ## Making a pull request **If you get errors, be sure to fix them before committing.** From ebffa1039ce41ce7f09ac6d558c9cc7737c70d36 Mon Sep 17 00:00:00 2001 From: DKP <8297864+D-K-P@users.noreply.github.com> Date: Tue, 10 Feb 2026 16:47:00 +0000 Subject: [PATCH 010/225] docs: added Cursor background agent docs (#3023) - Adds a new example project guide for running Cursor's headless CLI agent as a Trigger.dev task with live Realtime Streams output - New doc page at `guides/example-projects/cursor-background-agent.mdx` - Added to sidebar nav and example projects table --- Open with Devin --- docs/docs.json | 1 + .../cursor-background-agent.mdx | 105 ++++++++++++++++++ docs/guides/introduction.mdx | 1 + 3 files changed, 107 insertions(+) create mode 100644 docs/guides/example-projects/cursor-background-agent.mdx diff --git a/docs/docs.json b/docs/docs.json index 4ec2fafc0eb..41b081d90eb 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -377,6 +377,7 @@ "guides/example-projects/claude-changelog-generator", 
"guides/example-projects/claude-github-wiki", "guides/example-projects/claude-thinking-chatbot", + "guides/example-projects/cursor-background-agent", "guides/example-projects/human-in-the-loop-workflow", "guides/example-projects/mastra-agents-with-memory", "guides/example-projects/meme-generator-human-in-the-loop", diff --git a/docs/guides/example-projects/cursor-background-agent.mdx b/docs/guides/example-projects/cursor-background-agent.mdx new file mode 100644 index 00000000000..fa906d2136f --- /dev/null +++ b/docs/guides/example-projects/cursor-background-agent.mdx @@ -0,0 +1,105 @@ +--- +title: "Background Cursor agent using the Cursor CLI" +sidebarTitle: "Cursor background agent" +description: "Run Cursor's headless CLI agent in a Trigger.dev task and stream the live output to the frontend using Trigger.dev Realtime Streams." +--- + +import RealtimeLearnMore from "/snippets/realtime-learn-more.mdx"; + +## Overview + +This example runs [Cursor's headless CLI](https://cursor.com/cli) in a Trigger.dev task. The agent spawns as a child process, and its NDJSON stdout is parsed and piped to the browser in real-time using [Realtime Streams](/realtime/react-hooks/streams). The result is a live terminal UI that renders each Cursor event (system messages, assistant responses, tool calls, results) as it happens. 
+ +**Tech stack:** + +- **[Next.js](https://nextjs.org/)** for the web app (App Router with server actions) +- **[Cursor CLI](https://cursor.com/cli)** for the headless AI coding agent +- **[Trigger.dev](https://trigger.dev)** for task orchestration, real-time streaming, and deployment + +## Video + + + +**Features:** + +- **Build extensions**: Installs the `cursor-agent` binary into the task container image using `addLayer`, demonstrating how to ship system binaries with your tasks +- **Realtime Streams v2**: NDJSON from a child process stdout is parsed and piped directly to the browser using `streams.define()` and `.pipe()` +- **Live terminal rendering**: Each Cursor event renders as a distinct row with auto-scroll +- **Long-running tasks**: Cursor agent runs for minutes; Trigger.dev handles lifecycle, timeouts, and retries automatically +- **Machine selection**: Uses the `medium-2x` preset for resource-intensive CLI tools +- **LLM model picker**: Switch between models from the UI before triggering a run + +## GitHub repo + + + Click here to view the full code for this project in our examples repository on GitHub. You can + fork it and use it as a starting point for your own project. + + +## How it works + +### Task orchestration + +The task spawns the Cursor CLI as a child process and streams its output to the frontend: + +1. A Next.js server action triggers the `cursor-agent` task with the user's prompt and selected model +2. The task spawns the Cursor CLI binary using a helper that returns a typed NDJSON stream and a `waitUntilExit()` promise +3. Each line of NDJSON stdout is parsed into typed Cursor events and piped to a Realtime Stream +4. The frontend subscribes to the stream using `useRealtimeRunWithStreams` and renders each event in a terminal UI +5. 
The task waits for the CLI process to exit and returns the result + +### Build extension for system binaries + +The example includes a custom build extension that installs the `cursor-agent` binary into the container image using `addLayer`. At runtime, the binary is copied to `/tmp` and given execute permissions; this is a workaround needed when the container runtime strips execute permissions from added layers. + +```ts extensions/cursor-cli.ts +export const cursorCli = defineExtension({ + name: "cursor-cli", + onBuildComplete(params) { + params.addLayer({ + id: "cursor-cli", + image: { + instructions: [ + `COPY cursor-agent /usr/local/bin/cursor-agent`, + `RUN chmod +x /usr/local/bin/cursor-agent`, + ], + }, + }); + }, +}); +``` + +### Streaming with Realtime Streams v2 + +The stream is defined with a typed schema and piped from the child process: + +```ts trigger/cursor-stream.ts +export const cursorStream = streams.define("cursor", cursorEventSchema); +``` + +```ts trigger/cursor-agent.ts +const { stream, waitUntilExit } = spawnCursorAgent({ prompt, model }); +cursorStream.pipe(stream); +await waitUntilExit(); +``` + +On the frontend, the `useRealtimeRunWithStreams` hook subscribes to these events and renders them as they arrive. 
+ +## Relevant code + +- **Build extension + spawn helper**: [extensions/cursor-cli.ts](https://github.com/triggerdotdev/examples/blob/main/cursor-cli-demo/extensions/cursor-cli.ts): installs the binary and provides a typed NDJSON stream with `waitUntilExit()` +- **Task definition**: [trigger/cursor-agent.ts](https://github.com/triggerdotdev/examples/blob/main/cursor-cli-demo/trigger/cursor-agent.ts): spawns the CLI, pipes the stream, waits for exit +- **Stream definition**: [trigger/cursor-stream.ts](https://github.com/triggerdotdev/examples/blob/main/cursor-cli-demo/trigger/cursor-stream.ts): Realtime Streams v2 stream with typed schema +- **Terminal UI**: [components/terminal.tsx](https://github.com/triggerdotdev/examples/blob/main/cursor-cli-demo/components/terminal.tsx): renders live events using `useRealtimeRunWithStreams` +- **Event types**: [lib/cursor-events.ts](https://github.com/triggerdotdev/examples/blob/main/cursor-cli-demo/lib/cursor-events.ts): TypeScript types and parsers for Cursor NDJSON events +- **Trigger config**: [trigger.config.ts](https://github.com/triggerdotdev/examples/blob/main/cursor-cli-demo/trigger.config.ts): project config with the cursor CLI build extension + + diff --git a/docs/guides/introduction.mdx b/docs/guides/introduction.mdx index fec3242029b..116c8539b0d 100644 --- a/docs/guides/introduction.mdx +++ b/docs/guides/introduction.mdx @@ -56,6 +56,7 @@ Example projects are full projects with example repos you can fork and use. Thes | [Claude changelog generator](/guides/example-projects/claude-changelog-generator) | Automatically generate professional changelogs from git commits using Claude. | — | [View the repo](https://github.com/triggerdotdev/examples/tree/main/changelog-generator) | | [Claude GitHub wiki agent](/guides/example-projects/claude-github-wiki) | Generate and maintain GitHub wiki documentation with Claude-powered analysis. 
| — | [View the repo](https://github.com/triggerdotdev/examples/tree/main/claude-agent-github-wiki) | | [Claude thinking chatbot](/guides/example-projects/claude-thinking-chatbot) | Use Vercel's AI SDK and Anthropic's Claude 3.7 model to create a thinking chatbot. | Next.js | [View the repo](https://github.com/triggerdotdev/examples/tree/main/claude-thinking-chatbot) | +| [Cursor background agent](/guides/example-projects/cursor-background-agent) | Run Cursor's headless CLI agent as a background task, streaming live output to the browser. | Next.js | [View the repo](https://github.com/triggerdotdev/examples/tree/main/cursor-cli-demo) | | [Human-in-the-loop workflow](/guides/example-projects/human-in-the-loop-workflow) | Create audio summaries of newspaper articles using a human-in-the-loop workflow built with ReactFlow and Trigger.dev waitpoint tokens. | Next.js | [View the repo](https://github.com/triggerdotdev/examples/tree/main/article-summary-workflow) | | [Mastra agents with memory](/guides/example-projects/mastra-agents-with-memory) | Use Mastra to create a weather agent that can collect live weather data and generate clothing recommendations. | — | [View the repo](https://github.com/triggerdotdev/examples/tree/main/mastra-agents) | | [OpenAI Agents SDK for Python guardrails](/guides/example-projects/openai-agent-sdk-guardrails) | Use the OpenAI Agents SDK for Python to create a guardrails system for your AI agents. 
| — | [View the repo](https://github.com/triggerdotdev/examples/tree/main/openai-agent-sdk-guardrails-examples) | From 48a96efbdc22cac090c8b23ed2542b5c4f85cd42 Mon Sep 17 00:00:00 2001 From: Oskar Otwinowski Date: Tue, 10 Feb 2026 18:30:56 +0100 Subject: [PATCH 011/225] chore(webapp): Expose Vercel errors (#3025) --- .../v3/VercelSettingsPresenter.server.ts | 14 ++++++++++++++ ...projects.$projectParam.env.$envParam.vercel.tsx | 2 ++ 2 files changed, 16 insertions(+) diff --git a/apps/webapp/app/presenters/v3/VercelSettingsPresenter.server.ts b/apps/webapp/app/presenters/v3/VercelSettingsPresenter.server.ts index d92fdbf7f7a..26688d41fdd 100644 --- a/apps/webapp/app/presenters/v3/VercelSettingsPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/VercelSettingsPresenter.server.ts @@ -25,6 +25,7 @@ export type VercelSettingsResult = { enabled: boolean; hasOrgIntegration: boolean; authInvalid?: boolean; + authError?: string; connectedProject?: { id: string; vercelProjectId: string; @@ -52,6 +53,7 @@ export type VercelOnboardingData = { availableProjects: VercelAvailableProject[]; hasProjectSelected: boolean; authInvalid?: boolean; + authError?: string; existingVariables: Record; // Environment slugs (non-archived only) gitHubAppInstallations: GitHubAppInstallation[]; isGitHubConnected: boolean; @@ -98,6 +100,7 @@ export class VercelSettingsPresenter extends BasePresenter { enabled: true, hasOrgIntegration: false, authInvalid: true, + authError: orgIntegrationResult.error instanceof Error ? orgIntegrationResult.error.message : "Failed to fetch organization integration", connectedProject: undefined, isGitHubConnected: false, hasStagingEnvironment: false, @@ -116,6 +119,7 @@ export class VercelSettingsPresenter extends BasePresenter { enabled: true, hasOrgIntegration: true, authInvalid: true, + authError: tokenResult.isErr() ? 
tokenResult.error.message : "Vercel token is invalid", connectedProject: undefined, isGitHubConnected: false, hasStagingEnvironment: false, @@ -382,6 +386,7 @@ export class VercelSettingsPresenter extends BasePresenter { availableProjects: [], hasProjectSelected: false, authInvalid: true, + authError: tokenResult.isErr() ? tokenResult.error.message : "Vercel token is invalid", existingVariables: {}, gitHubAppInstallations, isGitHubConnected, @@ -397,6 +402,7 @@ export class VercelSettingsPresenter extends BasePresenter { availableProjects: [], hasProjectSelected: false, authInvalid: clientResult.error.authInvalid, + authError: clientResult.error.authInvalid ? clientResult.error.message : undefined, existingVariables: {}, gitHubAppInstallations, isGitHubConnected, @@ -426,6 +432,7 @@ export class VercelSettingsPresenter extends BasePresenter { availableProjects: [], hasProjectSelected: false, authInvalid: availableProjectsResult.error.authInvalid, + authError: availableProjectsResult.error.authInvalid ? 
availableProjectsResult.error.message : undefined, existingVariables: {}, gitHubAppInstallations, isGitHubConnected, @@ -472,12 +479,19 @@ export class VercelSettingsPresenter extends BasePresenter { (sharedEnvVarsResult.isErr() && sharedEnvVarsResult.error.authInvalid); if (authInvalid) { + const authError = + (customEnvironmentsResult.isErr() && customEnvironmentsResult.error.authInvalid && customEnvironmentsResult.error.message) || + (projectEnvVarsResult.isErr() && projectEnvVarsResult.error.authInvalid && projectEnvVarsResult.error.message) || + (sharedEnvVarsResult.isErr() && sharedEnvVarsResult.error.authInvalid && sharedEnvVarsResult.error.message) || + undefined; + return { customEnvironments: [], environmentVariables: [], availableProjects: availableProjectsResult.value, hasProjectSelected: true, authInvalid: true, + authError: authError || undefined, existingVariables: {}, gitHubAppInstallations, isGitHubConnected, diff --git a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.vercel.tsx b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.vercel.tsx index c25f99b0554..bb0fca6d745 100644 --- a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.vercel.tsx +++ b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.vercel.tsx @@ -188,10 +188,12 @@ export async function loader({ request, params }: LoaderFunctionArgs) { } const authInvalid = onboardingData?.authInvalid || result.authInvalid || false; + const authError = onboardingData?.authError || result.authError; return typedjson({ ...result, authInvalid, + authError, onboardingData, organizationSlug, projectSlug: projectParam, From 2feecece880bfb727bb8e5592e1016388a6d91b0 Mon Sep 17 00:00:00 2001 From: Saadi Myftija Date: Tue, 10 Feb 2026 19:44:44 +0100 Subject: [PATCH 012/225] fix(api): skip external build creation for native builds (#3024) 
Native builds don't use depot, but the `/deployments/:id/progress` endpoint was unconditionally generating depot build tokens. This is now fixed. The initialize deployment endpoint was already doing this check. --- Open with Devin --- .../app/v3/services/deployment.server.ts | 28 ++++++++++++------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/apps/webapp/app/v3/services/deployment.server.ts b/apps/webapp/app/v3/services/deployment.server.ts index 11d659ab221..848c06c4537 100644 --- a/apps/webapp/app/v3/services/deployment.server.ts +++ b/apps/webapp/app/v3/services/deployment.server.ts @@ -2,7 +2,7 @@ import { type AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { BaseService } from "./baseService.server"; import { errAsync, fromPromise, okAsync, type ResultAsync } from "neverthrow"; import { type WorkerDeployment, type Project } from "@trigger.dev/database"; -import { logger, type GitMeta, type DeploymentEvent } from "@trigger.dev/core/v3"; +import { BuildServerMetadata, logger, type GitMeta, type DeploymentEvent } from "@trigger.dev/core/v3"; import { TimeoutDeploymentService } from "./timeoutDeployment.server"; import { env } from "~/env.server"; import { createRemoteImageBuild } from "../remoteImageBuilder.server"; @@ -40,7 +40,7 @@ export class DeploymentService extends BaseService { friendlyId: string, updates: Partial & { git: GitMeta }> ) { - const validateDeployment = (deployment: Pick) => { + const validateDeployment = (deployment: Pick & { buildServerMetadata?: BuildServerMetadata }) => { if (deployment.status !== "PENDING" && deployment.status !== "INSTALLING") { logger.warn( "Attempted progressing deployment that is not in PENDING or INSTALLING status", @@ -75,14 +75,17 @@ export class DeploymentService extends BaseService { return okAsync({ id: deployment.id, status: "INSTALLING" as const }); }); - const createRemoteBuild = (deployment: Pick) => - fromPromise(createRemoteImageBuild(authenticatedEnv.project), 
(error) => ({ - type: "failed_to_create_remote_build" as const, - cause: error, - })); + const progressToBuilding = ( + deployment: Pick & { buildServerMetadata?: BuildServerMetadata } + ) => { + const createRemoteBuildIfNeeded = deployment.buildServerMetadata?.isNativeBuild + ? okAsync(undefined) + : fromPromise(createRemoteImageBuild(authenticatedEnv.project), (error) => ({ + type: "failed_to_create_remote_build" as const, + cause: error, + })); - const progressToBuilding = (deployment: Pick) => - createRemoteBuild(deployment) + return createRemoteBuildIfNeeded .andThen((externalBuildData) => fromPromise( this._prisma.workerDeployment.updateMany({ @@ -106,6 +109,7 @@ export class DeploymentService extends BaseService { } return okAsync({ id: deployment.id, status: "BUILDING" as const }); }); + }; const extendTimeout = (deployment: Pick) => fromPromise( @@ -432,6 +436,7 @@ export class DeploymentService extends BaseService { select: { status: true, id: true, + buildServerMetadata: true, imageReference: true, shortCode: true, environment: { @@ -454,6 +459,9 @@ export class DeploymentService extends BaseService { return errAsync({ type: "deployment_not_found" as const }); } return okAsync(deployment); - }); + }).map((deployment) => ({ + ...deployment, + buildServerMetadata: BuildServerMetadata.safeParse(deployment.buildServerMetadata).data, + })); } } From ddeb9c415ed2aeb25432da28a4d78c5942f29d5b Mon Sep 17 00:00:00 2001 From: Iss <74388823+isshaddad@users.noreply.github.com> Date: Tue, 10 Feb 2026 16:53:41 -0500 Subject: [PATCH 013/225] docs: heartbeats, Bun version, troubleshooting, and preview-branch cleanup (#3026) Doc updates: - new Heartbeats page (yield, progress, external updates) - Bun supported-version note - resource_exhausted troubleshooting with native builder link - GitHub Actions preview-branch example with closed trigger so branches archive when PRs close --- Open with Devin --- docs/deployment/preview-branches.mdx | 2 +- docs/docs.json | 1 + 
docs/github-actions.mdx | 35 +++++++++++++++++++++++++ docs/guides/frameworks/bun.mdx | 4 +++ docs/runs/heartbeats.mdx | 38 ++++++++++++++++++++++++++++ docs/troubleshooting.mdx | 6 ++++- 6 files changed, 84 insertions(+), 2 deletions(-) create mode 100644 docs/runs/heartbeats.mdx diff --git a/docs/deployment/preview-branches.mdx b/docs/deployment/preview-branches.mdx index 7e98e512876..f2a354e2e9d 100644 --- a/docs/deployment/preview-branches.mdx +++ b/docs/deployment/preview-branches.mdx @@ -72,7 +72,7 @@ This GitHub Action will: 1. Automatically create a preview branch for your Pull Request (if the branch doesn't already exist). 2. Deploy the preview branch. -3. Archive the preview branch when the Pull Request is merged/closed. +3. Archive the preview branch when the Pull Request is merged/closed. This only works if your workflow runs on **closed** PRs (`types: [opened, synchronize, reopened, closed]`). If you omit `closed`, branches won't be archived automatically. ```yml .github/workflows/trigger-preview-branches.yml name: Deploy to Trigger.dev (preview branches) diff --git a/docs/docs.json b/docs/docs.json index 41b081d90eb..5c2bddede0c 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -70,6 +70,7 @@ "machines", "idempotency", "runs/max-duration", + "runs/heartbeats", "tags", "runs/metadata", "tasks/streams", diff --git a/docs/github-actions.mdx b/docs/github-actions.mdx index 217d8baa73c..3f1c145926f 100644 --- a/docs/github-actions.mdx +++ b/docs/github-actions.mdx @@ -83,6 +83,41 @@ jobs: If you already have a GitHub action file, you can just add the final step "🚀 Deploy Trigger.dev" to your existing file. 
+## Preview branches + +To deploy to preview branches from Pull Requests and have them archived when PRs are merged or closed, use a workflow that runs on `pull_request` with **all four types** including `closed`: + +```yaml .github/workflows/trigger-preview-branches.yml +name: Deploy to Trigger.dev (preview branches) + +on: + pull_request: + types: [opened, synchronize, reopened, closed] + +jobs: + deploy-preview: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Use Node.js 20.x + uses: actions/setup-node@v4 + with: + node-version: "20.x" + + - name: Install dependencies + run: npm install + + - name: Deploy preview branch + run: npx trigger.dev@latest deploy --env preview + env: + TRIGGER_ACCESS_TOKEN: ${{ secrets.TRIGGER_ACCESS_TOKEN }} +``` + + + **Include `closed`** in the `pull_request.types` list. Without it, preview branches won't be archived when PRs are merged or closed, and you may hit the limit on active preview branches. See [Preview branches](/deployment/preview-branches#preview-branches-with-github-actions-recommended) for more details. + + ## Creating a Personal Access Token diff --git a/docs/guides/frameworks/bun.mdx b/docs/guides/frameworks/bun.mdx index e5f4ab1cd0d..d4115138250 100644 --- a/docs/guides/frameworks/bun.mdx +++ b/docs/guides/frameworks/bun.mdx @@ -14,6 +14,10 @@ import CliViewRunStep from "/snippets/step-view-run.mdx"; Bun will still be used to execute your tasks, even in the `dev` environment. + + **Supported Bun version:** Deployed tasks run on Bun 1.3.3. For local development, use Bun 1.3.x for compatibility. + + ## Known issues diff --git a/docs/runs/heartbeats.mdx b/docs/runs/heartbeats.mdx new file mode 100644 index 00000000000..b28f9fcbde7 --- /dev/null +++ b/docs/runs/heartbeats.mdx @@ -0,0 +1,38 @@ +--- +title: "Heartbeats" +sidebarTitle: "Heartbeats" +description: "Keep long-running or CPU-heavy tasks from being marked as stalled." 
+--- + +We send a heartbeat from your task to the platform every 30 seconds. If we don't receive a heartbeat within 5 minutes, we mark the run as stalled and stop it with a `TASK_RUN_STALLED_EXECUTING` error. + +Code that blocks the event loop for too long (for example, a tight loop doing synchronous work on a large dataset) can prevent heartbeats from being sent. In that case, use `heartbeats.yield()` inside the loop so the runtime can yield to the event loop and send a heartbeat. You can call it every iteration; the implementation only yields when needed. + +```ts +import { task, heartbeats } from "@trigger.dev/sdk"; + +export const processLargeDataset = task({ + id: "process-large-dataset", + run: async (payload: { items: string[] }) => { + for (const row of payload.items) { + await heartbeats.yield(); + processRow(row); + } + return { processed: payload.items.length }; + }, +}); + +function processRow(row: string) { + // synchronous CPU-heavy work +} +``` + +If you see `TASK_RUN_STALLED_EXECUTING`, see [Task run stalled executing](/troubleshooting#task-run-stalled-executing) in the troubleshooting guide. + +## Sending progress to Trigger.dev + +To stream progress or status updates to the dashboard and your app, use [run metadata](/runs/metadata). Call `metadata.set()` (or `metadata.append()`) as the task runs. The dashboard and [Realtime](/realtime) (including `runs.subscribeToRun` and the React hooks) receive those updates as they happen. See [Progress monitoring](/realtime/backend/subscribe#progress-monitoring) for a full example. + +## Sending updates to your own system + +Trigger.dev doesn’t push run updates to external services. To send progress or heartbeats to your own backend (for example Supabase Realtime), call your API or client from inside the task when you want to emit an update—e.g. in the same loop where you call `heartbeats.yield()` or `metadata.set()`. Use whatever your stack supports: HTTP, the Supabase client, or another SDK. 
diff --git a/docs/troubleshooting.mdx b/docs/troubleshooting.mdx index 7a003194fa7..13d9216f863 100644 --- a/docs/troubleshooting.mdx +++ b/docs/troubleshooting.mdx @@ -73,6 +73,10 @@ This happens because Docker Desktop left behind a config file that's still tryin Usually there will be some useful guidance below this message. If you can't figure out what's going wrong then join [our Discord](https://trigger.dev/discord) and create a Help forum post with a link to your deployment. +### `resource_exhausted` + +If you see a `resource_exhausted` error during deploy, the build may have hit resource limits on our build infrastructure. Try our [native builder](https://trigger.dev/changelog/deployments-with-native-builds). + ### `No loader is configured for ".node" files` This happens because `.node` files are native code and can't be bundled like other packages. To fix this, add your package to [`build.external`](/config/config-file#external) in the `trigger.config.ts` file like this: @@ -175,7 +179,7 @@ The most common situation this happens is if you're using `Promise.all` around s Make sure that you always use `await` when you call `trigger`, `triggerAndWait`, `batchTrigger`, and `batchTriggerAndWait`. If you don't then it's likely the task(s) won't be triggered because the calling function process can be terminated before the networks calls are sent. -### `COULD_NOT_FIND_EXECUTOR` +### `COULD_NOT_FIND_EXECUTOR` If you see a `COULD_NOT_FIND_EXECUTOR` error when triggering a task, it may be caused by dynamically importing the child task. When tasks are dynamically imported, the executor may not be properly registered. From 170fde3498f87d59f3091cecf90edd99c0f63e55 Mon Sep 17 00:00:00 2001 From: Matt Aitken Date: Wed, 11 Feb 2026 10:44:14 +0000 Subject: [PATCH 014/225] Move vouch requirement to top of CONTRIBUTING.md (#3029) Contributors need to be vouched before opening PRs, but this requirement was buried far down in the document. 
This change: - Adds mention of vouches in the intro paragraph - Moves the "Getting vouched" section to right after the intro This makes the requirement more visible to new contributors. Slack thread: https://triggerdotdev.slack.com/archives/C0A7Q6F62NS/p1770805895370749 https://claude.ai/code/session_01G6VVbgfUAeCpJfedELdqq1 --- Open with Devin Co-authored-by: Claude --- CONTRIBUTING.md | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index fbd290f0a1d..754ad017ba9 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,10 +2,23 @@ Thank you for taking the time to contribute to Trigger.dev. Your involvement is not just welcomed, but we encourage it! 🚀 -Please take some time to read this guide to understand contributing best practices for Trigger.dev. +Please take some time to read this guide to understand contributing best practices for Trigger.dev. Note that we use [vouch](https://github.com/mitchellh/vouch) to manage contributor trust, so you'll need to be vouched before opening a PR. Thank you for helping us make Trigger.dev even better! 🤩 +## Getting vouched (required before opening a PR) + +We use [vouch](https://github.com/mitchellh/vouch) to manage contributor trust. **PRs from unvouched users are automatically closed.** + +Before you open your first pull request, you need to be vouched by a maintainer. Here's how: + +1. Open a [Vouch Request](https://github.com/triggerdotdev/trigger.dev/issues/new?template=vouch-request.yml) issue. +2. Tell us what you'd like to work on and share any relevant background. +3. A maintainer will review your request and vouch for you by commenting on the issue. +4. Once vouched, your PRs will be accepted normally. + +If you're unsure whether you're already vouched, go ahead and open a PR — the check will tell you. + ## Developing The development branch is `main`. 
This is the branch that all pull @@ -223,19 +236,6 @@ See the [Job Catalog](./references/job-catalog/README.md) file for more. 4. Navigate to your trigger.dev instance ([http://localhost:3030](http://localhost:3030/)), to see the jobs. You can use the test feature to trigger them. -## Getting vouched (required before opening a PR) - -We use [vouch](https://github.com/mitchellh/vouch) to manage contributor trust. **PRs from unvouched users are automatically closed.** - -Before you open your first pull request, you need to be vouched by a maintainer. Here's how: - -1. Open a [Vouch Request](https://github.com/triggerdotdev/trigger.dev/issues/new?template=vouch-request.yml) issue. -2. Tell us what you'd like to work on and share any relevant background. -3. A maintainer will review your request and vouch for you by commenting on the issue. -4. Once vouched, your PRs will be accepted normally. - -If you're unsure whether you're already vouched, go ahead and open a PR — the check will tell you. 
- ## Making a pull request **If you get errors, be sure to fix them before committing.** From 6e3ac8bd9154aff5203d7402d6238c6f6fe3a850 Mon Sep 17 00:00:00 2001 From: DKP <8297864+D-K-P@users.noreply.github.com> Date: Wed, 11 Feb 2026 13:32:49 +0000 Subject: [PATCH 015/225] docs: cursor cli docs update (remove chmod workaround) (#3031) --- Open with Devin --- .../cursor-background-agent.mdx | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/docs/guides/example-projects/cursor-background-agent.mdx b/docs/guides/example-projects/cursor-background-agent.mdx index fa906d2136f..b05ffa0df9d 100644 --- a/docs/guides/example-projects/cursor-background-agent.mdx +++ b/docs/guides/example-projects/cursor-background-agent.mdx @@ -58,18 +58,24 @@ The task spawns the Cursor CLI as a child process and streams its output to the ### Build extension for system binaries -The example includes a custom build extension that installs the `cursor-agent` binary into the container image using `addLayer`. At runtime, the binary is copied to `/tmp` and given execute permissions; this is a workaround needed when the container runtime strips execute permissions from added layers. +The example includes a custom build extension that installs `cursor-agent` into the container image using `addLayer`. The official install script is run at build time, then the resolved entry point and its dependencies are copied to a fixed path so the task can invoke them at runtime with the bundled Node binary. 
```ts extensions/cursor-cli.ts -export const cursorCli = defineExtension({ +const CURSOR_AGENT_DIR = "/usr/local/lib/cursor-agent"; + +export const cursorCli = (): BuildExtension => ({ name: "cursor-cli", - onBuildComplete(params) { - params.addLayer({ + onBuildComplete(context) { + if (context.target === "dev") return; + + context.addLayer({ id: "cursor-cli", image: { instructions: [ - `COPY cursor-agent /usr/local/bin/cursor-agent`, - `RUN chmod +x /usr/local/bin/cursor-agent`, + "RUN apt-get update && apt-get install -y curl ca-certificates && rm -rf /var/lib/apt/lists/*", + 'ENV PATH="/root/.local/bin:$PATH"', + "RUN curl -fsSL https://cursor.com/install | bash", + `RUN cp -r $(dirname $(readlink -f /root/.local/bin/cursor-agent)) ${CURSOR_AGENT_DIR}`, ], }, }); From d7bc37fdc0f90264384e1f360cbb9f997a1d8788 Mon Sep 17 00:00:00 2001 From: James Ritchie Date: Thu, 12 Feb 2026 08:58:21 +0000 Subject: [PATCH 016/225] Feat(dashboard): show the Betterstack incident title in the dashboard (#3006) When the incident panel is displayed, show the title added to BetterStack as the contents of the incident panel. I've also brightened the UI so it's more visible. 
CleanShot 2026-02-04 at 20 46 36@2x --- Open with Devin --- .../navigation/HelpAndFeedbackPopover.tsx | 2 +- .../webapp/app/routes/resources.incidents.tsx | 121 ++++++------ .../betterstack/betterstack.server.ts | 178 +++++++++++++----- 3 files changed, 195 insertions(+), 106 deletions(-) diff --git a/apps/webapp/app/components/navigation/HelpAndFeedbackPopover.tsx b/apps/webapp/app/components/navigation/HelpAndFeedbackPopover.tsx index 74077eed724..1626ec9f910 100644 --- a/apps/webapp/app/components/navigation/HelpAndFeedbackPopover.tsx +++ b/apps/webapp/app/components/navigation/HelpAndFeedbackPopover.tsx @@ -59,7 +59,7 @@ export function HelpAndFeedback({ button={ diff --git a/apps/webapp/app/routes/resources.incidents.tsx b/apps/webapp/app/routes/resources.incidents.tsx index 532038d4f99..445c3ef912a 100644 --- a/apps/webapp/app/routes/resources.incidents.tsx +++ b/apps/webapp/app/routes/resources.incidents.tsx @@ -1,58 +1,87 @@ import { ExclamationTriangleIcon } from "@heroicons/react/20/solid"; import { json } from "@remix-run/node"; -import { useFetcher } from "@remix-run/react"; +import { useFetcher, type ShouldRevalidateFunction } from "@remix-run/react"; import { motion } from "framer-motion"; -import { useCallback, useEffect } from "react"; +import { useEffect, useRef } from "react"; import { LinkButton } from "~/components/primitives/Buttons"; import { Paragraph } from "~/components/primitives/Paragraph"; import { Popover, PopoverContent, PopoverTrigger } from "~/components/primitives/Popover"; import { SimpleTooltip } from "~/components/primitives/Tooltip"; import { useFeatures } from "~/hooks/useFeatures"; -import { BetterStackClient } from "~/services/betterstack/betterstack.server"; +import { BetterStackClient, type AggregateState } from "~/services/betterstack/betterstack.server"; + +// Prevent Remix from revalidating this route when other fetchers submit +export const shouldRevalidate: ShouldRevalidateFunction = () => false; + +export type 
IncidentLoaderData = { + status: AggregateState; + title: string | null; +}; export async function loader() { const client = new BetterStackClient(); - const result = await client.getIncidents(); + const result = await client.getIncidentStatus(); if (!result.success) { - return json({ operational: true }); + return json({ status: "operational", title: null }); } - return json({ - operational: result.data.attributes.aggregate_state === "operational", + return json({ + status: result.data.status, + title: result.data.title, }); } -export function IncidentStatusPanel({ isCollapsed = false }: { isCollapsed?: boolean }) { +const DEFAULT_MESSAGE = + "Our team is working on resolving the issue. Check our status page for more information."; + +const POLL_INTERVAL_MS = 60_000; + +/** Hook to fetch and poll incident status */ +export function useIncidentStatus() { const { isManagedCloud } = useFeatures(); const fetcher = useFetcher(); - - const fetchIncidents = useCallback(() => { - if (fetcher.state === "idle") { - fetcher.load("/resources/incidents"); - } - }, []); + const hasInitiallyFetched = useRef(false); useEffect(() => { if (!isManagedCloud) return; - fetchIncidents(); + // Initial fetch on mount + if (!hasInitiallyFetched.current && fetcher.state === "idle") { + hasInitiallyFetched.current = true; + fetcher.load("/resources/incidents"); + } - const interval = setInterval(fetchIncidents, 60 * 1000); // 1 minute + // Poll every 60 seconds + const interval = setInterval(() => { + if (fetcher.state === "idle") { + fetcher.load("/resources/incidents"); + } + }, POLL_INTERVAL_MS); return () => clearInterval(interval); - }, [isManagedCloud, fetchIncidents]); + }, [isManagedCloud]); + + return { + status: fetcher.data?.status ?? "operational", + title: fetcher.data?.title ?? null, + hasIncident: (fetcher.data?.status ?? "operational") !== "operational", + isManagedCloud, + }; +} - const operational = fetcher.data?.operational ?? 
true; +export function IncidentStatusPanel({ isCollapsed = false }: { isCollapsed?: boolean }) { + const { title, hasIncident, isManagedCloud } = useIncidentStatus(); - if (!isManagedCloud || operational) { + if (!isManagedCloud || !hasIncident) { return null; } + const message = title || DEFAULT_MESSAGE; + return (
- {/* Expanded panel - animated height and opacity */} -
- {/* Header */} -
- - - Active incident - -
- - {/* Description */} - - Our team is working on resolving the issue. Check our status page for more - information. - - - {/* Button */} - - View status page - -
+
- {/* Collapsed button - animated height and opacity */} - + + } content="Active incident" @@ -115,32 +118,32 @@ export function IncidentStatusPanel({ isCollapsed = false }: { isCollapsed?: boo
- +
); } -function IncidentPopoverContent() { +function IncidentPanelContent({ message }: { message: string }) { return ( -
-
- - +
+
+ + Active incident
- - Our team is working on resolving the issue. Check our status page for more information. + + {message} - View status page + View status page
); diff --git a/apps/webapp/app/services/betterstack/betterstack.server.ts b/apps/webapp/app/services/betterstack/betterstack.server.ts index 75b404745a7..95fe2208836 100644 --- a/apps/webapp/app/services/betterstack/betterstack.server.ts +++ b/apps/webapp/app/services/betterstack/betterstack.server.ts @@ -1,26 +1,56 @@ -import { type ApiResult, wrapZodFetch } from "@trigger.dev/core/v3/zodfetch"; +import { wrapZodFetch } from "@trigger.dev/core/v3/zodfetch"; import { createCache, DefaultStatefulContext, Namespace } from "@unkey/cache"; import { createLRUMemoryStore } from "@internal/cache"; import { z } from "zod"; import { env } from "~/env.server"; -const IncidentSchema = z.object({ +const StatusPageSchema = z.object({ data: z.object({ id: z.string(), type: z.string(), attributes: z.object({ - aggregate_state: z.string(), + aggregate_state: z.enum(["operational", "degraded", "downtime"]), }), }), }); -export type Incident = z.infer; +const StatusReportsSchema = z.object({ + data: z.array( + z.object({ + id: z.string(), + type: z.literal("status_report"), + attributes: z.object({ + title: z.string().nullable(), + starts_at: z.string().nullable(), + ends_at: z.string().nullable(), + aggregate_state: z.string().nullable(), + }), + }) + ), + pagination: z.object({ + first: z.string().nullable(), + last: z.string().nullable(), + prev: z.string().nullable(), + next: z.string().nullable(), + }), +}); + +export type AggregateState = "operational" | "degraded" | "downtime"; + +export type IncidentStatus = { + status: AggregateState; + title: string | null; +}; + +type CachedResult = + | { success: true; data: IncidentStatus } + | { success: false; error: unknown }; const ctx = new DefaultStatefulContext(); const memory = createLRUMemoryStore(100); const cache = createCache({ - query: new Namespace>(ctx, { + query: new Namespace(ctx, { stores: [memory], fresh: 15_000, stale: 30_000, @@ -30,59 +60,115 @@ const cache = createCache({ export class BetterStackClient { private 
readonly baseUrl = "https://uptime.betterstack.com/api/v2"; - async getIncidents() { + async getIncidentStatus(): Promise { const apiKey = env.BETTERSTACK_API_KEY; - if (!apiKey) { - return { success: false as const, error: "BETTERSTACK_API_KEY is not set" }; + const statusPageId = env.BETTERSTACK_STATUS_PAGE_ID; + + if (!apiKey || !statusPageId) { + return { success: false, error: "Missing BetterStack configuration" }; } - const statusPageId = env.BETTERSTACK_STATUS_PAGE_ID; - if (!statusPageId) { - return { success: false as const, error: "BETTERSTACK_STATUS_PAGE_ID is not set" }; + const cachedResult = await cache.query.swr("betterstack-incident-status", () => + this.fetchIncidentStatus(apiKey, statusPageId) + ); + + if (cachedResult.err || !cachedResult.val) { + return { success: false, error: cachedResult.err ?? "No result from cache" }; } - const cachedResult = await cache.query.swr("betterstack", async () => { - try { - const result = await wrapZodFetch( - IncidentSchema, - `${this.baseUrl}/status-pages/${statusPageId}`, - { - headers: { - Authorization: `Bearer ${apiKey}`, - "Content-Type": "application/json", - }, - }, - { - retry: { - maxAttempts: 3, - minTimeoutInMs: 1000, - maxTimeoutInMs: 5000, - }, - } - ); - - return result; - } catch (error) { - console.error("Failed to fetch incidents from BetterStack:", error); - return { - success: false as const, - error: error instanceof Error ? 
error.message : "Unknown error", - }; + return cachedResult.val; + } + + private async fetchIncidentStatus( + apiKey: string, + statusPageId: string + ): Promise { + const headers = { + Authorization: `Bearer ${apiKey}`, + "Content-Type": "application/json", + }; + const retryConfig = { + retry: { maxAttempts: 3, minTimeoutInMs: 1000, maxTimeoutInMs: 5000 }, + }; + + try { + // Fetch the status page to get aggregate state + const statusPageResult = await wrapZodFetch( + StatusPageSchema, + `${this.baseUrl}/status-pages/${statusPageId}`, + { headers }, + retryConfig + ); + + if (!statusPageResult.success) { + return { success: false, error: statusPageResult.error }; + } + + const status = statusPageResult.data.data.attributes.aggregate_state; + + // If operational, no need to fetch reports + if (status === "operational") { + return { success: true, data: { status, title: null } }; } - }); - if (cachedResult.err) { - return { success: false as const, error: cachedResult.err }; + // Fetch status reports to get the incident title + const title = await this.fetchActiveReportTitle(apiKey, statusPageId, headers, retryConfig); + + return { success: true, data: { status, title } }; + } catch (error) { + console.error("Failed to fetch incident status from BetterStack:", error); + return { + success: false, + error: error instanceof Error ? 
error.message : "Unknown error", + }; + } + } + + private async fetchActiveReportTitle( + apiKey: string, + statusPageId: string, + headers: Record, + retryConfig: { retry: { maxAttempts: number; minTimeoutInMs: number; maxTimeoutInMs: number } } + ): Promise { + const reportsUrl = `${this.baseUrl}/status-pages/${statusPageId}/status-reports`; + + let reportsResult = await wrapZodFetch( + StatusReportsSchema, + reportsUrl, + { headers }, + retryConfig + ); + + if (!reportsResult.success) { + return null; } - if (!cachedResult.val) { - return { success: false as const, error: "No result from BetterStack" }; + // Fetch last page if there are multiple pages (most recent reports are at the end) + const { first, last } = reportsResult.data.pagination; + if (last && last !== first) { + const lastPageResult = await wrapZodFetch( + StatusReportsSchema, + last, + { headers }, + retryConfig + ); + if (lastPageResult.success) { + reportsResult = lastPageResult; + } } - if (!cachedResult.val.success) { - return { success: false as const, error: cachedResult.val.error }; + // Find active reports (not resolved, not ended) + const activeReports = reportsResult.data.data.filter( + (report) => + report.attributes.aggregate_state !== "resolved" && report.attributes.ends_at === null + ); + + if (activeReports.length === 0) { + return null; } - return { success: true as const, data: cachedResult.val.data.data }; + // Return the title from the most recent active report + const mostRecent = activeReports[activeReports.length - 1]; + return mostRecent.attributes.title; } } From c2085e6cc67fa83fddebc59bde942836b6eac99a Mon Sep 17 00:00:00 2001 From: nicktrn <55853254+nicktrn@users.noreply.github.com> Date: Thu, 12 Feb 2026 15:31:52 +0000 Subject: [PATCH 017/225] feat(dashboard): link git sha and ref to GitHub on settings page (#3034) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Make the git SHA and git ref in the org settings sidebar clickable 
links to GitHub — SHA links to the commit, ref links to the branch/tag. --- .../OrganizationSettingsSideMenu.tsx | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/apps/webapp/app/components/navigation/OrganizationSettingsSideMenu.tsx b/apps/webapp/app/components/navigation/OrganizationSettingsSideMenu.tsx index 8758e181ff8..9069620c92b 100644 --- a/apps/webapp/app/components/navigation/OrganizationSettingsSideMenu.tsx +++ b/apps/webapp/app/components/navigation/OrganizationSettingsSideMenu.tsx @@ -141,7 +141,14 @@ export function OrganizationSettingsSideMenu({
- {buildInfo.gitRefName} + + {buildInfo.gitRefName} +
)} @@ -149,7 +156,14 @@ export function OrganizationSettingsSideMenu({
- {buildInfo.gitSha.slice(0, 9)} + + {buildInfo.gitSha.slice(0, 9)} +
)} From 062bcaece8ad7f1046097977efab18c1fcc0ee42 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Thu, 12 Feb 2026 16:14:25 +0000 Subject: [PATCH 018/225] feat(mcp): add timeout parameter to wait_for_run_to_complete tool (#3035) ## Summary - Adds an optional `timeoutInSeconds` parameter (default 60s) to the `wait_for_run_to_complete` MCP tool - If the run doesn't complete within the timeout, returns the current run state instead of blocking indefinitely - Uses `AbortSignal.timeout()` combined with the existing MCP signal Fixes #3032 --- .changeset/mcp-wait-timeout.md | 5 +++ packages/cli-v3/src/mcp/config.ts | 2 +- packages/cli-v3/src/mcp/schemas.ts | 11 +++++++ packages/cli-v3/src/mcp/tools/runs.ts | 47 +++++++++++++++++++-------- 4 files changed, 50 insertions(+), 15 deletions(-) create mode 100644 .changeset/mcp-wait-timeout.md diff --git a/.changeset/mcp-wait-timeout.md b/.changeset/mcp-wait-timeout.md new file mode 100644 index 00000000000..02d6c982316 --- /dev/null +++ b/.changeset/mcp-wait-timeout.md @@ -0,0 +1,5 @@ +--- +"trigger.dev": patch +--- + +Add optional `timeoutInSeconds` parameter to the `wait_for_run_to_complete` MCP tool. Defaults to 60 seconds. If the run doesn't complete within the timeout, the current state of the run is returned instead of waiting indefinitely. diff --git a/packages/cli-v3/src/mcp/config.ts b/packages/cli-v3/src/mcp/config.ts index 206b5910fa5..5a1ec45cba1 100644 --- a/packages/cli-v3/src/mcp/config.ts +++ b/packages/cli-v3/src/mcp/config.ts @@ -68,7 +68,7 @@ export const toolsMetadata = { name: "wait_for_run_to_complete", title: "Wait for Run to Complete", description: - "Wait for a run to complete. The run ID is the ID of the run that was triggered. It starts with run_", + "Wait for a run to complete. The run ID is the ID of the run that was triggered. It starts with run_. 
Has an optional timeoutInSeconds parameter (default 60s) - if the run doesn't complete within that time, the current state of the run will be returned.", }, cancel_run: { name: "cancel_run", diff --git a/packages/cli-v3/src/mcp/schemas.ts b/packages/cli-v3/src/mcp/schemas.ts index b98faca0dab..8afb10f38f5 100644 --- a/packages/cli-v3/src/mcp/schemas.ts +++ b/packages/cli-v3/src/mcp/schemas.ts @@ -123,6 +123,17 @@ export const CommonRunsInput = CommonProjectsInput.extend({ export type CommonRunsInput = z.output; +export const WaitForRunInput = CommonRunsInput.extend({ + timeoutInSeconds: z + .number() + .describe( + "The maximum time in seconds to wait for the run to complete. If the run doesn't complete within this time, the current state of the run will be returned. Defaults to 60 seconds." + ) + .default(60), +}); + +export type WaitForRunInput = z.output; + export const GetRunDetailsInput = CommonRunsInput.extend({ maxTraceLines: z .number() diff --git a/packages/cli-v3/src/mcp/tools/runs.ts b/packages/cli-v3/src/mcp/tools/runs.ts index 13fe601da0e..056544e3cdb 100644 --- a/packages/cli-v3/src/mcp/tools/runs.ts +++ b/packages/cli-v3/src/mcp/tools/runs.ts @@ -1,7 +1,7 @@ import { AnyRunShape } from "@trigger.dev/core/v3"; import { toolsMetadata } from "../config.js"; import { formatRun, formatRunList, formatRunShape, formatRunTrace } from "../formatters.js"; -import { CommonRunsInput, GetRunDetailsInput, ListRunsInput } from "../schemas.js"; +import { CommonRunsInput, GetRunDetailsInput, ListRunsInput, WaitForRunInput } from "../schemas.js"; import { respondWithError, toolHandler } from "../utils.js"; export const getRunDetailsTool = { @@ -65,8 +65,8 @@ export const waitForRunToCompleteTool = { name: toolsMetadata.wait_for_run_to_complete.name, title: toolsMetadata.wait_for_run_to_complete.title, description: toolsMetadata.wait_for_run_to_complete.description, - inputSchema: CommonRunsInput.shape, - handler: toolHandler(CommonRunsInput.shape, async (input, { ctx, 
signal }) => { + inputSchema: WaitForRunInput.shape, + handler: toolHandler(WaitForRunInput.shape, async (input, { ctx, signal }) => { ctx.logger?.log("calling wait_for_run_to_complete", { input }); if (ctx.options.devOnly && input.environment !== "dev") { @@ -87,20 +87,35 @@ export const waitForRunToCompleteTool = { branch: input.branch, }); - const runSubscription = apiClient.subscribeToRun(input.runId, { signal }); + const timeoutMs = input.timeoutInSeconds * 1000; + const timeoutSignal = AbortSignal.timeout(timeoutMs); + const combinedSignal = signal + ? AbortSignal.any([signal, timeoutSignal]) + : timeoutSignal; + + const runSubscription = apiClient.subscribeToRun(input.runId, { signal: combinedSignal }); const readableStream = runSubscription.getReader(); let run: AnyRunShape | null = null; - - while (true) { - const { done, value } = await readableStream.read(); - if (done) { - break; + let timedOut = false; + + try { + while (true) { + const { done, value } = await readableStream.read(); + if (done) { + break; + } + run = value; + + if (value.isCompleted) { + break; + } } - run = value; - - if (value.isCompleted) { - break; + } catch (error) { + if (timeoutSignal.aborted) { + timedOut = true; + } else { + throw error; } } @@ -108,8 +123,12 @@ export const waitForRunToCompleteTool = { return respondWithError("Run not found"); } + const prefix = timedOut + ? `Timed out after ${input.timeoutInSeconds}s. 
Returning current run state:\n\n` + : ""; + return { - content: [{ type: "text", text: formatRunShape(run) }], + content: [{ type: "text", text: prefix + formatRunShape(run) }], }; }), }; From bc0d1ff59a8152b303ca7f30fa7b2be0b98646c5 Mon Sep 17 00:00:00 2001 From: Matt Aitken Date: Thu, 12 Feb 2026 17:48:02 +0000 Subject: [PATCH 019/225] Metrics dashboards (#3019) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary - Implemented metrics dashboards with a built-in dashboard and custom dashboards - Added a "Big number” display type What changed - New data format for metric layouts and saving/editing layouts (editing, saving, cancel revert) - QueryWidget usable on Query page and Metrics dashboards - Time filtering, auto-reloading and timeBucket() auto-bin support - Filters added to metrics; widget popover/improved history and blank states - Side menu: - Metrics/Insights section with icons, colors, padding, collapsible behavior and reordering of custom dashboards - Move action logic into service for reuse and API querying; refactor reordering for reuse --- Open with Devin --------- Co-authored-by: James Ritchie --- .vscode/settings.json | 1 - apps/webapp/app/components/AlphaBadge.tsx | 29 + .../app/components/code/AIQueryInput.tsx | 149 +- .../app/components/code/ChartConfigPanel.tsx | 204 ++- .../app/components/code/QueryResultsChart.tsx | 359 ++-- .../webapp/app/components/code/TSQLEditor.tsx | 43 +- .../app/components/code/TSQLResultsTable.tsx | 122 +- .../webapp/app/components/code/chartColors.ts | 183 +++ .../components/code/tsql/tsqlCompletion.ts | 10 + .../app/components/layout/AppLayout.tsx | 2 +- .../app/components/logs/LogsTaskFilter.tsx | 4 +- .../app/components/metrics/QueryWidget.tsx | 496 ++++++ .../app/components/metrics/QueuesFilter.tsx | 212 +++ .../metrics/SaveToDashboardDialog.tsx | 177 ++ .../app/components/metrics/ScopeFilter.tsx | 64 + .../app/components/metrics/TitleWidget.tsx | 125 ++ 
.../navigation/DashboardDialogs.tsx | 255 +++ .../components/navigation/DashboardList.tsx | 123 ++ .../app/components/navigation/SideMenu.tsx | 685 ++++---- .../components/navigation/SideMenuItem.tsx | 112 +- .../components/navigation/SideMenuSection.tsx | 28 +- .../components/navigation/TreeConnectors.tsx | 29 + .../components/navigation/sideMenuTypes.ts | 7 + .../navigation/useReorderableList.ts | 129 ++ .../components/primitives/AppliedFilter.tsx | 16 +- .../app/components/primitives/ClientTabs.tsx | 3 +- .../app/components/primitives/FormButtons.tsx | 4 +- .../primitives/LoadingBarDivider.tsx | 6 +- .../app/components/primitives/Popover.tsx | 27 +- .../app/components/primitives/Resizable.tsx | 6 +- .../app/components/primitives/Tooltip.tsx | 2 +- .../primitives/charts/BigNumber.tsx | 46 - .../primitives/charts/BigNumberCard.tsx | 171 ++ .../app/components/primitives/charts/Card.tsx | 17 +- .../components/primitives/charts/ChartBar.tsx | 41 +- .../primitives/charts/ChartLegendCompound.tsx | 56 +- .../primitives/charts/ChartLine.tsx | 28 +- .../app/components/query/QueryEditor.tsx | 1457 +++++++++++++++++ .../app/components/runs/v3/SharedFilters.tsx | 103 +- .../app/components/runs/v3/TaskRunStatus.tsx | 39 + apps/webapp/app/env.server.ts | 4 + apps/webapp/app/hooks/useDashboardEditor.ts | 515 ++++++ apps/webapp/app/hooks/useElementVisibility.ts | 35 + apps/webapp/app/hooks/useInterval.ts | 63 + apps/webapp/app/hooks/useOrganizations.ts | 26 + apps/webapp/app/hooks/useRevalidateOnParam.ts | 57 + .../app/models/runtimeEnvironment.server.ts | 23 + .../presenters/v3/BuiltInDashboards.server.ts | 225 +++ .../presenters/v3/LimitsPresenter.server.ts | 45 + .../v3/MetricDashboardPresenter.server.ts | 123 ++ .../route.tsx | 24 +- .../route.tsx | 295 ++++ .../route.tsx | 772 +++++++++ .../AITabContent.tsx | 14 +- .../ExamplesContent.tsx | 13 + .../QueryHistoryPopover.tsx | 51 +- .../TRQLGuideContent.tsx | 5 + .../route.tsx | 960 +---------- 
.../_app.orgs.$organizationSlug/route.tsx | 51 +- apps/webapp/app/routes/resources.metric.tsx | 283 ++++ ...vParam.dashboards.$dashboardId.widgets.tsx | 492 ++++++ ...tParam.env.$envParam.dashboards.create.tsx | 84 + ...ces.orgs.$organizationSlug.select-plan.tsx | 69 +- .../routes/resources.preferences.sidemenu.tsx | 47 +- .../app/routes/storybook.charts/route.tsx | 8 +- .../app/services/clickhouseInstance.server.ts | 30 +- .../services/dashboardPreferences.server.ts | 95 +- .../app/services/queryService.server.ts | 194 ++- apps/webapp/app/tailwind.css | 40 + apps/webapp/app/utils/pathBuilder.ts | 22 +- apps/webapp/app/v3/querySchemas.ts | 5 + .../app/v3/services/aiQueryService.server.ts | 57 +- apps/webapp/package.json | 4 +- apps/webapp/tailwind.config.js | 6 + .../clickhouse/src/client/tsql.ts | 10 +- .../migration.sql | 25 + .../migration.sql | 3 + .../migration.sql | 5 + .../migration.sql | 2 + .../database/prisma/schema.prisma | 147 +- internal-packages/tsql/src/index.ts | 42 +- .../tsql/src/query/printer.test.ts | 285 +++- internal-packages/tsql/src/query/printer.ts | 117 +- .../tsql/src/query/printer_context.ts | 47 +- internal-packages/tsql/src/query/schema.ts | 18 + .../tsql/src/query/time_buckets.test.ts | 181 ++ .../tsql/src/query/time_buckets.ts | 86 + internal-packages/tsql/src/query/validator.ts | 7 +- pnpm-lock.yaml | 64 +- 89 files changed, 9403 insertions(+), 1943 deletions(-) create mode 100644 apps/webapp/app/components/code/chartColors.ts create mode 100644 apps/webapp/app/components/metrics/QueryWidget.tsx create mode 100644 apps/webapp/app/components/metrics/QueuesFilter.tsx create mode 100644 apps/webapp/app/components/metrics/SaveToDashboardDialog.tsx create mode 100644 apps/webapp/app/components/metrics/ScopeFilter.tsx create mode 100644 apps/webapp/app/components/metrics/TitleWidget.tsx create mode 100644 apps/webapp/app/components/navigation/DashboardDialogs.tsx create mode 100644 apps/webapp/app/components/navigation/DashboardList.tsx 
create mode 100644 apps/webapp/app/components/navigation/TreeConnectors.tsx create mode 100644 apps/webapp/app/components/navigation/sideMenuTypes.ts create mode 100644 apps/webapp/app/components/navigation/useReorderableList.ts delete mode 100644 apps/webapp/app/components/primitives/charts/BigNumber.tsx create mode 100644 apps/webapp/app/components/primitives/charts/BigNumberCard.tsx create mode 100644 apps/webapp/app/components/query/QueryEditor.tsx create mode 100644 apps/webapp/app/hooks/useDashboardEditor.ts create mode 100644 apps/webapp/app/hooks/useElementVisibility.ts create mode 100644 apps/webapp/app/hooks/useInterval.ts create mode 100644 apps/webapp/app/hooks/useRevalidateOnParam.ts create mode 100644 apps/webapp/app/presenters/v3/BuiltInDashboards.server.ts create mode 100644 apps/webapp/app/presenters/v3/MetricDashboardPresenter.server.ts create mode 100644 apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.metrics.$dashboardKey/route.tsx create mode 100644 apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.metrics.custom.$dashboardId/route.tsx create mode 100644 apps/webapp/app/routes/resources.metric.tsx create mode 100644 apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.dashboards.$dashboardId.widgets.tsx create mode 100644 apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.dashboards.create.tsx create mode 100644 internal-packages/database/prisma/migrations/20260201130503_metrics_dashboard_table_created/migration.sql create mode 100644 internal-packages/database/prisma/migrations/20260202044337_metrics_dashboard_description/migration.sql create mode 100644 internal-packages/database/prisma/migrations/20260202100000_add_friendlyid_to_metrics_dashboard/migration.sql create mode 100644 
internal-packages/database/prisma/migrations/20260211120000_make_metrics_dashboard_owner_nullable/migration.sql create mode 100644 internal-packages/tsql/src/query/time_buckets.test.ts create mode 100644 internal-packages/tsql/src/query/time_buckets.ts diff --git a/.vscode/settings.json b/.vscode/settings.json index 382a5ae6201..fd9f3dcde0c 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -7,6 +7,5 @@ "packages/cli-v3/e2e": true }, "vitest.disableWorkspaceWarning": true, - "typescript.experimental.useTsgo": true, "chat.agent.maxRequests": 10000 } diff --git a/apps/webapp/app/components/AlphaBadge.tsx b/apps/webapp/app/components/AlphaBadge.tsx index 58da1a994cd..0a1c4a7fc9a 100644 --- a/apps/webapp/app/components/AlphaBadge.tsx +++ b/apps/webapp/app/components/AlphaBadge.tsx @@ -30,3 +30,32 @@ export function AlphaTitle({ children }: { children: React.ReactNode }) { ); } + +export function BetaBadge({ + inline = false, + className, +}: { + inline?: boolean; + className?: string; +}) { + return ( + + Beta + + } + content="This feature is in Beta." 
+ disableHoverableContent + /> + ); +} + +export function BetaTitle({ children }: { children: React.ReactNode }) { + return ( + <> + {children} + + + ); +} diff --git a/apps/webapp/app/components/code/AIQueryInput.tsx b/apps/webapp/app/components/code/AIQueryInput.tsx index 38d0c9b21b1..0775ec2c2a0 100644 --- a/apps/webapp/app/components/code/AIQueryInput.tsx +++ b/apps/webapp/app/components/code/AIQueryInput.tsx @@ -1,7 +1,13 @@ -import { PencilSquareIcon, PlusIcon, SparklesIcon } from "@heroicons/react/20/solid"; +import { CheckIcon, PencilSquareIcon, PlusIcon, XMarkIcon } from "@heroicons/react/20/solid"; import { AnimatePresence, motion } from "framer-motion"; import { Suspense, lazy, useCallback, useEffect, useRef, useState } from "react"; -import { AISparkleIcon } from "~/assets/icons/AISparkleIcon"; +import { Button } from "~/components/primitives/Buttons"; +import { Spinner } from "~/components/primitives/Spinner"; +import { useEnvironment } from "~/hooks/useEnvironment"; +import { useOrganization } from "~/hooks/useOrganizations"; +import { useProject } from "~/hooks/useProject"; +import type { AITimeFilter } from "~/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.query/types"; +import { cn } from "~/utils/cn"; // Lazy load streamdown components to avoid SSR issues const StreamdownRenderer = lazy(() => @@ -13,13 +19,6 @@ const StreamdownRenderer = lazy(() => ), })) ); -import { Button } from "~/components/primitives/Buttons"; -import { Spinner } from "~/components/primitives/Spinner"; -import { useEnvironment } from "~/hooks/useEnvironment"; -import { useOrganization } from "~/hooks/useOrganizations"; -import { useProject } from "~/hooks/useProject"; -import type { AITimeFilter } from "~/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.query/types"; -import { cn } from "~/utils/cn"; type StreamEventType = | { type: "thinking"; content: string } @@ -179,21 +178,7 @@ export function AIQueryInput({ 
setThinking((prev) => prev + event.content); break; case "tool_call": - if (event.tool === "setTimeFilter") { - setThinking((prev) => { - if (prev.trimEnd().endsWith("Setting time filter...")) { - return prev; - } - return prev + `\nSetting time filter...\n`; - }); - } else { - setThinking((prev) => { - if (prev.trimEnd().endsWith("Validating query...")) { - return prev; - } - return prev + `\nValidating query...\n`; - }); - } + // Tool calls are handled silently — no UI text needed break; case "time_filter": // Apply time filter immediately when the AI sets it @@ -262,13 +247,13 @@ export function AIQueryInput({ }, [error]); return ( -
+
{/* Gradient border wrapper like the schedules AI input */}
-
+