diff --git a/.github/prompts/bug-report-review.prompt.yml b/.github/prompts/bug-report-review.prompt.yml index 23c4bf70d2..ccb95eff0c 100644 --- a/.github/prompts/bug-report-review.prompt.yml +++ b/.github/prompts/bug-report-review.prompt.yml @@ -5,26 +5,38 @@ messages: Your job is to analyze bug reports and assess their completeness. + **CRITICAL: Detect unfilled templates** + - Flag issues containing unmodified template text like "A clear and concise description of what the bug is" + - Flag placeholder values like "Type this '...'" or "View the output '....'" that haven't been replaced + - Flag generic/meaningless titles (e.g., random words, test content) + - These are ALWAYS "Missing Details" even if the template structure is present + Analyze the issue for these key elements: - 1. Clear description of the problem + 1. Clear description of the problem (not template text) 2. Affected version (from running `docker run -i --rm ghcr.io/github/github-mcp-server ./github-mcp-server --version`) - 3. Steps to reproduce the behavior - 4. Expected vs actual behavior + 3. Steps to reproduce the behavior (actual steps, not placeholders) + 4. Expected vs actual behavior (real descriptions, not template text) 5. Relevant logs (if applicable) Provide ONE of these assessments: ### AI Assessment: Ready for Review - Use when the bug report has most required information and can be triaged by a maintainer. + Use when the bug report has actual information in required fields and can be triaged by a maintainer. ### AI Assessment: Missing Details - Use when critical information is missing (no reproduction steps, no version info, unclear problem description). 
+ Use when: + - Template text has not been replaced with actual content + - Critical information is missing (no reproduction steps, no version info, unclear problem description) + - The title is meaningless or spam-like + - Placeholder text remains in any section + + When marking as Missing Details, recommend adding the "waiting-for-reply" label. ### AI Assessment: Unsure Use when you cannot determine the completeness of the report. After your assessment header, provide a brief explanation of your rating. - If details are missing, note which specific sections need more information. + If details are missing, be specific about which sections contain template text or need actual information. - role: user content: "{{input}}" model: openai/gpt-4o-mini diff --git a/.github/prompts/default-issue-review.prompt.yml b/.github/prompts/default-issue-review.prompt.yml index 6b4cd4a2bd..a574c9d89b 100644 --- a/.github/prompts/default-issue-review.prompt.yml +++ b/.github/prompts/default-issue-review.prompt.yml @@ -5,24 +5,47 @@ messages: Your job is to analyze new issues and help categorize them. + **CRITICAL: Detect invalid or incomplete submissions** + - Flag issues with unmodified template text (e.g., "A clear and concise description...") + - Flag placeholder values that haven't been replaced (e.g., "Type this '...'", "....", "XXX") + - Flag meaningless, spam-like, or test titles (e.g., random words, nonsensical content) + - Flag empty or nearly empty issues + - These are ALWAYS "Missing Details" or "Invalid" depending on severity + Analyze the issue to determine: - 1. Is this a bug report, feature request, question, or something else? - 2. Is the issue clear and well-described? + 1. Is this a bug report, feature request, question, documentation issue, or something else? + 2. Is the issue clear and well-described with actual content (not template text)? 3. Does it contain enough information for maintainers to act on? + 4. 
Is this potentially spam, a test issue, or completely invalid? Provide ONE of these assessments: ### AI Assessment: Ready for Review - Use when the issue is clear, well-described, and contains enough context for maintainers to understand and act on it. + Use when the issue is clear, well-described with actual content, and contains enough context for maintainers to understand and act on it. ### AI Assessment: Missing Details - Use when the issue is unclear, lacks context, or needs more information to be actionable. + Use when: + - Template text has not been replaced with actual content + - The issue is unclear or lacks context + - Critical information is missing to make it actionable + - The title is vague but the issue seems legitimate + + When marking as Missing Details, recommend adding the "waiting-for-reply" label. + + ### AI Assessment: Invalid + Use when: + - The issue appears to be spam or test content + - The title is completely meaningless and body has no useful information + - This doesn't relate to the GitHub MCP Server project at all + + When marking as Invalid, recommend adding the "invalid" label and consider closing. ### AI Assessment: Unsure Use when you cannot determine the nature or completeness of the issue. After your assessment header, provide a brief explanation including: - - What type of issue this appears to be (bug, feature request, question, etc.) + - What type of issue this appears to be (bug, feature request, question, invalid, etc.) 
+ - Which specific sections contain template text or need actual information - What additional information might be helpful if any - role: user content: "{{input}}" diff --git a/.github/workflows/code-scanning.yml b/.github/workflows/code-scanning.yml index 02c19fc77e..453a7b7e65 100644 --- a/.github/workflows/code-scanning.yml +++ b/.github/workflows/code-scanning.yml @@ -74,6 +74,18 @@ jobs: go-version: ${{ fromJSON(steps.resolve-environment.outputs.environment).configuration.go.version }} cache: false + - name: Set up Node.js + if: matrix.language == 'go' + uses: actions/setup-node@v4 + with: + node-version: "20" + cache: "npm" + cache-dependency-path: ui/package-lock.json + + - name: Build UI + if: matrix.language == 'go' + run: script/build-ui + - name: Autobuild uses: github/codeql-action/autobuild@v4 diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml index 43eca9fad4..de53eb0aae 100644 --- a/.github/workflows/docker-publish.yml +++ b/.github/workflows/docker-publish.yml @@ -60,7 +60,7 @@ jobs: # https://github.com/docker/login-action - name: Log into registry ${{ env.REGISTRY }} if: github.event_name != 'pull_request' - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0 with: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} @@ -93,9 +93,14 @@ jobs: key: ${{ runner.os }}-go-build-cache-${{ hashFiles('**/go.sum') }} - name: Inject go-build-cache - uses: reproducible-containers/buildkit-cache-dance@4b2444fec0c0fb9dbf175a96c094720a692ef810 # v2.1.4 + uses: reproducible-containers/buildkit-cache-dance@6f699a72a59e4252f05a7435430009b77e25fe06 # v3.3.1 with: - cache-source: go-build-cache + cache-map: | + { + "go-build-cache/apk": "/var/cache/apk", + "go-build-cache/pkg": "/go/pkg/mod", + "go-build-cache/build": "/root/.cache/go-build" + } # Build and push Docker image with Buildx (don't push on PR) # 
https://github.com/docker/build-push-action diff --git a/.github/workflows/docs-check.yml b/.github/workflows/docs-check.yml index 5084a78a1d..de62d6282c 100644 --- a/.github/workflows/docs-check.yml +++ b/.github/workflows/docs-check.yml @@ -16,6 +16,16 @@ jobs: - name: Checkout code uses: actions/checkout@v6 + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: "20" + cache: "npm" + cache-dependency-path: ui/package-lock.json + + - name: Build UI + run: script/build-ui + - name: Set up Go uses: actions/setup-go@v6 with: diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index 181a99560e..f874b2b59d 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -25,6 +25,17 @@ jobs: - name: Check out code uses: actions/checkout@v6 + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: "20" + cache: "npm" + cache-dependency-path: ui/package-lock.json + + - name: Build UI + shell: bash + run: script/build-ui + - name: Set up Go uses: actions/setup-go@v6 with: @@ -34,6 +45,7 @@ jobs: run: go mod tidy -diff - name: Run unit tests + shell: bash run: script/test - name: Build diff --git a/.github/workflows/goreleaser.yml b/.github/workflows/goreleaser.yml index 167760cba8..f8eddc076c 100644 --- a/.github/workflows/goreleaser.yml +++ b/.github/workflows/goreleaser.yml @@ -16,6 +16,16 @@ jobs: - name: Check out code uses: actions/checkout@v6 + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: "20" + cache: "npm" + cache-dependency-path: ui/package-lock.json + + - name: Build UI + run: script/build-ui + - name: Set up Go uses: actions/setup-go@v6 with: diff --git a/.github/workflows/license-check.yml b/.github/workflows/license-check.yml index 9407732759..8726f82530 100644 --- a/.github/workflows/license-check.yml +++ b/.github/workflows/license-check.yml @@ -32,6 +32,16 @@ jobs: GH_TOKEN: ${{ github.token }} run: gh pr checkout ${{ github.event.pull_request.number }} + - 
name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: "20" + cache: "npm" + cache-dependency-path: ui/package-lock.json + + - name: Build UI + run: script/build-ui + - name: Set up Go uses: actions/setup-go@v6 with: diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index a1647446f4..3676cb4103 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -14,10 +14,18 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v6 + - uses: actions/setup-node@v4 + with: + node-version: "20" + cache: "npm" + cache-dependency-path: ui/package-lock.json + - name: Build UI + run: script/build-ui - uses: actions/setup-go@v6 with: - go-version: stable + go-version: '1.25' - name: golangci-lint uses: golangci/golangci-lint-action@v9 with: - version: v2.5 + # sync with script/lint + version: v2.9 diff --git a/.github/workflows/mcp-diff.yml b/.github/workflows/mcp-diff.yml index ba9b59c6e1..3c6c0149a8 100644 --- a/.github/workflows/mcp-diff.yml +++ b/.github/workflows/mcp-diff.yml @@ -19,6 +19,14 @@ jobs: with: fetch-depth: 0 + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + + - name: Build UI + run: script/build-ui + - name: Run MCP Server Diff uses: SamMorrowDrums/mcp-server-diff@v2.3.5 with: diff --git a/.gitignore b/.gitignore index eedf65165b..8d5d8b7ea2 100644 --- a/.gitignore +++ b/.gitignore @@ -23,3 +23,12 @@ e2e.test .history conformance-report/ + +# UI build artifacts +ui/dist/ +ui/node_modules/ + +# Embedded UI assets (built from ui/) +pkg/github/ui_dist/* +!pkg/github/ui_dist/.gitkeep +!pkg/github/ui_dist/.placeholder.html \ No newline at end of file diff --git a/.golangci.yml b/.golangci.yml index 6891db89e2..a32fc897e8 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -9,12 +9,14 @@ linters: - gosec - makezero - misspell + - modernize - nakedret - revive - errcheck - staticcheck - govet - ineffassign + - intrange - unused exclusions: generated: lax @@ -27,6 +29,11 @@ 
linters: - third_party$ - builtin$ - examples$ + - internal/githubv4mock + rules: + - linters: + - revive + text: "var-naming: avoid package names that conflict with Go standard library package names" settings: staticcheck: checks: diff --git a/.vscode/launch.json b/.vscode/launch.json index cea7fd917d..0d90e162a6 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -23,6 +23,16 @@ "program": "cmd/github-mcp-server/main.go", "args": ["stdio", "--read-only"], "console": "integratedTerminal", + }, + { + "name": "Launch http server", + "type": "go", + "request": "launch", + "mode": "auto", + "cwd": "${workspaceFolder}", + "program": "cmd/github-mcp-server/main.go", + "args": ["http", "--port", "8082"], + "console": "integratedTerminal", } ] } \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index f804c03aac..90c8b40079 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,13 @@ -FROM golang:1.25.6-alpine AS build +FROM node:20-alpine@sha256:09e2b3d9726018aecf269bd35325f46bf75046a643a66d28360ec71132750ec8 AS ui-build +WORKDIR /app +COPY ui/package*.json ./ui/ +RUN cd ui && npm ci +COPY ui/ ./ui/ +# Create output directory and build - vite outputs directly to pkg/github/ui_dist/ +RUN mkdir -p ./pkg/github/ui_dist && \ + cd ui && npm run build + +FROM golang:1.25.7-alpine@sha256:f6751d823c26342f9506c03797d2527668d095b0a15f1862cddb4d927a7a4ced AS build ARG VERSION="dev" # Set the working directory @@ -8,16 +17,20 @@ WORKDIR /build RUN --mount=type=cache,target=/var/cache/apk \ apk add git +# Copy source code (including ui_dist placeholder) +COPY . . + +# Copy built UI assets over the placeholder +COPY --from=ui-build /app/pkg/github/ui_dist/* ./pkg/github/ui_dist/ + # Build the server -# go build automatically download required module dependencies to /go/pkg/mod RUN --mount=type=cache,target=/go/pkg/mod \ --mount=type=cache,target=/root/.cache/go-build \ - --mount=type=bind,target=. 
\ CGO_ENABLED=0 go build -ldflags="-s -w -X main.version=${VERSION} -X main.commit=$(git rev-parse HEAD) -X main.date=$(date -u +%Y-%m-%dT%H:%M:%SZ)" \ -o /bin/github-mcp-server ./cmd/github-mcp-server # Make a stage to run the app -FROM gcr.io/distroless/base-debian12 +FROM gcr.io/distroless/base-debian12@sha256:937c7eaaf6f3f2d38a1f8c4aeff326f0c56e4593ea152e9e8f74d976dde52f56 # Add required MCP server annotation LABEL io.modelcontextprotocol.server.name="io.github.github/github-mcp-server" @@ -26,6 +39,8 @@ LABEL io.modelcontextprotocol.server.name="io.github.github/github-mcp-server" WORKDIR /server # Copy the binary from the build stage COPY --from=build /bin/github-mcp-server . +# Expose the default port +EXPOSE 8082 # Set the entrypoint to the server binary ENTRYPOINT ["/server/github-mcp-server"] # Default arguments for ENTRYPOINT diff --git a/README.md b/README.md index afe003002e..6e964a1925 100644 --- a/README.md +++ b/README.md @@ -983,9 +983,10 @@ The following sets of tools are available: - `fields`: Specific list of field IDs to include in the response when getting a project item (e.g. ["102589", "985201", "169875"]). If not provided, only the title field is included. Only used for 'get_project_item' method. (string[], optional) - `item_id`: The item's ID. Required for 'get_project_item' method. (number, optional) - `method`: The method to execute (string, required) - - `owner`: The owner (user or organization login). The name is not case sensitive. (string, required) + - `owner`: The owner (user or organization login). The name is not case sensitive. (string, optional) - `owner_type`: Owner type (user or org). If not provided, will be automatically detected. (string, optional) - - `project_number`: The project's number. (number, required) + - `project_number`: The project's number. (number, optional) + - `status_update_id`: The node ID of the project status update. Required for 'get_project_status_update' method. 
(string, optional) - **projects_list** - List GitHub Projects resources - **Required OAuth Scopes**: `read:project` @@ -997,11 +998,12 @@ The following sets of tools are available: - `owner`: The owner (user or organization login). The name is not case sensitive. (string, required) - `owner_type`: Owner type (user or org). If not provided, will automatically try both. (string, optional) - `per_page`: Results per page (max 50) (number, optional) - - `project_number`: The project's number. Required for 'list_project_fields' and 'list_project_items' methods. (number, optional) + - `project_number`: The project's number. Required for 'list_project_fields', 'list_project_items', and 'list_project_status_updates' methods. (number, optional) - `query`: Filter/query string. For list_projects: filter by title text and state (e.g. "roadmap is:open"). For list_project_items: advanced filtering using GitHub's project filtering syntax. (string, optional) - **projects_write** - Modify GitHub Project items - **Required OAuth Scopes**: `project` + - `body`: The body of the status update (markdown). Used for 'create_project_status_update' method. (string, optional) - `issue_number`: The issue number (use when item_type is 'issue' for 'add_project_item' method). Provide either issue_number or pull_request_number. (number, optional) - `item_id`: The project item ID. Required for 'update_project_item' and 'delete_project_item' methods. (number, optional) - `item_owner`: The owner (user or organization) of the repository containing the issue or pull request. Required for 'add_project_item' method. (string, optional) @@ -1012,6 +1014,9 @@ The following sets of tools are available: - `owner_type`: Owner type (user or org). If not provided, will be automatically detected. (string, optional) - `project_number`: The project's number. (number, required) - `pull_request_number`: The pull request number (use when item_type is 'pull_request' for 'add_project_item' method). 
Provide either issue_number or pull_request_number. (number, optional) + - `start_date`: The start date of the status update in YYYY-MM-DD format. Used for 'create_project_status_update' method. (string, optional) + - `status`: The status of the project. Used for 'create_project_status_update' method. (string, optional) + - `target_date`: The target date of the status update in YYYY-MM-DD format. Used for 'create_project_status_update' method. (string, optional) - `updated_field`: Object consisting of the ID of the project field to update and the new value for the field. To clear the field, set value to null. Example: {"id": 123456, "value": "New Value"}. Required for 'update_project_item' method. (object, optional) @@ -1033,6 +1038,14 @@ The following sets of tools are available: - `startSide`: For multi-line comments, the starting side of the diff that the comment applies to. LEFT indicates the previous state, RIGHT indicates the new state (string, optional) - `subjectType`: The level at which the comment is targeted (string, required) +- **add_reply_to_pull_request_comment** - Add reply to pull request comment + - **Required OAuth Scopes**: `repo` + - `body`: The text of the reply (string, required) + - `commentId`: The ID of the comment to reply to (number, required) + - `owner`: Repository owner (string, required) + - `pullNumber`: Pull request number (number, required) + - `repo`: Repository name (string, required) + - **create_pull_request** - Open new pull request - **Required OAuth Scopes**: `repo` - `base`: Branch to merge into (string, required) diff --git a/cmd/github-mcp-server/generate_docs.go b/cmd/github-mcp-server/generate_docs.go index 78fd6c40a9..7d7b1f6ab3 100644 --- a/cmd/github-mcp-server/generate_docs.go +++ b/cmd/github-mcp-server/generate_docs.go @@ -5,6 +5,7 @@ import ( "fmt" "net/url" "os" + "slices" "sort" "strings" @@ -234,7 +235,7 @@ func writeToolDoc(buf *strings.Builder, tool inventory.ServerTool) { for i, propName := range 
paramNames { prop := schema.Properties[propName] - required := contains(schema.Required, propName) + required := slices.Contains(schema.Required, propName) requiredStr := "optional" if required { requiredStr = "required" @@ -289,15 +290,6 @@ func scopesEqual(a, b []string) bool { return true } -func contains(slice []string, item string) bool { - for _, s := range slice { - if s == item { - return true - } - } - return false -} - // indentMultilineDescription adds the specified indent to all lines after the first line. // This ensures that multi-line descriptions maintain proper markdown list formatting. func indentMultilineDescription(description, indent string) string { @@ -319,14 +311,14 @@ func replaceSection(content, startMarker, endMarker, newContent string) (string, start := fmt.Sprintf("", startMarker) end := fmt.Sprintf("", endMarker) - startIdx := strings.Index(content, start) + before, _, ok := strings.Cut(content, start) endIdx := strings.Index(content, end) - if startIdx == -1 || endIdx == -1 { + if !ok || endIdx == -1 { return "", fmt.Errorf("markers not found: %s / %s", start, end) } var buf strings.Builder - buf.WriteString(content[:startIdx]) + buf.WriteString(before) buf.WriteString(start) buf.WriteString("\n") buf.WriteString(newContent) @@ -426,6 +418,7 @@ func generateRemoteOnlyToolsetsDoc() string { return strings.TrimSuffix(buf.String(), "\n") } + func generateDeprecatedAliasesDocs(docsPath string) error { // Read the current file content, err := os.ReadFile(docsPath) //#nosec G304 diff --git a/cmd/github-mcp-server/main.go b/cmd/github-mcp-server/main.go index c361a4d5ab..05c2c6e0be 100644 --- a/cmd/github-mcp-server/main.go +++ b/cmd/github-mcp-server/main.go @@ -9,6 +9,7 @@ import ( "github.com/github/github-mcp-server/internal/ghmcp" "github.com/github/github-mcp-server/pkg/github" + ghhttp "github.com/github/github-mcp-server/pkg/http" "github.com/spf13/cobra" "github.com/spf13/pflag" "github.com/spf13/viper" @@ -60,6 +61,14 @@ var ( } } 
+ // Parse excluded tools (similar to tools) + var excludeTools []string + if viper.IsSet("exclude_tools") { + if err := viper.UnmarshalKey("exclude_tools", &excludeTools); err != nil { + return fmt.Errorf("failed to unmarshal exclude-tools: %w", err) + } + } + // Parse enabled features (similar to toolsets) var enabledFeatures []string if viper.IsSet("features") { @@ -84,11 +93,37 @@ var ( ContentWindowSize: viper.GetInt("content-window-size"), LockdownMode: viper.GetBool("lockdown-mode"), InsidersMode: viper.GetBool("insiders"), + ExcludeTools: excludeTools, RepoAccessCacheTTL: &ttl, } return ghmcp.RunStdioServer(stdioServerConfig) }, } + + httpCmd = &cobra.Command{ + Use: "http", + Short: "Start HTTP server", + Long: `Start an HTTP server that listens for MCP requests over HTTP.`, + RunE: func(_ *cobra.Command, _ []string) error { + ttl := viper.GetDuration("repo-access-cache-ttl") + httpConfig := ghhttp.ServerConfig{ + Version: version, + Host: viper.GetString("host"), + Port: viper.GetInt("port"), + BaseURL: viper.GetString("base-url"), + ResourcePath: viper.GetString("base-path"), + ExportTranslations: viper.GetBool("export-translations"), + EnableCommandLogging: viper.GetBool("enable-command-logging"), + LogFilePath: viper.GetString("log-file"), + ContentWindowSize: viper.GetInt("content-window-size"), + LockdownMode: viper.GetBool("lockdown-mode"), + RepoAccessCacheTTL: &ttl, + ScopeChallenge: viper.GetBool("scope-challenge"), + } + + return ghhttp.RunHTTPServer(httpConfig) + }, + } ) func init() { @@ -100,6 +135,7 @@ func init() { // Add global flags that will be shared by all commands rootCmd.PersistentFlags().StringSlice("toolsets", nil, github.GenerateToolsetsHelp()) rootCmd.PersistentFlags().StringSlice("tools", nil, "Comma-separated list of specific tools to enable") + rootCmd.PersistentFlags().StringSlice("exclude-tools", nil, "Comma-separated list of tool names to disable regardless of other settings") 
rootCmd.PersistentFlags().StringSlice("features", nil, "Comma-separated list of feature flags to enable") rootCmd.PersistentFlags().Bool("dynamic-toolsets", false, "Enable dynamic toolsets") rootCmd.PersistentFlags().Bool("read-only", false, "Restrict the server to read-only operations") @@ -112,9 +148,16 @@ func init() { rootCmd.PersistentFlags().Bool("insiders", false, "Enable insiders features") rootCmd.PersistentFlags().Duration("repo-access-cache-ttl", 5*time.Minute, "Override the repo access cache TTL (e.g. 1m, 0s to disable)") + // HTTP-specific flags + httpCmd.Flags().Int("port", 8082, "HTTP server port") + httpCmd.Flags().String("base-url", "", "Base URL where this server is publicly accessible (for OAuth resource metadata)") + httpCmd.Flags().String("base-path", "", "Externally visible base path for the HTTP server (for OAuth resource metadata)") + httpCmd.Flags().Bool("scope-challenge", false, "Enable OAuth scope challenge responses") + // Bind flag to viper _ = viper.BindPFlag("toolsets", rootCmd.PersistentFlags().Lookup("toolsets")) _ = viper.BindPFlag("tools", rootCmd.PersistentFlags().Lookup("tools")) + _ = viper.BindPFlag("exclude_tools", rootCmd.PersistentFlags().Lookup("exclude-tools")) _ = viper.BindPFlag("features", rootCmd.PersistentFlags().Lookup("features")) _ = viper.BindPFlag("dynamic_toolsets", rootCmd.PersistentFlags().Lookup("dynamic-toolsets")) _ = viper.BindPFlag("read-only", rootCmd.PersistentFlags().Lookup("read-only")) @@ -126,9 +169,13 @@ func init() { _ = viper.BindPFlag("lockdown-mode", rootCmd.PersistentFlags().Lookup("lockdown-mode")) _ = viper.BindPFlag("insiders", rootCmd.PersistentFlags().Lookup("insiders")) _ = viper.BindPFlag("repo-access-cache-ttl", rootCmd.PersistentFlags().Lookup("repo-access-cache-ttl")) - + _ = viper.BindPFlag("port", httpCmd.Flags().Lookup("port")) + _ = viper.BindPFlag("base-url", httpCmd.Flags().Lookup("base-url")) + _ = viper.BindPFlag("base-path", httpCmd.Flags().Lookup("base-path")) + _ = 
viper.BindPFlag("scope-challenge", httpCmd.Flags().Lookup("scope-challenge")) // Add subcommands rootCmd.AddCommand(stdioCmd) + rootCmd.AddCommand(httpCmd) } func initConfig() { diff --git a/cmd/mcpcurl/main.go b/cmd/mcpcurl/main.go index 17b4bc77c4..f35e6926c3 100644 --- a/cmd/mcpcurl/main.go +++ b/cmd/mcpcurl/main.go @@ -73,8 +73,8 @@ type ( // RequestParams contains the tool name and arguments RequestParams struct { - Name string `json:"name"` - Arguments map[string]interface{} `json:"arguments"` + Name string `json:"name"` + Arguments map[string]any `json:"arguments"` } // Content matches the response format of a text content response @@ -308,8 +308,8 @@ func addCommandFromTool(toolsCmd *cobra.Command, tool *Tool, prettyPrint bool) { } // buildArgumentsMap extracts flag values into a map of arguments -func buildArgumentsMap(cmd *cobra.Command, tool *Tool) (map[string]interface{}, error) { - arguments := make(map[string]interface{}) +func buildArgumentsMap(cmd *cobra.Command, tool *Tool) (map[string]any, error) { + arguments := make(map[string]any) for name, prop := range tool.InputSchema.Properties { switch prop.Type { @@ -340,7 +340,7 @@ func buildArgumentsMap(cmd *cobra.Command, tool *Tool) (map[string]interface{}, } case "object": if jsonStr, _ := cmd.Flags().GetString(name + "-json"); jsonStr != "" { - var jsonArray []interface{} + var jsonArray []any if err := json.Unmarshal([]byte(jsonStr), &jsonArray); err != nil { return nil, fmt.Errorf("error parsing JSON for %s: %w", name, err) } @@ -355,7 +355,7 @@ func buildArgumentsMap(cmd *cobra.Command, tool *Tool) (map[string]interface{}, } // buildJSONRPCRequest creates a JSON-RPC request with the given tool name and arguments -func buildJSONRPCRequest(method, toolName string, arguments map[string]interface{}) (string, error) { +func buildJSONRPCRequest(method, toolName string, arguments map[string]any) (string, error) { id, err := rand.Int(rand.Reader, big.NewInt(10000)) if err != nil { return "", 
fmt.Errorf("failed to generate random ID: %w", err) @@ -432,7 +432,7 @@ func printResponse(response string, prettyPrint bool) error { // Extract text from content items of type "text" for _, content := range resp.Result.Content { if content.Type == "text" { - var textContentObj map[string]interface{} + var textContentObj map[string]any err := json.Unmarshal([]byte(content.Text), &textContentObj) if err == nil { @@ -445,7 +445,7 @@ func printResponse(response string, prettyPrint bool) error { } // Fallback parsing as JSONL - var textContentList []map[string]interface{} + var textContentList []map[string]any if err := json.Unmarshal([]byte(content.Text), &textContentList); err != nil { return fmt.Errorf("failed to parse text content as a list: %w", err) } diff --git a/docs/installation-guides/install-copilot-cli.md b/docs/installation-guides/install-copilot-cli.md index 5f95a03ef9..4ac5b3712c 100644 --- a/docs/installation-guides/install-copilot-cli.md +++ b/docs/installation-guides/install-copilot-cli.md @@ -1,10 +1,48 @@ # Install GitHub MCP Server in Copilot CLI -## Prerequisites +The GitHub MCP server comes pre-installed in Copilot CLI, with read-only tools enabled by default. -1. Copilot CLI installed (see [official Copilot CLI documentation](https://docs.github.com/en/copilot/concepts/agents/about-copilot-cli)) -2. [GitHub Personal Access Token](https://github.com/settings/personal-access-tokens/new) with appropriate scopes -3. 
For local installation: [Docker](https://www.docker.com/) installed and running +## Built-in Server + +To verify the server is available, from an active Copilot CLI session: + +```bash +/mcp show github-mcp-server +``` + +### Per-Session Customization + +Use CLI flags to customize the server for a session: + +```bash +# Enable an additional toolset +copilot --add-github-mcp-toolset discussions + +# Enable multiple additional toolsets +copilot --add-github-mcp-toolset discussions --add-github-mcp-toolset stargazers + +# Enable all toolsets +copilot --enable-all-github-mcp-tools + +# Enable a specific tool +copilot --add-github-mcp-tool list_discussions + +# Disable the built-in server entirely +copilot --disable-builtin-mcps +``` + +Run `copilot --help` for all available flags. For the list of toolsets, see [Available toolsets](../../README.md#available-toolsets); for the list of tools, see [Tools](../../README.md#tools). + +## Custom Configuration + +You can configure the GitHub MCP server in Copilot CLI using either the interactive command or by manually editing the configuration file. + +> **Server naming:** Name your server `github-mcp-server` to replace the built-in server, or use a different name (e.g., `github`) to run alongside it. + +### Prerequisites + +1. [GitHub Personal Access Token](https://github.com/settings/personal-access-tokens/new) with appropriate scopes +2. For local server: [Docker](https://www.docker.com/) installed and running
Storing Your PAT Securely @@ -19,21 +57,17 @@ export GITHUB_PERSONAL_ACCESS_TOKEN=your_token_here
-## GitHub MCP Server Configuration - -You can configure the GitHub MCP server in Copilot CLI using either the interactive command or by manually editing the configuration file. - ### Method 1: Interactive Setup (Recommended) -Use the Copilot CLI to interactively add the MCP server: +From an active Copilot CLI session, run the interactive command: ```bash /mcp add ``` -Follow the prompts to configure the GitHub MCP server. +Follow the prompts to configure the server. -### Method 2: Manual Configuration +### Method 2: Manual Setup Create or edit the configuration file `~/.copilot/mcp-config.json` and add one of the following configurations: @@ -45,6 +79,7 @@ Connect to the hosted MCP server: { "mcpServers": { "github": { + "type": "http", "url": "https://api.githubcopilot.com/mcp/", "headers": { "Authorization": "Bearer ${GITHUB_PERSONAL_ACCESS_TOKEN}" @@ -54,6 +89,8 @@ Connect to the hosted MCP server: } ``` +For additional options like toolsets and read-only mode, see the [remote server documentation](../remote-server.md#optional-headers). + #### Local Docker With Docker running, you can run the GitHub MCP server in a container: @@ -81,9 +118,13 @@ With Docker running, you can run the GitHub MCP server in a container: #### Binary -You can download the latest binary release from the [GitHub releases page](https://github.com/github/github-mcp-server/releases) or build it from source by running `go build -o github-mcp-server ./cmd/github-mcp-server`. 
+You can download the latest binary release from the [GitHub releases page](https://github.com/github/github-mcp-server/releases) or build it from source by running: -Then, replacing `/path/to/binary` with the actual path to your binary, configure Copilot CLI with: +```bash +go build -o github-mcp-server ./cmd/github-mcp-server +``` + +Then configure (replace `/path/to/binary` with the actual path): ```json { @@ -101,35 +142,30 @@ Then, replacing `/path/to/binary` with the actual path to your binary, configure ## Verification -To verify that the GitHub MCP server has been configured: - -1. Start or restart Copilot CLI -2. The GitHub tools should be available for use in your conversations +1. Restart Copilot CLI +2. Run `/mcp show` to list configured servers +3. Try: "List my GitHub repositories" ## Troubleshooting ### Local Server Issues - **Docker errors**: Ensure Docker Desktop is running - ```bash - docker --version - ``` - **Image pull failures**: Try `docker logout ghcr.io` then retry -- **Docker not found**: Install Docker Desktop and ensure it's running ### Authentication Issues - **Invalid PAT**: Verify your GitHub PAT has correct scopes: - - `repo` - Repository operations - - `read:packages` - Docker image access (if using Docker) + - `repo` - Repository operations + - `read:packages` - Docker image access (if using Docker) - **Token expired**: Generate a new GitHub PAT ### Configuration Issues - **Invalid JSON**: Validate your configuration: - ```bash - cat ~/.copilot/mcp-config.json | jq . - ``` + ```bash + cat ~/.copilot/mcp-config.json | jq . 
+ ``` ## References diff --git a/docs/remote-server.md b/docs/remote-server.md index 1496673936..cad9ed6040 100644 --- a/docs/remote-server.md +++ b/docs/remote-server.md @@ -121,13 +121,15 @@ The Remote GitHub MCP server supports the following URL path patterns: - `/` - Default toolset (see ["default" toolset](../README.md#default-toolset)) - `/readonly` - Default toolset in read-only mode - `/insiders` - Default toolset with insiders mode enabled -- `/insiders/readonly` - Default toolset with insiders mode in read-only mode +- `/readonly/insiders` - Default toolset in read-only mode with insiders mode enabled - `/x/all` - All available toolsets - `/x/all/readonly` - All available toolsets in read-only mode - `/x/all/insiders` - All available toolsets with insiders mode enabled +- `/x/all/readonly/insiders` - All available toolsets in read-only mode with insiders mode enabled - `/x/{toolset}` - Single specific toolset - `/x/{toolset}/readonly` - Single specific toolset in read-only mode - `/x/{toolset}/insiders` - Single specific toolset with insiders mode enabled +- `/x/{toolset}/readonly/insiders` - Single specific toolset in read-only mode with insiders mode enabled Note: `{toolset}` can only be a single toolset, not a comma-separated list. To combine multiple toolsets, use the `X-MCP-Toolsets` header instead. Path modifiers like `/readonly` and `/insiders` can be combined with the `X-MCP-Insiders` or `X-MCP-Readonly` headers. 
diff --git a/docs/server-configuration.md b/docs/server-configuration.md index 46ec3bc64e..506ac0354e 100644 --- a/docs/server-configuration.md +++ b/docs/server-configuration.md @@ -9,6 +9,7 @@ We currently support the following ways in which the GitHub MCP Server can be co |---------------|---------------|--------------| | Toolsets | `X-MCP-Toolsets` header or `/x/{toolset}` URL | `--toolsets` flag or `GITHUB_TOOLSETS` env var | | Individual Tools | `X-MCP-Tools` header | `--tools` flag or `GITHUB_TOOLS` env var | +| Exclude Tools | `X-MCP-Exclude-Tools` header | `--exclude-tools` flag or `GITHUB_EXCLUDE_TOOLS` env var | | Read-Only Mode | `X-MCP-Readonly` header or `/readonly` URL | `--read-only` flag or `GITHUB_READ_ONLY` env var | | Dynamic Mode | Not available | `--dynamic-toolsets` flag or `GITHUB_DYNAMIC_TOOLSETS` env var | | Lockdown Mode | `X-MCP-Lockdown` header | `--lockdown-mode` flag or `GITHUB_LOCKDOWN_MODE` env var | @@ -20,10 +21,12 @@ We currently support the following ways in which the GitHub MCP Server can be co ## How Configuration Works -All configuration options are **composable**: you can combine toolsets, individual tools, dynamic discovery, read-only mode and lockdown mode in any way that suits your workflow. +All configuration options are **composable**: you can combine toolsets, individual tools, excluded tools, dynamic discovery, read-only mode and lockdown mode in any way that suits your workflow. Note: **read-only** mode acts as a strict security filter that takes precedence over any other configuration, by disabling write tools even when explicitly requested. +Note: **excluded tools** takes precedence over toolsets and individual tools — listed tools are always excluded, even if their toolset is enabled or they are explicitly added via `--tools` / `X-MCP-Tools`. 
+ --- ## Configuration Examples @@ -170,6 +173,56 @@ Enable entire toolsets, then add individual tools from toolsets you don't want f --- +### Excluding Specific Tools + +**Best for:** Users who want to enable a broad toolset but need to exclude specific tools for security, compliance, or to prevent undesired behavior. + +Listed tools are removed regardless of any other configuration — even if their toolset is enabled or they are individually added. + + + + + + + +
Remote ServerLocal Server
+ +```json +{ + "type": "http", + "url": "https://api.githubcopilot.com/mcp/", + "headers": { + "X-MCP-Toolsets": "pull_requests", + "X-MCP-Exclude-Tools": "create_pull_request,merge_pull_request" + } +} +``` + + + +```json +{ + "type": "stdio", + "command": "go", + "args": [ + "run", + "./cmd/github-mcp-server", + "stdio", + "--toolsets=pull_requests", + "--exclude-tools=create_pull_request,merge_pull_request" + ], + "env": { + "GITHUB_PERSONAL_ACCESS_TOKEN": "${input:github_token}" + } +} +``` + +
+ +**Result:** All pull request tools except `create_pull_request` and `merge_pull_request` — the user gets read and review tools only. + +--- + ### Read-Only Mode **Best for:** Security conscious users who want to ensure the server won't allow operations that modify issues, pull requests, repositories etc. diff --git a/docs/streamable-http.md b/docs/streamable-http.md new file mode 100644 index 0000000000..0a11c5ea76 --- /dev/null +++ b/docs/streamable-http.md @@ -0,0 +1,93 @@ +# Streamable HTTP Server + +The Streamable HTTP mode enables the GitHub MCP Server to run as an HTTP service, allowing clients to connect via standard HTTP protocols. This mode is ideal for deployment scenarios where stdio transport isn't suitable, such as reverse proxy setups, containerized environments, or distributed architectures. + +## Features + +- **Streamable HTTP Transport** — Full HTTP server with streaming support for real-time tool responses +- **OAuth Metadata Endpoints** — Standard `.well-known/oauth-protected-resource` discovery for OAuth clients +- **Scope Challenge Support** — Automatic scope validation with proper HTTP 403 responses and `WWW-Authenticate` headers +- **Scope Filtering** — Restrict available tools based on authenticated credentials and permissions +- **Custom Base Paths** — Support for reverse proxy deployments with customizable base URLs + +## Running the Server + +### Basic HTTP Server + +Start the server on the default port (8082): + +```bash +github-mcp-server http +``` + +The server will be available at `http://localhost:8082`. + +### With Scope Challenge + +Enable scope validation to enforce GitHub permission checks: + +```bash +github-mcp-server http --scope-challenge +``` + +When `--scope-challenge` is enabled, requests with insufficient scopes receive a `403 Forbidden` response with a `WWW-Authenticate` header indicating the required scopes. 
+ +### With OAuth Metadata Discovery + +For use behind reverse proxies or with custom domains, expose OAuth metadata endpoints: + +```bash +github-mcp-server http --scope-challenge --base-url https://myserver.com --base-path /mcp +``` + +The OAuth protected resource metadata's `resource` attribute will be populated with the full URL to the server's protected resource endpoint: + +```json +{ + "resource_name": "GitHub MCP Server", + "resource": "https://myserver.com/mcp", + "authorization_servers": [ + "https://github.com/login/oauth" + ], + "scopes_supported": [ + "repo", + ... + ], + ... +} +``` + +This allows OAuth clients to discover authentication requirements and endpoint information automatically. + +## Client Configuration + +### Using OAuth Authentication + +If your IDE or client has GitHub credentials configured (i.e. VS Code), simply reference the HTTP server: + +```json +{ + "type": "http", + "url": "http://localhost:8082" +} +``` + +The server will use the client's existing GitHub authentication. + +### Using Bearer Tokens or Custom Headers + +To provide PAT credentials, or to customize server behavior preferences, you can include additional headers in the client configuration: + +```json +{ + "type": "http", + "url": "http://localhost:8082", + "headers": { + "Authorization": "Bearer ghp_yourtokenhere", + "X-MCP-Toolsets": "default", + "X-MCP-Readonly": "true" + } +} +``` + +See [Remote Server](./remote-server.md) documentation for more details on client configuration options. 
diff --git a/e2e/e2e_test.go b/e2e/e2e_test.go index 86ff45b292..ad40ecad02 100644 --- a/e2e/e2e_test.go +++ b/e2e/e2e_test.go @@ -18,7 +18,7 @@ import ( "github.com/github/github-mcp-server/internal/ghmcp" "github.com/github/github-mcp-server/pkg/github" "github.com/github/github-mcp-server/pkg/translations" - gogithub "github.com/google/go-github/v79/github" + gogithub "github.com/google/go-github/v82/github" "github.com/modelcontextprotocol/go-sdk/mcp" "github.com/stretchr/testify/require" ) diff --git a/go.mod b/go.mod index 10bbde9d11..f1ffb02a2f 100644 --- a/go.mod +++ b/go.mod @@ -3,51 +3,48 @@ module github.com/github/github-mcp-server go 1.24.0 require ( - github.com/google/go-github/v79 v79.0.0 + github.com/go-chi/chi/v5 v5.2.5 + github.com/go-viper/mapstructure/v2 v2.5.0 + github.com/google/go-github/v82 v82.0.0 github.com/google/jsonschema-go v0.4.2 - github.com/josephburnett/jd v1.9.2 + github.com/josephburnett/jd/v2 v2.4.0 + github.com/lithammer/fuzzysearch v1.1.8 github.com/microcosm-cc/bluemonday v1.0.27 + github.com/modelcontextprotocol/go-sdk v1.3.0 github.com/muesli/cache2go v0.0.0-20221011235721-518229cd8021 + github.com/shurcooL/githubv4 v0.0.0-20240727222349-48295856cce7 + github.com/shurcooL/graphql v0.0.0-20230722043721-ed46e5a46466 github.com/spf13/cobra v1.10.2 + github.com/spf13/pflag v1.0.10 github.com/spf13/viper v1.21.0 github.com/stretchr/testify v1.11.1 + github.com/yosida95/uritemplate/v3 v3.0.2 ) require ( github.com/aymerick/douceur v0.2.0 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/fsnotify/fsnotify v1.9.0 // indirect - github.com/go-openapi/jsonpointer v0.19.5 // indirect - github.com/go-openapi/swag v0.21.1 // indirect - github.com/go-viper/mapstructure/v2 v2.5.0 - github.com/google/go-querystring v1.1.0 // indirect + github.com/go-openapi/jsonpointer v0.21.0 // indirect + github.com/go-openapi/swag v0.23.0 // indirect + github.com/google/go-querystring v1.2.0 // indirect 
github.com/gorilla/css v1.0.1 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/josharian/intern v1.0.0 // indirect - github.com/lithammer/fuzzysearch v1.1.8 github.com/mailru/easyjson v0.7.7 // indirect - github.com/modelcontextprotocol/go-sdk v1.2.0 github.com/pelletier/go-toml/v2 v2.2.4 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect - github.com/rogpeppe/go-internal v1.13.1 // indirect github.com/sagikazarmark/locafero v0.11.0 // indirect - github.com/shurcooL/githubv4 v0.0.0-20240727222349-48295856cce7 - github.com/shurcooL/graphql v0.0.0-20230722043721-ed46e5a46466 github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 // indirect github.com/spf13/afero v1.15.0 // indirect github.com/spf13/cast v1.10.0 // indirect - github.com/spf13/pflag v1.0.10 github.com/stretchr/objx v0.5.2 // indirect github.com/subosito/gotenv v1.6.0 // indirect - github.com/yosida95/uritemplate/v3 v3.0.2 - github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82 // indirect go.yaml.in/yaml/v3 v3.0.4 // indirect - golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 // indirect + golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 // indirect golang.org/x/net v0.38.0 // indirect golang.org/x/oauth2 v0.30.0 // indirect golang.org/x/sys v0.31.0 // indirect golang.org/x/text v0.28.0 // indirect - gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index b364f2ef31..fc8c2241b5 100644 --- a/go.sum +++ b/go.sum @@ -1,70 +1,59 @@ github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= 
-github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= -github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY= -github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= -github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= -github.com/go-openapi/swag v0.21.1 h1:wm0rhTb5z7qpJRHBdPOMuY4QjVUMbF6/kwoYeRAOrKU= -github.com/go-openapi/swag v0.21.1/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-chi/chi/v5 v5.2.5 h1:Eg4myHZBjyvJmAFjFvWgrqDTXFyOzjj7YIm3L3mu6Ug= +github.com/go-chi/chi/v5 v5.2.5/go.mod h1:X7Gx4mteadT3eDOMTsXzmI4/rwUpOwBHLpAfupzFJP0= +github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= +github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= +github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= +github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= github.com/go-viper/mapstructure/v2 v2.5.0 h1:vM5IJoUAy3d7zRSVtIwQgBj7BiWtMPfmPEgAXnvj1Ro= github.com/go-viper/mapstructure/v2 v2.5.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/golang-jwt/jwt/v5 v5.2.2 
h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8= github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= -github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= -github.com/google/go-github/v79 v79.0.0 h1:MdodQojuFPBhmtwHiBcIGLw/e/wei2PvFX9ndxK0X4Y= -github.com/google/go-github/v79 v79.0.0/go.mod h1:OAFbNhq7fQwohojb06iIIQAB9CBGYLq999myfUFnrS4= -github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= -github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= +github.com/google/go-github/v82 v82.0.0 h1:OH09ESON2QwKCUVMYmMcVu1IFKFoaZHwqYaUtr/MVfk= +github.com/google/go-github/v82 v82.0.0/go.mod h1:hQ6Xo0VKfL8RZ7z1hSfB4fvISg0QqHOqe9BP0qo+WvM= +github.com/google/go-querystring v1.2.0 h1:yhqkPbu2/OH+V9BfpCVPZkNmUXhb2gBxJArfhIxNtP0= +github.com/google/go-querystring v1.2.0/go.mod h1:8IFJqpSRITyJ8QhQ13bmbeMBDfmeEJZD5A0egEOmkqU= github.com/google/jsonschema-go v0.4.2 h1:tmrUohrwoLZZS/P3x7ex0WAVknEkBZM46iALbcqoRA8= github.com/google/jsonschema-go v0.4.2/go.mod h1:r5quNTdLOYEz95Ru18zA0ydNbBuYoo9tgaYcxEYhJVE= github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8= github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= -github.com/josephburnett/jd v1.9.2 h1:ECJRRFXCCqbtidkAHckHGSZm/JIaAxS1gygHLF8MI5Y= -github.com/josephburnett/jd v1.9.2/go.mod h1:bImDr8QXpxMb3SD+w1cDRHp97xP6UwI88xUAuxwDQfM= +github.com/josephburnett/jd/v2 v2.4.0 
h1:8MDRpbs/CATx4FR6Px8YMSp6NPGtI8pUWtDrgqI74tI= +github.com/josephburnett/jd/v2 v2.4.0/go.mod h1:0I5+gbo7y8diuajJjm79AF44eqTheSJy1K7DSbIUFAQ= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/lithammer/fuzzysearch v1.1.8 h1:/HIuJnjHuXS8bKaiTMeeDlW2/AyIWk2brx1V8LFgLN4= github.com/lithammer/fuzzysearch v1.1.8/go.mod h1:IdqeyBClc3FFqSzYq/MXESsS4S0FsZ5ajtkr5xPLts4= -github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= -github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= -github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk= github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA= -github.com/modelcontextprotocol/go-sdk v1.2.0 h1:Y23co09300CEk8iZ/tMxIX1dVmKZkzoSBZOpJwUnc/s= -github.com/modelcontextprotocol/go-sdk v1.2.0/go.mod 
h1:6fM3LCm3yV7pAs8isnKLn07oKtB0MP9LHd3DfAcKw10= +github.com/modelcontextprotocol/go-sdk v1.3.0 h1:gMfZkv3DzQF5q/DcQePo5rahEY+sguyPfXDfNBcT0Zs= +github.com/modelcontextprotocol/go-sdk v1.3.0/go.mod h1:AnQ//Qc6+4nIyyrB4cxBU7UW9VibK4iOZBeyP/rF1IE= github.com/muesli/cache2go v0.0.0-20221011235721-518229cd8021 h1:31Y+Yu373ymebRdJN1cWLLooHH8xAr0MhKTEJGV/87g= github.com/muesli/cache2go v0.0.0-20221011235721-518229cd8021/go.mod h1:WERUkUryfUWlrHnFSO/BEUZ+7Ns8aZy7iVOGewxKzcc= -github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= -github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/sagikazarmark/locafero v0.11.0 h1:1iurJgmM9G3PA/I+wWYIOw/5SyBtxapeHDcg+AAIFXc= github.com/sagikazarmark/locafero v0.11.0/go.mod h1:nVIGvgyzw595SUSUE6tvCp3YYTeHs15MvlmU87WwIik= @@ -85,26 +74,21 @@ github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.21.0 
h1:x5S+0EU27Lbphp4UKm1C+1oQO+rKx36vfCoaVebLFSU= github.com/spf13/viper v1.21.0/go.mod h1:P0lhsswPGWD/1lZJ9ny3fYnVqxiegrlNrEmgLjbTCAY= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4= github.com/yosida95/uritemplate/v3 v3.0.2/go.mod h1:ILOh0sOhIJR3+L/8afwt/kE++YT040gmv5BQTMR2HP4= -github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82 h1:BHyfKlQyqbsFN5p3IfnEUduWvb9is428/nNb5L3U01M= -github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8= -golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY= 
+golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw= +golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -143,16 +127,8 @@ golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.35.0 h1:mBffYraMEf7aa0sB+NuKnuCy8qI/9Bughn8dC2Gu5r0= golang.org/x/tools v0.35.0/go.mod h1:NKdj5HkL/73byiZSJjqJgKn3ep7KjFkBOkR/Hps3VPw= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 
h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/internal/ghmcp/server.go b/internal/ghmcp/server.go index b6e744d3a8..5c4e7f6f1b 100644 --- a/internal/ghmcp/server.go +++ b/internal/ghmcp/server.go @@ -6,7 +6,6 @@ import ( "io" "log/slog" "net/http" - "net/url" "os" "os/signal" "strings" @@ -15,69 +14,19 @@ import ( "github.com/github/github-mcp-server/pkg/errors" "github.com/github/github-mcp-server/pkg/github" + "github.com/github/github-mcp-server/pkg/http/transport" "github.com/github/github-mcp-server/pkg/inventory" "github.com/github/github-mcp-server/pkg/lockdown" mcplog "github.com/github/github-mcp-server/pkg/log" "github.com/github/github-mcp-server/pkg/raw" "github.com/github/github-mcp-server/pkg/scopes" "github.com/github/github-mcp-server/pkg/translations" - gogithub "github.com/google/go-github/v79/github" + "github.com/github/github-mcp-server/pkg/utils" + gogithub "github.com/google/go-github/v82/github" "github.com/modelcontextprotocol/go-sdk/mcp" "github.com/shurcooL/githubv4" ) -type MCPServerConfig struct { - // Version of the server - Version string - - // GitHub Host to target for API requests (e.g. 
github.com or github.enterprise.com) - Host string - - // GitHub Token to authenticate with the GitHub API - Token string - - // EnabledToolsets is a list of toolsets to enable - // See: https://github.com/github/github-mcp-server?tab=readme-ov-file#tool-configuration - EnabledToolsets []string - - // EnabledTools is a list of specific tools to enable (additive to toolsets) - // When specified, these tools are registered in addition to any specified toolset tools - EnabledTools []string - - // EnabledFeatures is a list of feature flags that are enabled - // Items with FeatureFlagEnable matching an entry in this list will be available - EnabledFeatures []string - - // Whether to enable dynamic toolsets - // See: https://github.com/github/github-mcp-server?tab=readme-ov-file#dynamic-tool-discovery - DynamicToolsets bool - - // ReadOnly indicates if we should only offer read-only tools - ReadOnly bool - - // Translator provides translated text for the server tooling - Translator translations.TranslationHelperFunc - - // Content window size - ContentWindowSize int - - // LockdownMode indicates if we should enable lockdown mode - LockdownMode bool - - // InsidersMode indicates if we should enable experimental features - InsidersMode bool - - // Logger is used for logging within the server - Logger *slog.Logger - // RepoAccessTTL overrides the default TTL for repository access cache entries. - RepoAccessTTL *time.Duration - - // TokenScopes contains the OAuth scopes available to the token. - // When non-nil, tools requiring scopes not in this list will be hidden. - // This is used for PAT scope filtering where we can't issue scope challenges. - TokenScopes []string -} - // githubClients holds all the GitHub API clients created for a server instance. type githubClients struct { rest *gogithub.Client @@ -88,27 +37,48 @@ type githubClients struct { } // createGitHubClients creates all the GitHub API clients needed by the server. 
-func createGitHubClients(cfg MCPServerConfig, apiHost apiHost) (*githubClients, error) { +func createGitHubClients(cfg github.MCPServerConfig, apiHost utils.APIHostResolver) (*githubClients, error) { + restURL, err := apiHost.BaseRESTURL(context.Background()) + if err != nil { + return nil, fmt.Errorf("failed to get base REST URL: %w", err) + } + + uploadURL, err := apiHost.UploadURL(context.Background()) + if err != nil { + return nil, fmt.Errorf("failed to get upload URL: %w", err) + } + + graphQLURL, err := apiHost.GraphqlURL(context.Background()) + if err != nil { + return nil, fmt.Errorf("failed to get GraphQL URL: %w", err) + } + + rawURL, err := apiHost.RawURL(context.Background()) + if err != nil { + return nil, fmt.Errorf("failed to get Raw URL: %w", err) + } + // Construct REST client restClient := gogithub.NewClient(nil).WithAuthToken(cfg.Token) restClient.UserAgent = fmt.Sprintf("github-mcp-server/%s", cfg.Version) - restClient.BaseURL = apiHost.baseRESTURL - restClient.UploadURL = apiHost.uploadURL + restClient.BaseURL = restURL + restClient.UploadURL = uploadURL // Construct GraphQL client // We use NewEnterpriseClient unconditionally since we already parsed the API host gqlHTTPClient := &http.Client{ - Transport: &bearerAuthTransport{ - transport: &github.GraphQLFeaturesTransport{ + Transport: &transport.BearerAuthTransport{ + Transport: &transport.GraphQLFeaturesTransport{ Transport: http.DefaultTransport, }, - token: cfg.Token, + Token: cfg.Token, }, } - gqlClient := githubv4.NewEnterpriseClient(apiHost.graphqlURL.String(), gqlHTTPClient) + + gqlClient := githubv4.NewEnterpriseClient(graphQLURL.String(), gqlHTTPClient) // Create raw content client (shares REST client's HTTP transport) - rawClient := raw.NewClient(restClient, apiHost.rawURL) + rawClient := raw.NewClient(restClient, rawURL) // Set up repo access cache for lockdown mode var repoAccessCache *lockdown.RepoAccessCache @@ -131,35 +101,8 @@ func createGitHubClients(cfg MCPServerConfig, 
apiHost apiHost) (*githubClients, }, nil } -// resolveEnabledToolsets determines which toolsets should be enabled based on config. -// Returns nil for "use defaults", empty slice for "none", or explicit list. -func resolveEnabledToolsets(cfg MCPServerConfig) []string { - enabledToolsets := cfg.EnabledToolsets - - // In dynamic mode, remove "all" and "default" since users enable toolsets on demand - if cfg.DynamicToolsets && enabledToolsets != nil { - enabledToolsets = github.RemoveToolset(enabledToolsets, string(github.ToolsetMetadataAll.ID)) - enabledToolsets = github.RemoveToolset(enabledToolsets, string(github.ToolsetMetadataDefault.ID)) - } - - if enabledToolsets != nil { - return enabledToolsets - } - if cfg.DynamicToolsets { - // Dynamic mode with no toolsets specified: start empty so users enable on demand - return []string{} - } - if len(cfg.EnabledTools) > 0 { - // When specific tools are requested but no toolsets, don't use default toolsets - // This matches the original behavior: --tools=X alone registers only X - return []string{} - } - // nil means "use defaults" in WithToolsets - return nil -} - -func NewMCPServer(cfg MCPServerConfig) (*mcp.Server, error) { - apiHost, err := parseAPIHost(cfg.Host) +func NewStdioMCPServer(ctx context.Context, cfg github.MCPServerConfig) (*mcp.Server, error) { + apiHost, err := utils.NewAPIHost(cfg.Host) if err != nil { return nil, fmt.Errorf("failed to parse API host: %w", err) } @@ -169,19 +112,33 @@ func NewMCPServer(cfg MCPServerConfig) (*mcp.Server, error) { return nil, fmt.Errorf("failed to create GitHub clients: %w", err) } - enabledToolsets := resolveEnabledToolsets(cfg) - // Create feature checker featureChecker := createFeatureChecker(cfg.EnabledFeatures) + // Create dependencies for tool handlers + deps := github.NewBaseDeps( + clients.rest, + clients.gql, + clients.raw, + clients.repoAccess, + cfg.Translator, + github.FeatureFlags{ + LockdownMode: cfg.LockdownMode, + InsidersMode: cfg.InsidersMode, + }, + 
cfg.ContentWindowSize, + featureChecker, + ) // Build and register the tool/resource/prompt inventory inventoryBuilder := github.NewInventory(cfg.Translator). WithDeprecatedAliases(github.DeprecatedToolAliases). WithReadOnly(cfg.ReadOnly). - WithToolsets(enabledToolsets). - WithTools(cfg.EnabledTools). + WithToolsets(github.ResolvedEnabledToolsets(cfg.DynamicToolsets, cfg.EnabledToolsets, cfg.EnabledTools)). + WithTools(github.CleanTools(cfg.EnabledTools)). + WithExcludeTools(cfg.ExcludeTools). + WithServerInstructions(). WithFeatureChecker(featureChecker). - WithServerInstructions() + WithInsidersMode(cfg.InsidersMode) // Apply token scope filtering if scopes are known (for PAT filtering) if cfg.TokenScopes != nil { @@ -193,99 +150,23 @@ func NewMCPServer(cfg MCPServerConfig) (*mcp.Server, error) { return nil, fmt.Errorf("failed to build inventory: %w", err) } - // Create the MCP server - serverOpts := &mcp.ServerOptions{ - Instructions: inventory.Instructions(), - Logger: cfg.Logger, - CompletionHandler: github.CompletionsHandler(func(_ context.Context) (*gogithub.Client, error) { - return clients.rest, nil - }), + ghServer, err := github.NewMCPServer(ctx, &cfg, deps, inventory) + if err != nil { + return nil, fmt.Errorf("failed to create GitHub MCP server: %w", err) } - // In dynamic mode, explicitly advertise capabilities since tools/resources/prompts - // may be enabled at runtime even if none are registered initially. - if cfg.DynamicToolsets { - serverOpts.Capabilities = &mcp.ServerCapabilities{ - Tools: &mcp.ToolCapabilities{}, - Resources: &mcp.ResourceCapabilities{}, - Prompts: &mcp.PromptCapabilities{}, - } + // Register MCP App UI resources if available (requires running script/build-ui). + // We check availability to allow Insiders mode to work for non-UI features + // even when UI assets haven't been built. 
+ if cfg.InsidersMode && github.UIAssetsAvailable() { + github.RegisterUIResources(ghServer) } - ghServer := github.NewServer(cfg.Version, serverOpts) - - // Add middlewares - ghServer.AddReceivingMiddleware(addGitHubAPIErrorToContext) ghServer.AddReceivingMiddleware(addUserAgentsMiddleware(cfg, clients.rest, clients.gqlHTTP)) - // Create dependencies for tool handlers - deps := github.NewBaseDeps( - clients.rest, - clients.gql, - clients.raw, - clients.repoAccess, - cfg.Translator, - github.FeatureFlags{ - LockdownMode: cfg.LockdownMode, - InsidersMode: cfg.InsidersMode, - }, - cfg.ContentWindowSize, - featureChecker, - ) - - // Inject dependencies into context for all tool handlers - ghServer.AddReceivingMiddleware(func(next mcp.MethodHandler) mcp.MethodHandler { - return func(ctx context.Context, method string, req mcp.Request) (mcp.Result, error) { - return next(github.ContextWithDeps(ctx, deps), method, req) - } - }) - - if unrecognized := inventory.UnrecognizedToolsets(); len(unrecognized) > 0 { - fmt.Fprintf(os.Stderr, "Warning: unrecognized toolsets ignored: %s\n", strings.Join(unrecognized, ", ")) - } - - // Register GitHub tools/resources/prompts from the inventory. - // In dynamic mode with no explicit toolsets, this is a no-op since enabledToolsets - // is empty - users enable toolsets at runtime via the dynamic tools below (but can - // enable toolsets or tools explicitly that do need registration). - inventory.RegisterAll(context.Background(), ghServer, deps) - - // Register dynamic toolset management tools (enable/disable) - these are separate - // meta-tools that control the inventory, not part of the inventory itself - if cfg.DynamicToolsets { - registerDynamicTools(ghServer, inventory, deps, cfg.Translator) - } - return ghServer, nil } -// registerDynamicTools adds the dynamic toolset enable/disable tools to the server. 
-func registerDynamicTools(server *mcp.Server, inventory *inventory.Inventory, deps *github.BaseDeps, t translations.TranslationHelperFunc) { - dynamicDeps := github.DynamicToolDependencies{ - Server: server, - Inventory: inventory, - ToolDeps: deps, - T: t, - } - for _, tool := range github.DynamicTools(inventory) { - tool.RegisterFunc(server, dynamicDeps) - } -} - -// createFeatureChecker returns a FeatureFlagChecker that checks if a flag name -// is present in the provided list of enabled features. For the local server, -// this is populated from the --features CLI flag. -func createFeatureChecker(enabledFeatures []string) inventory.FeatureFlagChecker { - // Build a set for O(1) lookup - featureSet := make(map[string]bool, len(enabledFeatures)) - for _, f := range enabledFeatures { - featureSet[f] = true - } - return func(_ context.Context, flagName string) (bool, error) { - return featureSet[flagName], nil - } -} - type StdioServerConfig struct { // Version of the server Version string @@ -334,6 +215,11 @@ type StdioServerConfig struct { // InsidersMode indicates if we should enable experimental features InsidersMode bool + // ExcludeTools is a list of tool names to disable regardless of other settings. + // These tools will be excluded even if their toolset is enabled or they are + // explicitly listed in EnabledTools. + ExcludeTools []string + // RepoAccessCacheTTL overrides the default TTL for repository access cache entries. 
RepoAccessCacheTTL *time.Duration } @@ -378,7 +264,7 @@ func RunStdioServer(cfg StdioServerConfig) error { logger.Debug("skipping scope filtering for non-PAT token") } - ghServer, err := NewMCPServer(MCPServerConfig{ + ghServer, err := NewStdioMCPServer(ctx, github.MCPServerConfig{ Version: cfg.Version, Host: cfg.Host, Token: cfg.Token, @@ -391,6 +277,7 @@ func RunStdioServer(cfg StdioServerConfig) error { ContentWindowSize: cfg.ContentWindowSize, LockdownMode: cfg.LockdownMode, InsidersMode: cfg.InsidersMode, + ExcludeTools: cfg.ExcludeTools, Logger: logger, RepoAccessTTL: cfg.RepoAccessCacheTTL, TokenScopes: tokenScopes, @@ -440,214 +327,21 @@ func RunStdioServer(cfg StdioServerConfig) error { return nil } -type apiHost struct { - baseRESTURL *url.URL - graphqlURL *url.URL - uploadURL *url.URL - rawURL *url.URL -} - -func newDotcomHost() (apiHost, error) { - baseRestURL, err := url.Parse("https://api.github.com/") - if err != nil { - return apiHost{}, fmt.Errorf("failed to parse dotcom REST URL: %w", err) - } - - gqlURL, err := url.Parse("https://api.github.com/graphql") - if err != nil { - return apiHost{}, fmt.Errorf("failed to parse dotcom GraphQL URL: %w", err) - } - - uploadURL, err := url.Parse("https://uploads.github.com") - if err != nil { - return apiHost{}, fmt.Errorf("failed to parse dotcom Upload URL: %w", err) - } - - rawURL, err := url.Parse("https://raw.githubusercontent.com/") - if err != nil { - return apiHost{}, fmt.Errorf("failed to parse dotcom Raw URL: %w", err) - } - - return apiHost{ - baseRESTURL: baseRestURL, - graphqlURL: gqlURL, - uploadURL: uploadURL, - rawURL: rawURL, - }, nil -} - -func newGHECHost(hostname string) (apiHost, error) { - u, err := url.Parse(hostname) - if err != nil { - return apiHost{}, fmt.Errorf("failed to parse GHEC URL: %w", err) - } - - // Unsecured GHEC would be an error - if u.Scheme == "http" { - return apiHost{}, fmt.Errorf("GHEC URL must be HTTPS") - } - - restURL, err := 
url.Parse(fmt.Sprintf("https://api.%s/", u.Hostname())) - if err != nil { - return apiHost{}, fmt.Errorf("failed to parse GHEC REST URL: %w", err) - } - - gqlURL, err := url.Parse(fmt.Sprintf("https://api.%s/graphql", u.Hostname())) - if err != nil { - return apiHost{}, fmt.Errorf("failed to parse GHEC GraphQL URL: %w", err) - } - - uploadURL, err := url.Parse(fmt.Sprintf("https://uploads.%s/", u.Hostname())) - if err != nil { - return apiHost{}, fmt.Errorf("failed to parse GHEC Upload URL: %w", err) - } - - rawURL, err := url.Parse(fmt.Sprintf("https://raw.%s/", u.Hostname())) - if err != nil { - return apiHost{}, fmt.Errorf("failed to parse GHEC Raw URL: %w", err) - } - - return apiHost{ - baseRESTURL: restURL, - graphqlURL: gqlURL, - uploadURL: uploadURL, - rawURL: rawURL, - }, nil -} - -func newGHESHost(hostname string) (apiHost, error) { - u, err := url.Parse(hostname) - if err != nil { - return apiHost{}, fmt.Errorf("failed to parse GHES URL: %w", err) - } - - restURL, err := url.Parse(fmt.Sprintf("%s://%s/api/v3/", u.Scheme, u.Hostname())) - if err != nil { - return apiHost{}, fmt.Errorf("failed to parse GHES REST URL: %w", err) - } - - gqlURL, err := url.Parse(fmt.Sprintf("%s://%s/api/graphql", u.Scheme, u.Hostname())) - if err != nil { - return apiHost{}, fmt.Errorf("failed to parse GHES GraphQL URL: %w", err) - } - - // Check if subdomain isolation is enabled - // See https://docs.github.com/en/enterprise-server@3.17/admin/configuring-settings/hardening-security-for-your-enterprise/enabling-subdomain-isolation#about-subdomain-isolation - hasSubdomainIsolation := checkSubdomainIsolation(u.Scheme, u.Hostname()) - - var uploadURL *url.URL - if hasSubdomainIsolation { - // With subdomain isolation: https://uploads.hostname/ - uploadURL, err = url.Parse(fmt.Sprintf("%s://uploads.%s/", u.Scheme, u.Hostname())) - } else { - // Without subdomain isolation: https://hostname/api/uploads/ - uploadURL, err = url.Parse(fmt.Sprintf("%s://%s/api/uploads/", u.Scheme, 
u.Hostname())) - } - if err != nil { - return apiHost{}, fmt.Errorf("failed to parse GHES Upload URL: %w", err) - } - - var rawURL *url.URL - if hasSubdomainIsolation { - // With subdomain isolation: https://raw.hostname/ - rawURL, err = url.Parse(fmt.Sprintf("%s://raw.%s/", u.Scheme, u.Hostname())) - } else { - // Without subdomain isolation: https://hostname/raw/ - rawURL, err = url.Parse(fmt.Sprintf("%s://%s/raw/", u.Scheme, u.Hostname())) - } - if err != nil { - return apiHost{}, fmt.Errorf("failed to parse GHES Raw URL: %w", err) - } - - return apiHost{ - baseRESTURL: restURL, - graphqlURL: gqlURL, - uploadURL: uploadURL, - rawURL: rawURL, - }, nil -} - -// checkSubdomainIsolation detects if GitHub Enterprise Server has subdomain isolation enabled -// by attempting to ping the raw./_ping endpoint on the subdomain. The raw subdomain must always exist for subdomain isolation. -func checkSubdomainIsolation(scheme, hostname string) bool { - subdomainURL := fmt.Sprintf("%s://raw.%s/_ping", scheme, hostname) - - client := &http.Client{ - Timeout: 5 * time.Second, - // Don't follow redirects - we just want to check if the endpoint exists - //nolint:revive // parameters are required by http.Client.CheckRedirect signature - CheckRedirect: func(req *http.Request, via []*http.Request) error { - return http.ErrUseLastResponse - }, - } - - resp, err := client.Get(subdomainURL) - if err != nil { - return false - } - defer resp.Body.Close() - - return resp.StatusCode == http.StatusOK -} - -// Note that this does not handle ports yet, so development environments are out. 
-func parseAPIHost(s string) (apiHost, error) { - if s == "" { - return newDotcomHost() - } - - u, err := url.Parse(s) - if err != nil { - return apiHost{}, fmt.Errorf("could not parse host as URL: %s", s) - } - - if u.Scheme == "" { - return apiHost{}, fmt.Errorf("host must have a scheme (http or https): %s", s) - } - - if strings.HasSuffix(u.Hostname(), "github.com") { - return newDotcomHost() - } - - if strings.HasSuffix(u.Hostname(), "ghe.com") { - return newGHECHost(s) +// createFeatureChecker returns a FeatureFlagChecker that checks if a flag name +// is present in the provided list of enabled features. For the local server, +// this is populated from the --features CLI flag. +func createFeatureChecker(enabledFeatures []string) inventory.FeatureFlagChecker { + // Build a set for O(1) lookup + featureSet := make(map[string]bool, len(enabledFeatures)) + for _, f := range enabledFeatures { + featureSet[f] = true } - - return newGHESHost(s) -} - -type userAgentTransport struct { - transport http.RoundTripper - agent string -} - -func (t *userAgentTransport) RoundTrip(req *http.Request) (*http.Response, error) { - req = req.Clone(req.Context()) - req.Header.Set("User-Agent", t.agent) - return t.transport.RoundTrip(req) -} - -type bearerAuthTransport struct { - transport http.RoundTripper - token string -} - -func (t *bearerAuthTransport) RoundTrip(req *http.Request) (*http.Response, error) { - req = req.Clone(req.Context()) - req.Header.Set("Authorization", "Bearer "+t.token) - return t.transport.RoundTrip(req) -} - -func addGitHubAPIErrorToContext(next mcp.MethodHandler) mcp.MethodHandler { - return func(ctx context.Context, method string, req mcp.Request) (result mcp.Result, err error) { - // Ensure the context is cleared of any previous errors - // as context isn't propagated through middleware - ctx = errors.ContextWithGitHubErrors(ctx) - return next(ctx, method, req) + return func(_ context.Context, flagName string) (bool, error) { + return 
featureSet[flagName], nil } } -func addUserAgentsMiddleware(cfg MCPServerConfig, restClient *gogithub.Client, gqlHTTPClient *http.Client) func(next mcp.MethodHandler) mcp.MethodHandler { +func addUserAgentsMiddleware(cfg github.MCPServerConfig, restClient *gogithub.Client, gqlHTTPClient *http.Client) func(next mcp.MethodHandler) mcp.MethodHandler { return func(next mcp.MethodHandler) mcp.MethodHandler { return func(ctx context.Context, method string, request mcp.Request) (result mcp.Result, err error) { if method != "initialize" { @@ -666,12 +360,15 @@ func addUserAgentsMiddleware(cfg MCPServerConfig, restClient *gogithub.Client, g message.Params.ClientInfo.Name, message.Params.ClientInfo.Version, ) + if cfg.InsidersMode { + userAgent += " (insiders)" + } restClient.UserAgent = userAgent - gqlHTTPClient.Transport = &userAgentTransport{ - transport: gqlHTTPClient.Transport, - agent: userAgent, + gqlHTTPClient.Transport = &transport.UserAgentTransport{ + Transport: gqlHTTPClient.Transport, + Agent: userAgent, } return next(ctx, method, request) @@ -682,14 +379,12 @@ func addUserAgentsMiddleware(cfg MCPServerConfig, restClient *gogithub.Client, g // fetchTokenScopesForHost fetches the OAuth scopes for a token from the GitHub API. // It constructs the appropriate API host URL based on the configured host. 
func fetchTokenScopesForHost(ctx context.Context, token, host string) ([]string, error) { - apiHost, err := parseAPIHost(host) + apiHost, err := utils.NewAPIHost(host) if err != nil { return nil, fmt.Errorf("failed to parse API host: %w", err) } - fetcher := scopes.NewFetcher(scopes.FetcherOptions{ - APIHost: apiHost.baseRESTURL.String(), - }) + fetcher := scopes.NewFetcher(apiHost, scopes.FetcherOptions{}) return fetcher.FetchTokenScopes(ctx, token) } diff --git a/internal/ghmcp/server_test.go b/internal/ghmcp/server_test.go index 2139aa280f..6f0e3ac3f3 100644 --- a/internal/ghmcp/server_test.go +++ b/internal/ghmcp/server_test.go @@ -1,113 +1 @@ package ghmcp - -import ( - "testing" - - "github.com/github/github-mcp-server/pkg/translations" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// TestNewMCPServer_CreatesSuccessfully verifies that the server can be created -// with the deps injection middleware properly configured. -func TestNewMCPServer_CreatesSuccessfully(t *testing.T) { - t.Parallel() - - // Create a minimal server configuration - cfg := MCPServerConfig{ - Version: "test", - Host: "", // defaults to github.com - Token: "test-token", - EnabledToolsets: []string{"context"}, - ReadOnly: false, - Translator: translations.NullTranslationHelper, - ContentWindowSize: 5000, - LockdownMode: false, - InsidersMode: false, - } - - // Create the server - server, err := NewMCPServer(cfg) - require.NoError(t, err, "expected server creation to succeed") - require.NotNil(t, server, "expected server to be non-nil") - - // The fact that the server was created successfully indicates that: - // 1. The deps injection middleware is properly added - // 2. Tools can be registered without panicking - // - // If the middleware wasn't properly added, tool calls would panic with - // "ToolDependencies not found in context" when executed. 
- // - // The actual middleware functionality and tool execution with ContextWithDeps - // is already tested in pkg/github/*_test.go. -} - -// TestResolveEnabledToolsets verifies the toolset resolution logic. -func TestResolveEnabledToolsets(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - cfg MCPServerConfig - expectedResult []string - }{ - { - name: "nil toolsets without dynamic mode and no tools - use defaults", - cfg: MCPServerConfig{ - EnabledToolsets: nil, - DynamicToolsets: false, - EnabledTools: nil, - }, - expectedResult: nil, // nil means "use defaults" - }, - { - name: "nil toolsets with dynamic mode - start empty", - cfg: MCPServerConfig{ - EnabledToolsets: nil, - DynamicToolsets: true, - EnabledTools: nil, - }, - expectedResult: []string{}, // empty slice means no toolsets - }, - { - name: "explicit toolsets", - cfg: MCPServerConfig{ - EnabledToolsets: []string{"repos", "issues"}, - DynamicToolsets: false, - }, - expectedResult: []string{"repos", "issues"}, - }, - { - name: "empty toolsets - disable all", - cfg: MCPServerConfig{ - EnabledToolsets: []string{}, - DynamicToolsets: false, - }, - expectedResult: []string{}, // empty slice means no toolsets - }, - { - name: "specific tools without toolsets - no default toolsets", - cfg: MCPServerConfig{ - EnabledToolsets: nil, - DynamicToolsets: false, - EnabledTools: []string{"get_me"}, - }, - expectedResult: []string{}, // empty slice when tools specified but no toolsets - }, - { - name: "dynamic mode with explicit toolsets removes all and default", - cfg: MCPServerConfig{ - EnabledToolsets: []string{"all", "repos"}, - DynamicToolsets: true, - }, - expectedResult: []string{"repos"}, // "all" is removed in dynamic mode - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - result := resolveEnabledToolsets(tc.cfg) - assert.Equal(t, tc.expectedResult, result) - }) - } -} diff --git a/internal/toolsnaps/toolsnaps_test.go b/internal/toolsnaps/toolsnaps_test.go 
index c7d7301bca..b1138df866 100644 --- a/internal/toolsnaps/toolsnaps_test.go +++ b/internal/toolsnaps/toolsnaps_test.go @@ -195,23 +195,23 @@ func TestToolSnapKeysSorted(t *testing.T) { // Given a tool with fields that could be in any order type complexTool struct { - Name string `json:"name"` - Description string `json:"description"` - Properties map[string]interface{} `json:"properties"` - Annotations map[string]interface{} `json:"annotations"` + Name string `json:"name"` + Description string `json:"description"` + Properties map[string]any `json:"properties"` + Annotations map[string]any `json:"annotations"` } tool := complexTool{ Name: "test_tool", Description: "A test tool", - Properties: map[string]interface{}{ + Properties: map[string]any{ "zzz": "last", "aaa": "first", "mmm": "middle", - "owner": map[string]interface{}{"type": "string", "description": "Owner"}, - "repo": map[string]interface{}{"type": "string", "description": "Repo"}, + "owner": map[string]any{"type": "string", "description": "Owner"}, + "repo": map[string]any{"type": "string", "description": "Repo"}, }, - Annotations: map[string]interface{}{ + Annotations: map[string]any{ "readOnly": true, "title": "Test", }, @@ -227,7 +227,7 @@ func TestToolSnapKeysSorted(t *testing.T) { require.NoError(t, err) // Verify that the JSON is properly sorted by checking key order - var parsed map[string]interface{} + var parsed map[string]any err = json.Unmarshal(snapJSON, &parsed) require.NoError(t, err) @@ -285,7 +285,7 @@ func TestStructFieldOrderingSortedAlphabetically(t *testing.T) { aFieldIndex := -1 mFieldIndex := -1 zFieldIndex := -1 - for i := 0; i < len(snapStr)-7; i++ { + for i := range len(snapStr) - 7 { switch snapStr[i : i+6] { case "aField": aFieldIndex = i diff --git a/pkg/buffer/buffer.go b/pkg/buffer/buffer.go index 54ed0be4d8..23cc818e1f 100644 --- a/pkg/buffer/buffer.go +++ b/pkg/buffer/buffer.go @@ -32,6 +32,9 @@ const maxLineSize = 10 * 1024 * 1024 // If the response contains more lines 
than maxJobLogLines, only the most recent lines are kept. // Lines exceeding maxLineSize are truncated with a marker. func ProcessResponseAsRingBufferToEnd(httpResp *http.Response, maxJobLogLines int) (string, int, *http.Response, error) { + if maxJobLogLines <= 0 { + maxJobLogLines = 500 + } if maxJobLogLines > 100000 { maxJobLogLines = 100000 } @@ -112,17 +115,14 @@ func ProcessResponseAsRingBufferToEnd(httpResp *http.Response, maxJobLogLines in } var result []string - linesInBuffer := totalLines - if linesInBuffer > maxJobLogLines { - linesInBuffer = maxJobLogLines - } + linesInBuffer := min(totalLines, maxJobLogLines) startIndex := 0 if totalLines > maxJobLogLines { startIndex = writeIndex } - for i := 0; i < linesInBuffer; i++ { + for i := range linesInBuffer { idx := (startIndex + i) % maxJobLogLines if validLines[idx] { result = append(result, lines[idx]) diff --git a/pkg/context/graphql_features.go b/pkg/context/graphql_features.go new file mode 100644 index 0000000000..ebba3f757b --- /dev/null +++ b/pkg/context/graphql_features.go @@ -0,0 +1,19 @@ +package context + +import "context" + +// graphQLFeaturesKey is a context key for GraphQL feature flags +type graphQLFeaturesKey struct{} + +// WithGraphQLFeatures adds GraphQL feature flags to the context +func WithGraphQLFeatures(ctx context.Context, features ...string) context.Context { + return context.WithValue(ctx, graphQLFeaturesKey{}, features) +} + +// GetGraphQLFeatures retrieves GraphQL feature flags from the context +func GetGraphQLFeatures(ctx context.Context) []string { + if features, ok := ctx.Value(graphQLFeaturesKey{}).([]string); ok { + return features + } + return nil +} diff --git a/pkg/context/mcp_info.go b/pkg/context/mcp_info.go new file mode 100644 index 0000000000..ce55056821 --- /dev/null +++ b/pkg/context/mcp_info.go @@ -0,0 +1,39 @@ +package context + +import "context" + +type mcpMethodInfoCtx string + +var mcpMethodInfoCtxKey mcpMethodInfoCtx = "mcpmethodinfo" + +// MCPMethodInfo 
contains pre-parsed MCP method information extracted from the JSON-RPC request. +// This is populated early in the request lifecycle to enable: +// - Inventory filtering via ForMCPRequest (only register needed tools/resources/prompts) +// - Avoiding duplicate JSON parsing in middlewares (secret-scanning, scope-challenge) +// - Performance optimization for per-request server creation +type MCPMethodInfo struct { + // Method is the MCP method being called (e.g., "tools/call", "tools/list", "initialize") + Method string + // ItemName is the name of the specific item being accessed (tool name, resource URI, prompt name) + // Only populated for call/get methods (tools/call, prompts/get, resources/read) + ItemName string + // Owner is the repository owner from tool call arguments, if present + Owner string + // Repo is the repository name from tool call arguments, if present + Repo string + // Arguments contains the raw tool arguments for tools/call requests + Arguments map[string]any +} + +// WithMCPMethodInfo stores the MCPMethodInfo in the context. +func WithMCPMethodInfo(ctx context.Context, info *MCPMethodInfo) context.Context { + return context.WithValue(ctx, mcpMethodInfoCtxKey, info) +} + +// MCPMethod retrieves the MCPMethodInfo from the context. 
+func MCPMethod(ctx context.Context) (*MCPMethodInfo, bool) { + if info, ok := ctx.Value(mcpMethodInfoCtxKey).(*MCPMethodInfo); ok { + return info, true + } + return nil, false +} diff --git a/pkg/context/request.go b/pkg/context/request.go new file mode 100644 index 0000000000..9af925fc1e --- /dev/null +++ b/pkg/context/request.go @@ -0,0 +1,115 @@ +package context + +import "context" + +// readonlyCtxKey is a context key for read-only mode +type readonlyCtxKey struct{} + +// WithReadonly adds read-only mode state to the context +func WithReadonly(ctx context.Context, enabled bool) context.Context { + return context.WithValue(ctx, readonlyCtxKey{}, enabled) +} + +// IsReadonly retrieves the read-only mode state from the context +func IsReadonly(ctx context.Context) bool { + if enabled, ok := ctx.Value(readonlyCtxKey{}).(bool); ok { + return enabled + } + return false +} + +// toolsetsCtxKey is a context key for the active toolsets +type toolsetsCtxKey struct{} + +// WithToolsets adds the active toolsets to the context +func WithToolsets(ctx context.Context, toolsets []string) context.Context { + return context.WithValue(ctx, toolsetsCtxKey{}, toolsets) +} + +// GetToolsets retrieves the active toolsets from the context +func GetToolsets(ctx context.Context) []string { + if toolsets, ok := ctx.Value(toolsetsCtxKey{}).([]string); ok { + return toolsets + } + return nil +} + +// toolsCtxKey is a context key for tools +type toolsCtxKey struct{} + +// WithTools adds the tools to the context +func WithTools(ctx context.Context, tools []string) context.Context { + return context.WithValue(ctx, toolsCtxKey{}, tools) +} + +// GetTools retrieves the tools from the context +func GetTools(ctx context.Context) []string { + if tools, ok := ctx.Value(toolsCtxKey{}).([]string); ok { + return tools + } + return nil +} + +// lockdownCtxKey is a context key for lockdown mode +type lockdownCtxKey struct{} + +// WithLockdownMode adds lockdown mode state to the context +func 
WithLockdownMode(ctx context.Context, enabled bool) context.Context { + return context.WithValue(ctx, lockdownCtxKey{}, enabled) +} + +// IsLockdownMode retrieves the lockdown mode state from the context +func IsLockdownMode(ctx context.Context) bool { + if enabled, ok := ctx.Value(lockdownCtxKey{}).(bool); ok { + return enabled + } + return false +} + +// insidersCtxKey is a context key for insiders mode +type insidersCtxKey struct{} + +// WithInsidersMode adds insiders mode state to the context +func WithInsidersMode(ctx context.Context, enabled bool) context.Context { + return context.WithValue(ctx, insidersCtxKey{}, enabled) +} + +// IsInsidersMode retrieves the insiders mode state from the context +func IsInsidersMode(ctx context.Context) bool { + if enabled, ok := ctx.Value(insidersCtxKey{}).(bool); ok { + return enabled + } + return false +} + +// excludeToolsCtxKey is a context key for excluded tools +type excludeToolsCtxKey struct{} + +// WithExcludeTools adds the excluded tools to the context +func WithExcludeTools(ctx context.Context, tools []string) context.Context { + return context.WithValue(ctx, excludeToolsCtxKey{}, tools) +} + +// GetExcludeTools retrieves the excluded tools from the context +func GetExcludeTools(ctx context.Context) []string { + if tools, ok := ctx.Value(excludeToolsCtxKey{}).([]string); ok { + return tools + } + return nil +} + +// headerFeaturesCtxKey is a context key for raw header feature flags +type headerFeaturesCtxKey struct{} + +// WithHeaderFeatures stores the raw feature flags from the X-MCP-Features header into context +func WithHeaderFeatures(ctx context.Context, features []string) context.Context { + return context.WithValue(ctx, headerFeaturesCtxKey{}, features) +} + +// GetHeaderFeatures retrieves the raw feature flags from context +func GetHeaderFeatures(ctx context.Context) []string { + if features, ok := ctx.Value(headerFeaturesCtxKey{}).([]string); ok { + return features + } + return nil +} diff --git 
a/pkg/context/token.go b/pkg/context/token.go new file mode 100644 index 0000000000..97091a922f --- /dev/null +++ b/pkg/context/token.go @@ -0,0 +1,42 @@ +package context + +import ( + "context" + + "github.com/github/github-mcp-server/pkg/utils" +) + +type tokenCtxKey struct{} + +type TokenInfo struct { + Token string + TokenType utils.TokenType +} + +// WithTokenInfo adds TokenInfo to the context +func WithTokenInfo(ctx context.Context, tokenInfo *TokenInfo) context.Context { + return context.WithValue(ctx, tokenCtxKey{}, tokenInfo) +} + +// GetTokenInfo retrieves the authentication token from the context +func GetTokenInfo(ctx context.Context) (*TokenInfo, bool) { + if tokenInfo, ok := ctx.Value(tokenCtxKey{}).(*TokenInfo); ok { + return tokenInfo, true + } + return nil, false +} + +type tokenScopesKey struct{} + +// WithTokenScopes adds token scopes to the context +func WithTokenScopes(ctx context.Context, scopes []string) context.Context { + return context.WithValue(ctx, tokenScopesKey{}, scopes) +} + +// GetTokenScopes retrieves token scopes from the context +func GetTokenScopes(ctx context.Context) ([]string, bool) { + if scopes, ok := ctx.Value(tokenScopesKey{}).([]string); ok { + return scopes, true + } + return nil, false +} diff --git a/pkg/errors/error.go b/pkg/errors/error.go index 93ea852a87..d757651592 100644 --- a/pkg/errors/error.go +++ b/pkg/errors/error.go @@ -6,7 +6,7 @@ import ( "net/http" "github.com/github/github-mcp-server/pkg/utils" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/modelcontextprotocol/go-sdk/mcp" ) diff --git a/pkg/errors/error_test.go b/pkg/errors/error_test.go index 072a09a289..e33d5bd39e 100644 --- a/pkg/errors/error_test.go +++ b/pkg/errors/error_test.go @@ -6,7 +6,7 @@ import ( "net/http" "testing" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git 
a/pkg/github/__toolsnaps__/add_project_item.snap b/pkg/github/__toolsnaps__/add_project_item.snap deleted file mode 100644 index e6a5cc3c46..0000000000 --- a/pkg/github/__toolsnaps__/add_project_item.snap +++ /dev/null @@ -1,47 +0,0 @@ -{ - "annotations": { - "title": "Add project item" - }, - "description": "Add a specific Project item for a user or org", - "inputSchema": { - "properties": { - "item_id": { - "description": "The numeric ID of the issue or pull request to add to the project.", - "type": "number" - }, - "item_type": { - "description": "The item's type, either issue or pull_request.", - "enum": [ - "issue", - "pull_request" - ], - "type": "string" - }, - "owner": { - "description": "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. The name is not case sensitive.", - "type": "string" - }, - "owner_type": { - "description": "Owner type", - "enum": [ - "user", - "org" - ], - "type": "string" - }, - "project_number": { - "description": "The project's number.", - "type": "number" - } - }, - "required": [ - "owner_type", - "owner", - "project_number", - "item_type", - "item_id" - ], - "type": "object" - }, - "name": "add_project_item" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/add_reply_to_pull_request_comment.snap b/pkg/github/__toolsnaps__/add_reply_to_pull_request_comment.snap new file mode 100644 index 0000000000..e2187478e8 --- /dev/null +++ b/pkg/github/__toolsnaps__/add_reply_to_pull_request_comment.snap @@ -0,0 +1,39 @@ +{ + "annotations": { + "title": "Add reply to pull request comment" + }, + "description": "Add a reply to an existing pull request comment. 
This creates a new comment that is linked as a reply to the specified comment.", + "inputSchema": { + "properties": { + "body": { + "description": "The text of the reply", + "type": "string" + }, + "commentId": { + "description": "The ID of the comment to reply to", + "type": "number" + }, + "owner": { + "description": "Repository owner", + "type": "string" + }, + "pullNumber": { + "description": "Pull request number", + "type": "number" + }, + "repo": { + "description": "Repository name", + "type": "string" + } + }, + "required": [ + "owner", + "repo", + "pullNumber", + "commentId", + "body" + ], + "type": "object" + }, + "name": "add_reply_to_pull_request_comment" +} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/cancel_workflow_run.snap b/pkg/github/__toolsnaps__/cancel_workflow_run.snap deleted file mode 100644 index 40bcae7401..0000000000 --- a/pkg/github/__toolsnaps__/cancel_workflow_run.snap +++ /dev/null @@ -1,29 +0,0 @@ -{ - "annotations": { - "title": "Cancel workflow run" - }, - "description": "Cancel a workflow run", - "inputSchema": { - "properties": { - "owner": { - "description": "Repository owner", - "type": "string" - }, - "repo": { - "description": "Repository name", - "type": "string" - }, - "run_id": { - "description": "The unique identifier of the workflow run", - "type": "number" - } - }, - "required": [ - "owner", - "repo", - "run_id" - ], - "type": "object" - }, - "name": "cancel_workflow_run" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/create_pull_request.snap b/pkg/github/__toolsnaps__/create_pull_request.snap index cc22897faa..a8a94ce690 100644 --- a/pkg/github/__toolsnaps__/create_pull_request.snap +++ b/pkg/github/__toolsnaps__/create_pull_request.snap @@ -1,4 +1,13 @@ { + "_meta": { + "ui": { + "resourceUri": "ui://github-mcp-server/pr-write", + "visibility": [ + "model", + "app" + ] + } + }, "annotations": { "title": "Open new pull request" }, diff --git 
a/pkg/github/__toolsnaps__/delete_project_item.snap b/pkg/github/__toolsnaps__/delete_project_item.snap deleted file mode 100644 index 819fb84743..0000000000 --- a/pkg/github/__toolsnaps__/delete_project_item.snap +++ /dev/null @@ -1,39 +0,0 @@ -{ - "annotations": { - "destructiveHint": true, - "title": "Delete project item" - }, - "description": "Delete a specific Project item for a user or org", - "inputSchema": { - "properties": { - "item_id": { - "description": "The internal project item ID to delete from the project (not the issue or pull request ID).", - "type": "number" - }, - "owner": { - "description": "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. The name is not case sensitive.", - "type": "string" - }, - "owner_type": { - "description": "Owner type", - "enum": [ - "user", - "org" - ], - "type": "string" - }, - "project_number": { - "description": "The project's number.", - "type": "number" - } - }, - "required": [ - "owner_type", - "owner", - "project_number", - "item_id" - ], - "type": "object" - }, - "name": "delete_project_item" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/delete_workflow_run_logs.snap b/pkg/github/__toolsnaps__/delete_workflow_run_logs.snap deleted file mode 100644 index 2e2de73312..0000000000 --- a/pkg/github/__toolsnaps__/delete_workflow_run_logs.snap +++ /dev/null @@ -1,30 +0,0 @@ -{ - "annotations": { - "destructiveHint": true, - "title": "Delete workflow logs" - }, - "description": "Delete logs for a workflow run", - "inputSchema": { - "properties": { - "owner": { - "description": "Repository owner", - "type": "string" - }, - "repo": { - "description": "Repository name", - "type": "string" - }, - "run_id": { - "description": "The unique identifier of the workflow run", - "type": "number" - } - }, - "required": [ - "owner", - "repo", - "run_id" - ], - "type": "object" - }, - "name": "delete_workflow_run_logs" -} \ No newline at 
end of file diff --git a/pkg/github/__toolsnaps__/download_workflow_run_artifact.snap b/pkg/github/__toolsnaps__/download_workflow_run_artifact.snap deleted file mode 100644 index e831b21d53..0000000000 --- a/pkg/github/__toolsnaps__/download_workflow_run_artifact.snap +++ /dev/null @@ -1,30 +0,0 @@ -{ - "annotations": { - "readOnlyHint": true, - "title": "Download workflow artifact" - }, - "description": "Get download URL for a workflow run artifact", - "inputSchema": { - "properties": { - "artifact_id": { - "description": "The unique identifier of the artifact", - "type": "number" - }, - "owner": { - "description": "Repository owner", - "type": "string" - }, - "repo": { - "description": "Repository name", - "type": "string" - } - }, - "required": [ - "owner", - "repo", - "artifact_id" - ], - "type": "object" - }, - "name": "download_workflow_run_artifact" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/get_me.snap b/pkg/github/__toolsnaps__/get_me.snap index 4d7d2573b1..b451b49de6 100644 --- a/pkg/github/__toolsnaps__/get_me.snap +++ b/pkg/github/__toolsnaps__/get_me.snap @@ -1,4 +1,9 @@ { + "_meta": { + "ui": { + "resourceUri": "ui://github-mcp-server/get-me" + } + }, "annotations": { "readOnlyHint": true, "title": "Get my user profile" diff --git a/pkg/github/__toolsnaps__/get_project.snap b/pkg/github/__toolsnaps__/get_project.snap deleted file mode 100644 index 6ff320fe8c..0000000000 --- a/pkg/github/__toolsnaps__/get_project.snap +++ /dev/null @@ -1,34 +0,0 @@ -{ - "annotations": { - "readOnlyHint": true, - "title": "Get project" - }, - "description": "Get Project for a user or org", - "inputSchema": { - "properties": { - "owner": { - "description": "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. 
The name is not case sensitive.", - "type": "string" - }, - "owner_type": { - "description": "Owner type", - "enum": [ - "user", - "org" - ], - "type": "string" - }, - "project_number": { - "description": "The project's number", - "type": "number" - } - }, - "required": [ - "project_number", - "owner_type", - "owner" - ], - "type": "object" - }, - "name": "get_project" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/get_project_field.snap b/pkg/github/__toolsnaps__/get_project_field.snap deleted file mode 100644 index 9d884a20f9..0000000000 --- a/pkg/github/__toolsnaps__/get_project_field.snap +++ /dev/null @@ -1,39 +0,0 @@ -{ - "annotations": { - "readOnlyHint": true, - "title": "Get project field" - }, - "description": "Get Project field for a user or org", - "inputSchema": { - "properties": { - "field_id": { - "description": "The field's id.", - "type": "number" - }, - "owner": { - "description": "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. 
The name is not case sensitive.", - "type": "string" - }, - "owner_type": { - "description": "Owner type", - "enum": [ - "user", - "org" - ], - "type": "string" - }, - "project_number": { - "description": "The project's number.", - "type": "number" - } - }, - "required": [ - "owner_type", - "owner", - "project_number", - "field_id" - ], - "type": "object" - }, - "name": "get_project_field" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/get_project_item.snap b/pkg/github/__toolsnaps__/get_project_item.snap deleted file mode 100644 index 202bcc53ea..0000000000 --- a/pkg/github/__toolsnaps__/get_project_item.snap +++ /dev/null @@ -1,46 +0,0 @@ -{ - "annotations": { - "readOnlyHint": true, - "title": "Get project item" - }, - "description": "Get a specific Project item for a user or org", - "inputSchema": { - "properties": { - "fields": { - "description": "Specific list of field IDs to include in the response (e.g. [\"102589\", \"985201\", \"169875\"]). If not provided, only the title field is included.", - "items": { - "type": "string" - }, - "type": "array" - }, - "item_id": { - "description": "The item's ID.", - "type": "number" - }, - "owner": { - "description": "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. 
The name is not case sensitive.", - "type": "string" - }, - "owner_type": { - "description": "Owner type", - "enum": [ - "user", - "org" - ], - "type": "string" - }, - "project_number": { - "description": "The project's number.", - "type": "number" - } - }, - "required": [ - "owner_type", - "owner", - "project_number", - "item_id" - ], - "type": "object" - }, - "name": "get_project_item" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/get_workflow_run.snap b/pkg/github/__toolsnaps__/get_workflow_run.snap deleted file mode 100644 index e58ea0ba2c..0000000000 --- a/pkg/github/__toolsnaps__/get_workflow_run.snap +++ /dev/null @@ -1,30 +0,0 @@ -{ - "annotations": { - "readOnlyHint": true, - "title": "Get workflow run" - }, - "description": "Get details of a specific workflow run", - "inputSchema": { - "properties": { - "owner": { - "description": "Repository owner", - "type": "string" - }, - "repo": { - "description": "Repository name", - "type": "string" - }, - "run_id": { - "description": "The unique identifier of the workflow run", - "type": "number" - } - }, - "required": [ - "owner", - "repo", - "run_id" - ], - "type": "object" - }, - "name": "get_workflow_run" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/get_workflow_run_logs.snap b/pkg/github/__toolsnaps__/get_workflow_run_logs.snap deleted file mode 100644 index 8e76fbfc36..0000000000 --- a/pkg/github/__toolsnaps__/get_workflow_run_logs.snap +++ /dev/null @@ -1,30 +0,0 @@ -{ - "annotations": { - "readOnlyHint": true, - "title": "Get workflow run logs" - }, - "description": "Download logs for a specific workflow run (EXPENSIVE: downloads ALL logs as ZIP. 
Consider using get_job_logs with failed_only=true for debugging failed jobs)", - "inputSchema": { - "properties": { - "owner": { - "description": "Repository owner", - "type": "string" - }, - "repo": { - "description": "Repository name", - "type": "string" - }, - "run_id": { - "description": "The unique identifier of the workflow run", - "type": "number" - } - }, - "required": [ - "owner", - "repo", - "run_id" - ], - "type": "object" - }, - "name": "get_workflow_run_logs" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/get_workflow_run_usage.snap b/pkg/github/__toolsnaps__/get_workflow_run_usage.snap deleted file mode 100644 index 40069b8366..0000000000 --- a/pkg/github/__toolsnaps__/get_workflow_run_usage.snap +++ /dev/null @@ -1,30 +0,0 @@ -{ - "annotations": { - "readOnlyHint": true, - "title": "Get workflow usage" - }, - "description": "Get usage metrics for a workflow run", - "inputSchema": { - "properties": { - "owner": { - "description": "Repository owner", - "type": "string" - }, - "repo": { - "description": "Repository name", - "type": "string" - }, - "run_id": { - "description": "The unique identifier of the workflow run", - "type": "number" - } - }, - "required": [ - "owner", - "repo", - "run_id" - ], - "type": "object" - }, - "name": "get_workflow_run_usage" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/issue_write.snap b/pkg/github/__toolsnaps__/issue_write.snap index 4512eb6143..24cff5df97 100644 --- a/pkg/github/__toolsnaps__/issue_write.snap +++ b/pkg/github/__toolsnaps__/issue_write.snap @@ -1,4 +1,13 @@ { + "_meta": { + "ui": { + "resourceUri": "ui://github-mcp-server/issue-write", + "visibility": [ + "model", + "app" + ] + } + }, "annotations": { "title": "Create or update issue." 
}, diff --git a/pkg/github/__toolsnaps__/list_project_fields.snap b/pkg/github/__toolsnaps__/list_project_fields.snap deleted file mode 100644 index 5456388b2a..0000000000 --- a/pkg/github/__toolsnaps__/list_project_fields.snap +++ /dev/null @@ -1,46 +0,0 @@ -{ - "annotations": { - "readOnlyHint": true, - "title": "List project fields" - }, - "description": "List Project fields for a user or org", - "inputSchema": { - "properties": { - "after": { - "description": "Forward pagination cursor from previous pageInfo.nextCursor.", - "type": "string" - }, - "before": { - "description": "Backward pagination cursor from previous pageInfo.prevCursor (rare).", - "type": "string" - }, - "owner": { - "description": "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. The name is not case sensitive.", - "type": "string" - }, - "owner_type": { - "description": "Owner type", - "enum": [ - "user", - "org" - ], - "type": "string" - }, - "per_page": { - "description": "Results per page (max 50)", - "type": "number" - }, - "project_number": { - "description": "The project's number.", - "type": "number" - } - }, - "required": [ - "owner_type", - "owner", - "project_number" - ], - "type": "object" - }, - "name": "list_project_fields" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/list_project_items.snap b/pkg/github/__toolsnaps__/list_project_items.snap deleted file mode 100644 index 5089f43067..0000000000 --- a/pkg/github/__toolsnaps__/list_project_items.snap +++ /dev/null @@ -1,57 +0,0 @@ -{ - "annotations": { - "readOnlyHint": true, - "title": "List project items" - }, - "description": "Search project items with advanced filtering", - "inputSchema": { - "properties": { - "after": { - "description": "Forward pagination cursor from previous pageInfo.nextCursor.", - "type": "string" - }, - "before": { - "description": "Backward pagination cursor from previous pageInfo.prevCursor (rare).", - 
"type": "string" - }, - "fields": { - "description": "Field IDs to include (e.g. [\"102589\", \"985201\"]). CRITICAL: Always provide to get field values. Without this, only titles returned.", - "items": { - "type": "string" - }, - "type": "array" - }, - "owner": { - "description": "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. The name is not case sensitive.", - "type": "string" - }, - "owner_type": { - "description": "Owner type", - "enum": [ - "user", - "org" - ], - "type": "string" - }, - "per_page": { - "description": "Results per page (max 50)", - "type": "number" - }, - "project_number": { - "description": "The project's number.", - "type": "number" - }, - "query": { - "description": "Query string for advanced filtering of project items using GitHub's project filtering syntax.", - "type": "string" - } - }, - "required": [ - "owner_type", - "owner", - "project_number" - ], - "type": "object" - }, - "name": "list_project_items" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/list_projects.snap b/pkg/github/__toolsnaps__/list_projects.snap deleted file mode 100644 index be5a6713e6..0000000000 --- a/pkg/github/__toolsnaps__/list_projects.snap +++ /dev/null @@ -1,45 +0,0 @@ -{ - "annotations": { - "readOnlyHint": true, - "title": "List projects" - }, - "description": "List Projects for a user or organization", - "inputSchema": { - "properties": { - "after": { - "description": "Forward pagination cursor from previous pageInfo.nextCursor.", - "type": "string" - }, - "before": { - "description": "Backward pagination cursor from previous pageInfo.prevCursor (rare).", - "type": "string" - }, - "owner": { - "description": "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. 
The name is not case sensitive.", - "type": "string" - }, - "owner_type": { - "description": "Owner type", - "enum": [ - "user", - "org" - ], - "type": "string" - }, - "per_page": { - "description": "Results per page (max 50)", - "type": "number" - }, - "query": { - "description": "Filter projects by title text and open/closed state; permitted qualifiers: is:open, is:closed; examples: \"roadmap is:open\", \"is:open feature planning\".", - "type": "string" - } - }, - "required": [ - "owner_type", - "owner" - ], - "type": "object" - }, - "name": "list_projects" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/list_workflow_jobs.snap b/pkg/github/__toolsnaps__/list_workflow_jobs.snap deleted file mode 100644 index d8fed19652..0000000000 --- a/pkg/github/__toolsnaps__/list_workflow_jobs.snap +++ /dev/null @@ -1,49 +0,0 @@ -{ - "annotations": { - "readOnlyHint": true, - "title": "List workflow jobs" - }, - "description": "List jobs for a specific workflow run", - "inputSchema": { - "properties": { - "filter": { - "description": "Filters jobs by their completed_at timestamp", - "enum": [ - "latest", - "all" - ], - "type": "string" - }, - "owner": { - "description": "Repository owner", - "type": "string" - }, - "page": { - "description": "Page number for pagination (min 1)", - "minimum": 1, - "type": "number" - }, - "perPage": { - "description": "Results per page for pagination (min 1, max 100)", - "maximum": 100, - "minimum": 1, - "type": "number" - }, - "repo": { - "description": "Repository name", - "type": "string" - }, - "run_id": { - "description": "The unique identifier of the workflow run", - "type": "number" - } - }, - "required": [ - "owner", - "repo", - "run_id" - ], - "type": "object" - }, - "name": "list_workflow_jobs" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/list_workflow_run_artifacts.snap b/pkg/github/__toolsnaps__/list_workflow_run_artifacts.snap deleted file mode 100644 index 664722901e..0000000000 --- 
a/pkg/github/__toolsnaps__/list_workflow_run_artifacts.snap +++ /dev/null @@ -1,41 +0,0 @@ -{ - "annotations": { - "readOnlyHint": true, - "title": "List workflow artifacts" - }, - "description": "List artifacts for a workflow run", - "inputSchema": { - "properties": { - "owner": { - "description": "Repository owner", - "type": "string" - }, - "page": { - "description": "Page number for pagination (min 1)", - "minimum": 1, - "type": "number" - }, - "perPage": { - "description": "Results per page for pagination (min 1, max 100)", - "maximum": 100, - "minimum": 1, - "type": "number" - }, - "repo": { - "description": "Repository name", - "type": "string" - }, - "run_id": { - "description": "The unique identifier of the workflow run", - "type": "number" - } - }, - "required": [ - "owner", - "repo", - "run_id" - ], - "type": "object" - }, - "name": "list_workflow_run_artifacts" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/list_workflow_runs.snap b/pkg/github/__toolsnaps__/list_workflow_runs.snap deleted file mode 100644 index a9a9916c3a..0000000000 --- a/pkg/github/__toolsnaps__/list_workflow_runs.snap +++ /dev/null @@ -1,98 +0,0 @@ -{ - "annotations": { - "readOnlyHint": true, - "title": "List workflow runs" - }, - "description": "List workflow runs for a specific workflow", - "inputSchema": { - "properties": { - "actor": { - "description": "Returns someone's workflow runs. Use the login for the user who created the workflow run.", - "type": "string" - }, - "branch": { - "description": "Returns workflow runs associated with a branch. 
Use the name of the branch.", - "type": "string" - }, - "event": { - "description": "Returns workflow runs for a specific event type", - "enum": [ - "branch_protection_rule", - "check_run", - "check_suite", - "create", - "delete", - "deployment", - "deployment_status", - "discussion", - "discussion_comment", - "fork", - "gollum", - "issue_comment", - "issues", - "label", - "merge_group", - "milestone", - "page_build", - "public", - "pull_request", - "pull_request_review", - "pull_request_review_comment", - "pull_request_target", - "push", - "registry_package", - "release", - "repository_dispatch", - "schedule", - "status", - "watch", - "workflow_call", - "workflow_dispatch", - "workflow_run" - ], - "type": "string" - }, - "owner": { - "description": "Repository owner", - "type": "string" - }, - "page": { - "description": "Page number for pagination (min 1)", - "minimum": 1, - "type": "number" - }, - "perPage": { - "description": "Results per page for pagination (min 1, max 100)", - "maximum": 100, - "minimum": 1, - "type": "number" - }, - "repo": { - "description": "Repository name", - "type": "string" - }, - "status": { - "description": "Returns workflow runs with the check run status", - "enum": [ - "queued", - "in_progress", - "completed", - "requested", - "waiting" - ], - "type": "string" - }, - "workflow_id": { - "description": "The workflow ID or workflow file name", - "type": "string" - } - }, - "required": [ - "owner", - "repo", - "workflow_id" - ], - "type": "object" - }, - "name": "list_workflow_runs" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/list_workflows.snap b/pkg/github/__toolsnaps__/list_workflows.snap deleted file mode 100644 index b0e51e03a0..0000000000 --- a/pkg/github/__toolsnaps__/list_workflows.snap +++ /dev/null @@ -1,36 +0,0 @@ -{ - "annotations": { - "readOnlyHint": true, - "title": "List workflows" - }, - "description": "List workflows in a repository", - "inputSchema": { - "properties": { - "owner": { - 
"description": "Repository owner", - "type": "string" - }, - "page": { - "description": "Page number for pagination (min 1)", - "minimum": 1, - "type": "number" - }, - "perPage": { - "description": "Results per page for pagination (min 1, max 100)", - "maximum": 100, - "minimum": 1, - "type": "number" - }, - "repo": { - "description": "Repository name", - "type": "string" - } - }, - "required": [ - "owner", - "repo" - ], - "type": "object" - }, - "name": "list_workflows" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/projects_get.snap b/pkg/github/__toolsnaps__/projects_get.snap index cb5013d749..864f61d83f 100644 --- a/pkg/github/__toolsnaps__/projects_get.snap +++ b/pkg/github/__toolsnaps__/projects_get.snap @@ -26,7 +26,8 @@ "enum": [ "get_project", "get_project_field", - "get_project_item" + "get_project_item", + "get_project_status_update" ], "type": "string" }, @@ -45,12 +46,14 @@ "project_number": { "description": "The project's number.", "type": "number" + }, + "status_update_id": { + "description": "The node ID of the project status update. Required for 'get_project_status_update' method.", + "type": "string" } }, "required": [ - "method", - "owner", - "project_number" + "method" ], "type": "object" }, diff --git a/pkg/github/__toolsnaps__/projects_list.snap b/pkg/github/__toolsnaps__/projects_list.snap index f12452b5a2..c2bb0d3f49 100644 --- a/pkg/github/__toolsnaps__/projects_list.snap +++ b/pkg/github/__toolsnaps__/projects_list.snap @@ -26,7 +26,8 @@ "enum": [ "list_projects", "list_project_fields", - "list_project_items" + "list_project_items", + "list_project_status_updates" ], "type": "string" }, @@ -47,7 +48,7 @@ "type": "number" }, "project_number": { - "description": "The project's number. Required for 'list_project_fields' and 'list_project_items' methods.", + "description": "The project's number. 
Required for 'list_project_fields', 'list_project_items', and 'list_project_status_updates' methods.", "type": "number" }, "query": { diff --git a/pkg/github/__toolsnaps__/projects_write.snap b/pkg/github/__toolsnaps__/projects_write.snap index d2d871bcd2..f6d3197b84 100644 --- a/pkg/github/__toolsnaps__/projects_write.snap +++ b/pkg/github/__toolsnaps__/projects_write.snap @@ -3,9 +3,13 @@ "destructiveHint": true, "title": "Modify GitHub Project items" }, - "description": "Add, update, or delete project items in a GitHub Project.", + "description": "Add, update, or delete project items, or create status updates in a GitHub Project.", "inputSchema": { "properties": { + "body": { + "description": "The body of the status update (markdown). Used for 'create_project_status_update' method.", + "type": "string" + }, "issue_number": { "description": "The issue number (use when item_type is 'issue' for 'add_project_item' method). Provide either issue_number or pull_request_number.", "type": "number" @@ -35,7 +39,8 @@ "enum": [ "add_project_item", "update_project_item", - "delete_project_item" + "delete_project_item", + "create_project_status_update" ], "type": "string" }, @@ -59,6 +64,25 @@ "description": "The pull request number (use when item_type is 'pull_request' for 'add_project_item' method). Provide either issue_number or pull_request_number.", "type": "number" }, + "start_date": { + "description": "The start date of the status update in YYYY-MM-DD format. Used for 'create_project_status_update' method.", + "type": "string" + }, + "status": { + "description": "The status of the project. Used for 'create_project_status_update' method.", + "enum": [ + "INACTIVE", + "ON_TRACK", + "AT_RISK", + "OFF_TRACK", + "COMPLETE" + ], + "type": "string" + }, + "target_date": { + "description": "The target date of the status update in YYYY-MM-DD format. 
Used for 'create_project_status_update' method.", + "type": "string" + }, "updated_field": { "description": "Object consisting of the ID of the project field to update and the new value for the field. To clear the field, set value to null. Example: {\"id\": 123456, \"value\": \"New Value\"}. Required for 'update_project_item' method.", "type": "object" diff --git a/pkg/github/__toolsnaps__/rerun_failed_jobs.snap b/pkg/github/__toolsnaps__/rerun_failed_jobs.snap deleted file mode 100644 index 099c891533..0000000000 --- a/pkg/github/__toolsnaps__/rerun_failed_jobs.snap +++ /dev/null @@ -1,29 +0,0 @@ -{ - "annotations": { - "title": "Rerun failed jobs" - }, - "description": "Re-run only the failed jobs in a workflow run", - "inputSchema": { - "properties": { - "owner": { - "description": "Repository owner", - "type": "string" - }, - "repo": { - "description": "Repository name", - "type": "string" - }, - "run_id": { - "description": "The unique identifier of the workflow run", - "type": "number" - } - }, - "required": [ - "owner", - "repo", - "run_id" - ], - "type": "object" - }, - "name": "rerun_failed_jobs" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/rerun_workflow_run.snap b/pkg/github/__toolsnaps__/rerun_workflow_run.snap deleted file mode 100644 index 946bd72f34..0000000000 --- a/pkg/github/__toolsnaps__/rerun_workflow_run.snap +++ /dev/null @@ -1,29 +0,0 @@ -{ - "annotations": { - "title": "Rerun workflow run" - }, - "description": "Re-run an entire workflow run", - "inputSchema": { - "properties": { - "owner": { - "description": "Repository owner", - "type": "string" - }, - "repo": { - "description": "Repository name", - "type": "string" - }, - "run_id": { - "description": "The unique identifier of the workflow run", - "type": "number" - } - }, - "required": [ - "owner", - "repo", - "run_id" - ], - "type": "object" - }, - "name": "rerun_workflow_run" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/run_workflow.snap 
b/pkg/github/__toolsnaps__/run_workflow.snap deleted file mode 100644 index 1b6c8993e4..0000000000 --- a/pkg/github/__toolsnaps__/run_workflow.snap +++ /dev/null @@ -1,38 +0,0 @@ -{ - "annotations": { - "title": "Run workflow" - }, - "description": "Run an Actions workflow by workflow ID or filename", - "inputSchema": { - "properties": { - "inputs": { - "description": "Inputs the workflow accepts", - "type": "object" - }, - "owner": { - "description": "Repository owner", - "type": "string" - }, - "ref": { - "description": "The git reference for the workflow. The reference can be a branch or tag name.", - "type": "string" - }, - "repo": { - "description": "Repository name", - "type": "string" - }, - "workflow_id": { - "description": "The workflow ID (numeric) or workflow file name (e.g., main.yml, ci.yaml)", - "type": "string" - } - }, - "required": [ - "owner", - "repo", - "workflow_id", - "ref" - ], - "type": "object" - }, - "name": "run_workflow" -} \ No newline at end of file diff --git a/pkg/github/__toolsnaps__/update_project_item.snap b/pkg/github/__toolsnaps__/update_project_item.snap deleted file mode 100644 index 9875907416..0000000000 --- a/pkg/github/__toolsnaps__/update_project_item.snap +++ /dev/null @@ -1,43 +0,0 @@ -{ - "annotations": { - "title": "Update project item" - }, - "description": "Update a specific Project item for a user or org", - "inputSchema": { - "properties": { - "item_id": { - "description": "The unique identifier of the project item. This is not the issue or pull request ID.", - "type": "number" - }, - "owner": { - "description": "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. 
The name is not case sensitive.", - "type": "string" - }, - "owner_type": { - "description": "Owner type", - "enum": [ - "user", - "org" - ], - "type": "string" - }, - "project_number": { - "description": "The project's number.", - "type": "number" - }, - "updated_field": { - "description": "Object consisting of the ID of the project field to update and the new value for the field. To clear the field, set value to null. Example: {\"id\": 123456, \"value\": \"New Value\"}", - "type": "object" - } - }, - "required": [ - "owner_type", - "owner", - "project_number", - "item_id", - "updated_field" - ], - "type": "object" - }, - "name": "update_project_item" -} \ No newline at end of file diff --git a/pkg/github/actions.go b/pkg/github/actions.go index d3e5aad8eb..c3b5bb8c71 100644 --- a/pkg/github/actions.go +++ b/pkg/github/actions.go @@ -16,7 +16,7 @@ import ( "github.com/github/github-mcp-server/pkg/scopes" "github.com/github/github-mcp-server/pkg/translations" "github.com/github/github-mcp-server/pkg/utils" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/modelcontextprotocol/go-sdk/mcp" ) @@ -26,10 +26,6 @@ const ( DescriptionRepositoryName = "Repository name" ) -// FeatureFlagHoldbackConsolidatedActions is the feature flag that, when enabled, reverts to -// individual actions tools instead of the consolidated actions tools. 
-const FeatureFlagHoldbackConsolidatedActions = "mcp_holdback_consolidated_actions" - // Method constants for consolidated actions tools const ( actionsMethodListWorkflows = "list_workflows" @@ -49,1394 +45,155 @@ const ( actionsMethodDeleteWorkflowRunLogs = "delete_workflow_run_logs" ) -// ListWorkflows creates a tool to list workflows in a repository -func ListWorkflows(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataActions, - mcp.Tool{ - Name: "list_workflows", - Description: t("TOOL_LIST_WORKFLOWS_DESCRIPTION", "List workflows in a repository"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_LIST_WORKFLOWS_USER_TITLE", "List workflows"), - ReadOnlyHint: true, - }, - InputSchema: WithPagination(&jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner": { - Type: "string", - Description: DescriptionRepositoryOwner, - }, - "repo": { - Type: "string", - Description: DescriptionRepositoryName, - }, - }, - Required: []string{"owner", "repo"}, - }), - }, - []scopes.Scope{scopes.Repo}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultErrorFromErr("failed to get GitHub client", err), nil, nil - } +// handleFailedJobLogs gets logs for all failed jobs in a workflow run +func handleFailedJobLogs(ctx context.Context, client *github.Client, owner, repo string, runID int64, returnContent bool, tailLines int, contentWindowSize int) (*mcp.CallToolResult, any, error) { + // First, get all jobs for the workflow run + jobs, resp, err := client.Actions.ListWorkflowJobs(ctx, owner, repo, runID, &github.ListWorkflowJobsOptions{ + Filter: "latest", + }) + if err != nil { + return ghErrors.NewGitHubAPIErrorResponse(ctx, "failed to list workflow jobs", resp, err), nil, nil + } + defer func() { _ = resp.Body.Close() }() - owner, err 
:= RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - repo, err := RequiredParam[string](args, "repo") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } + // Filter for failed jobs + var failedJobs []*github.WorkflowJob + for _, job := range jobs.Jobs { + if job.GetConclusion() == "failure" { + failedJobs = append(failedJobs, job) + } + } - // Get optional pagination parameters - pagination, err := OptionalPaginationParams(args) - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } + if len(failedJobs) == 0 { + result := map[string]any{ + "message": "No failed jobs found in this workflow run", + "run_id": runID, + "total_jobs": len(jobs.Jobs), + "failed_jobs": 0, + } + r, _ := json.Marshal(result) + return utils.NewToolResultText(string(r)), nil, nil + } - // Set up list options - opts := &github.ListOptions{ - PerPage: pagination.PerPage, - Page: pagination.Page, + // Collect logs for all failed jobs + var logResults []map[string]any + for _, job := range failedJobs { + jobResult, resp, err := getJobLogData(ctx, client, owner, repo, job.GetID(), job.GetName(), returnContent, tailLines, contentWindowSize) + if err != nil { + // Continue with other jobs even if one fails + jobResult = map[string]any{ + "job_id": job.GetID(), + "job_name": job.GetName(), + "error": err.Error(), } + // Enable reporting of status codes and error causes + _, _ = ghErrors.NewGitHubAPIErrorToCtx(ctx, "failed to get job logs", resp, err) // Explicitly ignore error for graceful handling + } - workflows, resp, err := client.Actions.ListWorkflows(ctx, owner, repo, opts) - if err != nil { - return nil, nil, fmt.Errorf("failed to list workflows: %w", err) - } - defer func() { _ = resp.Body.Close() }() + logResults = append(logResults, jobResult) + } - r, err := json.Marshal(workflows) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } + 
result := map[string]any{ + "message": fmt.Sprintf("Retrieved logs for %d failed jobs", len(failedJobs)), + "run_id": runID, + "total_jobs": len(jobs.Jobs), + "failed_jobs": len(failedJobs), + "logs": logResults, + "return_format": map[string]bool{"content": returnContent, "urls": !returnContent}, + } - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedActions - return tool -} + r, err := json.Marshal(result) + if err != nil { + return nil, nil, fmt.Errorf("failed to marshal response: %w", err) + } -// ListWorkflowRuns creates a tool to list workflow runs for a specific workflow -func ListWorkflowRuns(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataActions, - mcp.Tool{ - Name: "list_workflow_runs", - Description: t("TOOL_LIST_WORKFLOW_RUNS_DESCRIPTION", "List workflow runs for a specific workflow"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_LIST_WORKFLOW_RUNS_USER_TITLE", "List workflow runs"), - ReadOnlyHint: true, - }, - InputSchema: WithPagination(&jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner": { - Type: "string", - Description: DescriptionRepositoryOwner, - }, - "repo": { - Type: "string", - Description: DescriptionRepositoryName, - }, - "workflow_id": { - Type: "string", - Description: "The workflow ID or workflow file name", - }, - "actor": { - Type: "string", - Description: "Returns someone's workflow runs. Use the login for the user who created the workflow run.", - }, - "branch": { - Type: "string", - Description: "Returns workflow runs associated with a branch. 
Use the name of the branch.", - }, - "event": { - Type: "string", - Description: "Returns workflow runs for a specific event type", - Enum: []any{ - "branch_protection_rule", - "check_run", - "check_suite", - "create", - "delete", - "deployment", - "deployment_status", - "discussion", - "discussion_comment", - "fork", - "gollum", - "issue_comment", - "issues", - "label", - "merge_group", - "milestone", - "page_build", - "public", - "pull_request", - "pull_request_review", - "pull_request_review_comment", - "pull_request_target", - "push", - "registry_package", - "release", - "repository_dispatch", - "schedule", - "status", - "watch", - "workflow_call", - "workflow_dispatch", - "workflow_run", - }, - }, - "status": { - Type: "string", - Description: "Returns workflow runs with the check run status", - Enum: []any{"queued", "in_progress", "completed", "requested", "waiting"}, - }, - }, - Required: []string{"owner", "repo", "workflow_id"}, - }), - }, - []scopes.Scope{scopes.Repo}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultErrorFromErr("failed to get GitHub client", err), nil, nil - } + return utils.NewToolResultText(string(r)), nil, nil +} - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - repo, err := RequiredParam[string](args, "repo") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - workflowID, err := RequiredParam[string](args, "workflow_id") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } +// handleSingleJobLogs gets logs for a single job +func handleSingleJobLogs(ctx context.Context, client *github.Client, owner, repo string, jobID int64, returnContent bool, tailLines int, contentWindowSize int) (*mcp.CallToolResult, any, error) { + jobResult, resp, 
err := getJobLogData(ctx, client, owner, repo, jobID, "", returnContent, tailLines, contentWindowSize) + if err != nil { + return ghErrors.NewGitHubAPIErrorResponse(ctx, "failed to get job logs", resp, err), nil, nil + } - // Get optional filtering parameters - actor, err := OptionalParam[string](args, "actor") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - branch, err := OptionalParam[string](args, "branch") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - event, err := OptionalParam[string](args, "event") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - status, err := OptionalParam[string](args, "status") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } + r, err := json.Marshal(jobResult) + if err != nil { + return nil, nil, fmt.Errorf("failed to marshal response: %w", err) + } - // Get optional pagination parameters - pagination, err := OptionalPaginationParams(args) - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } + return utils.NewToolResultText(string(r)), nil, nil +} - // Set up list options - opts := &github.ListWorkflowRunsOptions{ - Actor: actor, - Branch: branch, - Event: event, - Status: status, - ListOptions: github.ListOptions{ - PerPage: pagination.PerPage, - Page: pagination.Page, - }, - } +// getJobLogData retrieves log data for a single job, either as URL or content +func getJobLogData(ctx context.Context, client *github.Client, owner, repo string, jobID int64, jobName string, returnContent bool, tailLines int, contentWindowSize int) (map[string]any, *github.Response, error) { + // Get the download URL for the job logs + url, resp, err := client.Actions.GetWorkflowJobLogs(ctx, owner, repo, jobID, 1) + if err != nil { + return nil, resp, fmt.Errorf("failed to get job logs for job %d: %w", jobID, err) + } + defer func() { _ = resp.Body.Close() }() - workflowRuns, resp, err := 
client.Actions.ListWorkflowRunsByFileName(ctx, owner, repo, workflowID, opts) - if err != nil { - return nil, nil, fmt.Errorf("failed to list workflow runs: %w", err) - } - defer func() { _ = resp.Body.Close() }() + result := map[string]any{ + "job_id": jobID, + } + if jobName != "" { + result["job_name"] = jobName + } - r, err := json.Marshal(workflowRuns) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) + if returnContent { + // Download and return the actual log content + content, originalLength, httpResp, err := downloadLogContent(ctx, url.String(), tailLines, contentWindowSize) //nolint:bodyclose // Response body is closed in downloadLogContent, but we need to return httpResp + if err != nil { + // To keep the return value consistent wrap the response as a GitHub Response + ghRes := &github.Response{ + Response: httpResp, } + return nil, ghRes, fmt.Errorf("failed to download log content for job %d: %w", jobID, err) + } + result["logs_content"] = content + result["message"] = "Job logs content retrieved successfully" + result["original_length"] = originalLength + } else { + // Return just the URL + result["logs_url"] = url.String() + result["message"] = "Job logs are available for download" + result["note"] = "The logs_url provides a download link for the individual job logs in plain text format. Use return_content=true to get the actual log content." 
+ } - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedActions - return tool + return result, resp, nil } -// RunWorkflow creates a tool to run an Actions workflow -func RunWorkflow(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataActions, - mcp.Tool{ - Name: "run_workflow", - Description: t("TOOL_RUN_WORKFLOW_DESCRIPTION", "Run an Actions workflow by workflow ID or filename"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_RUN_WORKFLOW_USER_TITLE", "Run workflow"), - ReadOnlyHint: false, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner": { - Type: "string", - Description: DescriptionRepositoryOwner, - }, - "repo": { - Type: "string", - Description: DescriptionRepositoryName, - }, - "workflow_id": { - Type: "string", - Description: "The workflow ID (numeric) or workflow file name (e.g., main.yml, ci.yaml)", - }, - "ref": { - Type: "string", - Description: "The git reference for the workflow. 
The reference can be a branch or tag name.", - }, - "inputs": { - Type: "object", - Description: "Inputs the workflow accepts", - }, - }, - Required: []string{"owner", "repo", "workflow_id", "ref"}, - }, - }, - []scopes.Scope{scopes.Repo}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultErrorFromErr("failed to get GitHub client", err), nil, nil - } +func downloadLogContent(ctx context.Context, logURL string, tailLines int, maxLines int) (string, int, *http.Response, error) { + prof := profiler.New(nil, profiler.IsProfilingEnabled()) + finish := prof.Start(ctx, "log_buffer_processing") - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - repo, err := RequiredParam[string](args, "repo") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - workflowID, err := RequiredParam[string](args, "workflow_id") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - ref, err := RequiredParam[string](args, "ref") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - // Get optional inputs parameter - var inputs map[string]interface{} - if requestInputs, ok := args["inputs"]; ok { - if inputsMap, ok := requestInputs.(map[string]interface{}); ok { - inputs = inputsMap - } - } - - event := github.CreateWorkflowDispatchEventRequest{ - Ref: ref, - Inputs: inputs, - } - - var resp *github.Response - var workflowType string - - if workflowIDInt, parseErr := strconv.ParseInt(workflowID, 10, 64); parseErr == nil { - resp, err = client.Actions.CreateWorkflowDispatchEventByID(ctx, owner, repo, workflowIDInt, event) - workflowType = "workflow_id" - } else { - resp, err = client.Actions.CreateWorkflowDispatchEventByFileName(ctx, owner, repo, workflowID, event) - 
workflowType = "workflow_file" - } - - if err != nil { - return nil, nil, fmt.Errorf("failed to run workflow: %w", err) - } - defer func() { _ = resp.Body.Close() }() - - result := map[string]any{ - "message": "Workflow run has been queued", - "workflow_type": workflowType, - "workflow_id": workflowID, - "ref": ref, - "inputs": inputs, - "status": resp.Status, - "status_code": resp.StatusCode, - } - - r, err := json.Marshal(result) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedActions - return tool -} - -// GetWorkflowRun creates a tool to get details of a specific workflow run -func GetWorkflowRun(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataActions, - mcp.Tool{ - Name: "get_workflow_run", - Description: t("TOOL_GET_WORKFLOW_RUN_DESCRIPTION", "Get details of a specific workflow run"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_GET_WORKFLOW_RUN_USER_TITLE", "Get workflow run"), - ReadOnlyHint: true, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner": { - Type: "string", - Description: DescriptionRepositoryOwner, - }, - "repo": { - Type: "string", - Description: DescriptionRepositoryName, - }, - "run_id": { - Type: "number", - Description: "The unique identifier of the workflow run", - }, - }, - Required: []string{"owner", "repo", "run_id"}, - }, - }, - []scopes.Scope{scopes.Repo}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultErrorFromErr("failed to get GitHub client", err), nil, nil - } - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, 
nil - } - repo, err := RequiredParam[string](args, "repo") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runIDInt, err := RequiredInt(args, "run_id") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runID := int64(runIDInt) - - workflowRun, resp, err := client.Actions.GetWorkflowRunByID(ctx, owner, repo, runID) - if err != nil { - return nil, nil, fmt.Errorf("failed to get workflow run: %w", err) - } - defer func() { _ = resp.Body.Close() }() - - r, err := json.Marshal(workflowRun) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedActions - return tool -} - -// GetWorkflowRunLogs creates a tool to download logs for a specific workflow run -func GetWorkflowRunLogs(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataActions, - mcp.Tool{ - Name: "get_workflow_run_logs", - Description: t("TOOL_GET_WORKFLOW_RUN_LOGS_DESCRIPTION", "Download logs for a specific workflow run (EXPENSIVE: downloads ALL logs as ZIP. 
Consider using get_job_logs with failed_only=true for debugging failed jobs)"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_GET_WORKFLOW_RUN_LOGS_USER_TITLE", "Get workflow run logs"), - ReadOnlyHint: true, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner": { - Type: "string", - Description: DescriptionRepositoryOwner, - }, - "repo": { - Type: "string", - Description: DescriptionRepositoryName, - }, - "run_id": { - Type: "number", - Description: "The unique identifier of the workflow run", - }, - }, - Required: []string{"owner", "repo", "run_id"}, - }, - }, - []scopes.Scope{scopes.Repo}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultErrorFromErr("failed to get GitHub client", err), nil, nil - } - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - repo, err := RequiredParam[string](args, "repo") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runIDInt, err := RequiredInt(args, "run_id") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runID := int64(runIDInt) - - // Get the download URL for the logs - url, resp, err := client.Actions.GetWorkflowRunLogs(ctx, owner, repo, runID, 1) - if err != nil { - return nil, nil, fmt.Errorf("failed to get workflow run logs: %w", err) - } - defer func() { _ = resp.Body.Close() }() - - // Create response with the logs URL and information - result := map[string]any{ - "logs_url": url.String(), - "message": "Workflow run logs are available for download", - "note": "The logs_url provides a download link for the complete workflow run logs as a ZIP archive. 
You can download this archive to extract and examine individual job logs.", - "warning": "This downloads ALL logs as a ZIP file which can be large and expensive. For debugging failed jobs, consider using get_job_logs with failed_only=true and run_id instead.", - "optimization_tip": "Use: get_job_logs with parameters {run_id: " + fmt.Sprintf("%d", runID) + ", failed_only: true} for more efficient failed job debugging", - } - - r, err := json.Marshal(result) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedActions - return tool -} - -// ListWorkflowJobs creates a tool to list jobs for a specific workflow run -func ListWorkflowJobs(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataActions, - mcp.Tool{ - Name: "list_workflow_jobs", - Description: t("TOOL_LIST_WORKFLOW_JOBS_DESCRIPTION", "List jobs for a specific workflow run"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_LIST_WORKFLOW_JOBS_USER_TITLE", "List workflow jobs"), - ReadOnlyHint: true, - }, - InputSchema: WithPagination(&jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner": { - Type: "string", - Description: DescriptionRepositoryOwner, - }, - "repo": { - Type: "string", - Description: DescriptionRepositoryName, - }, - "run_id": { - Type: "number", - Description: "The unique identifier of the workflow run", - }, - "filter": { - Type: "string", - Description: "Filters jobs by their completed_at timestamp", - Enum: []any{"latest", "all"}, - }, - }, - Required: []string{"owner", "repo", "run_id"}, - }), - }, - []scopes.Scope{scopes.Repo}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - client, err := deps.GetClient(ctx) - if err != nil { - return 
utils.NewToolResultErrorFromErr("failed to get GitHub client", err), nil, nil - } - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - repo, err := RequiredParam[string](args, "repo") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runIDInt, err := RequiredInt(args, "run_id") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runID := int64(runIDInt) - - // Get optional filtering parameters - filter, err := OptionalParam[string](args, "filter") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - // Get optional pagination parameters - pagination, err := OptionalPaginationParams(args) - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - // Set up list options - opts := &github.ListWorkflowJobsOptions{ - Filter: filter, - ListOptions: github.ListOptions{ - PerPage: pagination.PerPage, - Page: pagination.Page, - }, - } - - jobs, resp, err := client.Actions.ListWorkflowJobs(ctx, owner, repo, runID, opts) - if err != nil { - return nil, nil, fmt.Errorf("failed to list workflow jobs: %w", err) - } - defer func() { _ = resp.Body.Close() }() - - // Add optimization tip for failed job debugging - response := map[string]any{ - "jobs": jobs, - "optimization_tip": "For debugging failed jobs, consider using get_job_logs with failed_only=true and run_id=" + fmt.Sprintf("%d", runID) + " to get logs directly without needing to list jobs first", - } - - r, err := json.Marshal(response) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedActions - return tool -} - -// GetJobLogs creates a tool to download logs for a specific workflow job or efficiently get all failed job logs for a workflow run -func GetJobLogs(t 
translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataActions, - mcp.Tool{ - Name: "get_job_logs", - Description: t("TOOL_GET_JOB_LOGS_DESCRIPTION", "Download logs for a specific workflow job or efficiently get all failed job logs for a workflow run"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_GET_JOB_LOGS_USER_TITLE", "Get job logs"), - ReadOnlyHint: true, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner": { - Type: "string", - Description: DescriptionRepositoryOwner, - }, - "repo": { - Type: "string", - Description: DescriptionRepositoryName, - }, - "job_id": { - Type: "number", - Description: "The unique identifier of the workflow job (required for single job logs)", - }, - "run_id": { - Type: "number", - Description: "Workflow run ID (required when using failed_only)", - }, - "failed_only": { - Type: "boolean", - Description: "When true, gets logs for all failed jobs in run_id", - }, - "return_content": { - Type: "boolean", - Description: "Returns actual log content instead of URLs", - }, - "tail_lines": { - Type: "number", - Description: "Number of lines to return from the end of the log", - Default: json.RawMessage(`500`), - }, - }, - Required: []string{"owner", "repo"}, - }, - }, - []scopes.Scope{scopes.Repo}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultErrorFromErr("failed to get GitHub client", err), nil, nil - } - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - repo, err := RequiredParam[string](args, "repo") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - // Get optional parameters - jobID, err := OptionalIntParam(args, "job_id") - if err != nil { - 
return utils.NewToolResultError(err.Error()), nil, nil - } - runID, err := OptionalIntParam(args, "run_id") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - failedOnly, err := OptionalParam[bool](args, "failed_only") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - returnContent, err := OptionalParam[bool](args, "return_content") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - tailLines, err := OptionalIntParam(args, "tail_lines") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - // Default to 500 lines if not specified - if tailLines == 0 { - tailLines = 500 - } - - // Validate parameters - if failedOnly && runID == 0 { - return utils.NewToolResultError("run_id is required when failed_only is true"), nil, nil - } - if !failedOnly && jobID == 0 { - return utils.NewToolResultError("job_id is required when failed_only is false"), nil, nil - } - - if failedOnly && runID > 0 { - // Handle failed-only mode: get logs for all failed jobs in the workflow run - return handleFailedJobLogs(ctx, client, owner, repo, int64(runID), returnContent, tailLines, deps.GetContentWindowSize()) - } else if jobID > 0 { - // Handle single job mode - return handleSingleJobLogs(ctx, client, owner, repo, int64(jobID), returnContent, tailLines, deps.GetContentWindowSize()) - } - - return utils.NewToolResultError("Either job_id must be provided for single job logs, or run_id with failed_only=true for failed job logs"), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedActions - return tool -} - -// handleFailedJobLogs gets logs for all failed jobs in a workflow run -func handleFailedJobLogs(ctx context.Context, client *github.Client, owner, repo string, runID int64, returnContent bool, tailLines int, contentWindowSize int) (*mcp.CallToolResult, any, error) { - // First, get all jobs for the workflow run - jobs, resp, err := 
client.Actions.ListWorkflowJobs(ctx, owner, repo, runID, &github.ListWorkflowJobsOptions{ - Filter: "latest", - }) - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, "failed to list workflow jobs", resp, err), nil, nil - } - defer func() { _ = resp.Body.Close() }() - - // Filter for failed jobs - var failedJobs []*github.WorkflowJob - for _, job := range jobs.Jobs { - if job.GetConclusion() == "failure" { - failedJobs = append(failedJobs, job) - } - } - - if len(failedJobs) == 0 { - result := map[string]any{ - "message": "No failed jobs found in this workflow run", - "run_id": runID, - "total_jobs": len(jobs.Jobs), - "failed_jobs": 0, - } - r, _ := json.Marshal(result) - return utils.NewToolResultText(string(r)), nil, nil - } - - // Collect logs for all failed jobs - var logResults []map[string]any - for _, job := range failedJobs { - jobResult, resp, err := getJobLogData(ctx, client, owner, repo, job.GetID(), job.GetName(), returnContent, tailLines, contentWindowSize) - if err != nil { - // Continue with other jobs even if one fails - jobResult = map[string]any{ - "job_id": job.GetID(), - "job_name": job.GetName(), - "error": err.Error(), - } - // Enable reporting of status codes and error causes - _, _ = ghErrors.NewGitHubAPIErrorToCtx(ctx, "failed to get job logs", resp, err) // Explicitly ignore error for graceful handling - } - - logResults = append(logResults, jobResult) - } - - result := map[string]any{ - "message": fmt.Sprintf("Retrieved logs for %d failed jobs", len(failedJobs)), - "run_id": runID, - "total_jobs": len(jobs.Jobs), - "failed_jobs": len(failedJobs), - "logs": logResults, - "return_format": map[string]bool{"content": returnContent, "urls": !returnContent}, - } - - r, err := json.Marshal(result) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil -} - -// handleSingleJobLogs gets logs for a single job -func handleSingleJobLogs(ctx 
context.Context, client *github.Client, owner, repo string, jobID int64, returnContent bool, tailLines int, contentWindowSize int) (*mcp.CallToolResult, any, error) { - jobResult, resp, err := getJobLogData(ctx, client, owner, repo, jobID, "", returnContent, tailLines, contentWindowSize) - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, "failed to get job logs", resp, err), nil, nil - } - - r, err := json.Marshal(jobResult) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil -} - -// getJobLogData retrieves log data for a single job, either as URL or content -func getJobLogData(ctx context.Context, client *github.Client, owner, repo string, jobID int64, jobName string, returnContent bool, tailLines int, contentWindowSize int) (map[string]any, *github.Response, error) { - // Get the download URL for the job logs - url, resp, err := client.Actions.GetWorkflowJobLogs(ctx, owner, repo, jobID, 1) - if err != nil { - return nil, resp, fmt.Errorf("failed to get job logs for job %d: %w", jobID, err) - } - defer func() { _ = resp.Body.Close() }() - - result := map[string]any{ - "job_id": jobID, - } - if jobName != "" { - result["job_name"] = jobName - } - - if returnContent { - // Download and return the actual log content - content, originalLength, httpResp, err := downloadLogContent(ctx, url.String(), tailLines, contentWindowSize) //nolint:bodyclose // Response body is closed in downloadLogContent, but we need to return httpResp - if err != nil { - // To keep the return value consistent wrap the response as a GitHub Response - ghRes := &github.Response{ - Response: httpResp, - } - return nil, ghRes, fmt.Errorf("failed to download log content for job %d: %w", jobID, err) - } - result["logs_content"] = content - result["message"] = "Job logs content retrieved successfully" - result["original_length"] = originalLength - } else { - // Return just the URL - 
result["logs_url"] = url.String() - result["message"] = "Job logs are available for download" - result["note"] = "The logs_url provides a download link for the individual job logs in plain text format. Use return_content=true to get the actual log content." - } - - return result, resp, nil -} - -func downloadLogContent(ctx context.Context, logURL string, tailLines int, maxLines int) (string, int, *http.Response, error) { - prof := profiler.New(nil, profiler.IsProfilingEnabled()) - finish := prof.Start(ctx, "log_buffer_processing") - - httpResp, err := http.Get(logURL) //nolint:gosec - if err != nil { - return "", 0, httpResp, fmt.Errorf("failed to download logs: %w", err) - } - defer func() { _ = httpResp.Body.Close() }() - - if httpResp.StatusCode != http.StatusOK { - return "", 0, httpResp, fmt.Errorf("failed to download logs: HTTP %d", httpResp.StatusCode) - } - - bufferSize := tailLines - if bufferSize > maxLines { - bufferSize = maxLines - } - - processedInput, totalLines, httpResp, err := buffer.ProcessResponseAsRingBufferToEnd(httpResp, bufferSize) - if err != nil { - return "", 0, httpResp, fmt.Errorf("failed to process log content: %w", err) - } - - lines := strings.Split(processedInput, "\n") - if len(lines) > tailLines { - lines = lines[len(lines)-tailLines:] - } - finalResult := strings.Join(lines, "\n") - - _ = finish(len(lines), int64(len(finalResult))) - - return finalResult, totalLines, httpResp, nil -} - -// RerunWorkflowRun creates a tool to re-run an entire workflow run -func RerunWorkflowRun(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataActions, - mcp.Tool{ - Name: "rerun_workflow_run", - Description: t("TOOL_RERUN_WORKFLOW_RUN_DESCRIPTION", "Re-run an entire workflow run"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_RERUN_WORKFLOW_RUN_USER_TITLE", "Rerun workflow run"), - ReadOnlyHint: false, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: 
map[string]*jsonschema.Schema{ - "owner": { - Type: "string", - Description: DescriptionRepositoryOwner, - }, - "repo": { - Type: "string", - Description: DescriptionRepositoryName, - }, - "run_id": { - Type: "number", - Description: "The unique identifier of the workflow run", - }, - }, - Required: []string{"owner", "repo", "run_id"}, - }, - }, - []scopes.Scope{scopes.Repo}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultErrorFromErr("failed to get GitHub client", err), nil, nil - } - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - repo, err := RequiredParam[string](args, "repo") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runIDInt, err := RequiredInt(args, "run_id") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runID := int64(runIDInt) - - resp, err := client.Actions.RerunWorkflowByID(ctx, owner, repo, runID) - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, "failed to rerun workflow run", resp, err), nil, nil - } - defer func() { _ = resp.Body.Close() }() - - result := map[string]any{ - "message": "Workflow run has been queued for re-run", - "run_id": runID, - "status": resp.Status, - "status_code": resp.StatusCode, - } - - r, err := json.Marshal(result) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedActions - return tool -} - -// RerunFailedJobs creates a tool to re-run only the failed jobs in a workflow run -func RerunFailedJobs(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataActions, - mcp.Tool{ - Name: 
"rerun_failed_jobs", - Description: t("TOOL_RERUN_FAILED_JOBS_DESCRIPTION", "Re-run only the failed jobs in a workflow run"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_RERUN_FAILED_JOBS_USER_TITLE", "Rerun failed jobs"), - ReadOnlyHint: false, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner": { - Type: "string", - Description: DescriptionRepositoryOwner, - }, - "repo": { - Type: "string", - Description: DescriptionRepositoryName, - }, - "run_id": { - Type: "number", - Description: "The unique identifier of the workflow run", - }, - }, - Required: []string{"owner", "repo", "run_id"}, - }, - }, - []scopes.Scope{scopes.Repo}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultErrorFromErr("failed to get GitHub client", err), nil, nil - } - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - repo, err := RequiredParam[string](args, "repo") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runIDInt, err := RequiredInt(args, "run_id") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runID := int64(runIDInt) - - resp, err := client.Actions.RerunFailedJobsByID(ctx, owner, repo, runID) - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, "failed to rerun failed jobs", resp, err), nil, nil - } - defer func() { _ = resp.Body.Close() }() - - result := map[string]any{ - "message": "Failed jobs have been queued for re-run", - "run_id": runID, - "status": resp.Status, - "status_code": resp.StatusCode, - } - - r, err := json.Marshal(result) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil - }, 
- ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedActions - return tool -} - -// CancelWorkflowRun creates a tool to cancel a workflow run -func CancelWorkflowRun(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataActions, - mcp.Tool{ - Name: "cancel_workflow_run", - Description: t("TOOL_CANCEL_WORKFLOW_RUN_DESCRIPTION", "Cancel a workflow run"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_CANCEL_WORKFLOW_RUN_USER_TITLE", "Cancel workflow run"), - ReadOnlyHint: false, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner": { - Type: "string", - Description: DescriptionRepositoryOwner, - }, - "repo": { - Type: "string", - Description: DescriptionRepositoryName, - }, - "run_id": { - Type: "number", - Description: "The unique identifier of the workflow run", - }, - }, - Required: []string{"owner", "repo", "run_id"}, - }, - }, - []scopes.Scope{scopes.Repo}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultErrorFromErr("failed to get GitHub client", err), nil, nil - } - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - repo, err := RequiredParam[string](args, "repo") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runIDInt, err := RequiredInt(args, "run_id") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runID := int64(runIDInt) - - resp, err := client.Actions.CancelWorkflowRunByID(ctx, owner, repo, runID) - if err != nil { - if _, ok := err.(*github.AcceptedError); !ok { - return ghErrors.NewGitHubAPIErrorResponse(ctx, "failed to cancel workflow run", resp, err), nil, nil - } - } - defer func() { _ = resp.Body.Close() }() - - result 
:= map[string]any{ - "message": "Workflow run has been cancelled", - "run_id": runID, - "status": resp.Status, - "status_code": resp.StatusCode, - } - - r, err := json.Marshal(result) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedActions - return tool -} - -// ListWorkflowRunArtifacts creates a tool to list artifacts for a workflow run -func ListWorkflowRunArtifacts(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataActions, - mcp.Tool{ - Name: "list_workflow_run_artifacts", - Description: t("TOOL_LIST_WORKFLOW_RUN_ARTIFACTS_DESCRIPTION", "List artifacts for a workflow run"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_LIST_WORKFLOW_RUN_ARTIFACTS_USER_TITLE", "List workflow artifacts"), - ReadOnlyHint: true, - }, - InputSchema: WithPagination(&jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner": { - Type: "string", - Description: DescriptionRepositoryOwner, - }, - "repo": { - Type: "string", - Description: DescriptionRepositoryName, - }, - "run_id": { - Type: "number", - Description: "The unique identifier of the workflow run", - }, - }, - Required: []string{"owner", "repo", "run_id"}, - }), - }, - []scopes.Scope{scopes.Repo}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultErrorFromErr("failed to get GitHub client", err), nil, nil - } - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - repo, err := RequiredParam[string](args, "repo") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runIDInt, err := RequiredInt(args, "run_id") - 
if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runID := int64(runIDInt) - - // Get optional pagination parameters - pagination, err := OptionalPaginationParams(args) - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - // Set up list options - opts := &github.ListOptions{ - PerPage: pagination.PerPage, - Page: pagination.Page, - } - - artifacts, resp, err := client.Actions.ListWorkflowRunArtifacts(ctx, owner, repo, runID, opts) - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, "failed to list workflow run artifacts", resp, err), nil, nil - } - defer func() { _ = resp.Body.Close() }() - - r, err := json.Marshal(artifacts) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedActions - return tool -} - -// DownloadWorkflowRunArtifact creates a tool to download a workflow run artifact -func DownloadWorkflowRunArtifact(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataActions, - mcp.Tool{ - Name: "download_workflow_run_artifact", - Description: t("TOOL_DOWNLOAD_WORKFLOW_RUN_ARTIFACT_DESCRIPTION", "Get download URL for a workflow run artifact"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_DOWNLOAD_WORKFLOW_RUN_ARTIFACT_USER_TITLE", "Download workflow artifact"), - ReadOnlyHint: true, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner": { - Type: "string", - Description: DescriptionRepositoryOwner, - }, - "repo": { - Type: "string", - Description: DescriptionRepositoryName, - }, - "artifact_id": { - Type: "number", - Description: "The unique identifier of the artifact", - }, - }, - Required: []string{"owner", "repo", "artifact_id"}, - }, - }, - []scopes.Scope{scopes.Repo}, - func(ctx context.Context, deps 
ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultErrorFromErr("failed to get GitHub client", err), nil, nil - } - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - repo, err := RequiredParam[string](args, "repo") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - artifactIDInt, err := RequiredInt(args, "artifact_id") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - artifactID := int64(artifactIDInt) - - // Get the download URL for the artifact - url, resp, err := client.Actions.DownloadArtifact(ctx, owner, repo, artifactID, 1) - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, "failed to get artifact download URL", resp, err), nil, nil - } - defer func() { _ = resp.Body.Close() }() - - // Create response with the download URL and information - result := map[string]any{ - "download_url": url.String(), - "message": "Artifact is available for download", - "note": "The download_url provides a download link for the artifact as a ZIP archive. 
The link is temporary and expires after a short time.", - "artifact_id": artifactID, - } - - r, err := json.Marshal(result) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedActions - return tool -} - -// DeleteWorkflowRunLogs creates a tool to delete logs for a workflow run -func DeleteWorkflowRunLogs(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataActions, - mcp.Tool{ - Name: "delete_workflow_run_logs", - Description: t("TOOL_DELETE_WORKFLOW_RUN_LOGS_DESCRIPTION", "Delete logs for a workflow run"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_DELETE_WORKFLOW_RUN_LOGS_USER_TITLE", "Delete workflow logs"), - ReadOnlyHint: false, - DestructiveHint: jsonschema.Ptr(true), - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner": { - Type: "string", - Description: DescriptionRepositoryOwner, - }, - "repo": { - Type: "string", - Description: DescriptionRepositoryName, - }, - "run_id": { - Type: "number", - Description: "The unique identifier of the workflow run", - }, - }, - Required: []string{"owner", "repo", "run_id"}, - }, - }, - []scopes.Scope{scopes.Repo}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultErrorFromErr("failed to get GitHub client", err), nil, nil - } - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - repo, err := RequiredParam[string](args, "repo") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runIDInt, err := RequiredInt(args, "run_id") - if err != nil { - return 
utils.NewToolResultError(err.Error()), nil, nil - } - runID := int64(runIDInt) - - resp, err := client.Actions.DeleteWorkflowRunLogs(ctx, owner, repo, runID) - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, "failed to delete workflow run logs", resp, err), nil, nil - } - defer func() { _ = resp.Body.Close() }() - - result := map[string]any{ - "message": "Workflow run logs have been deleted", - "run_id": runID, - "status": resp.Status, - "status_code": resp.StatusCode, - } - - r, err := json.Marshal(result) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } + httpResp, err := http.Get(logURL) //nolint:gosec + if err != nil { + return "", 0, httpResp, fmt.Errorf("failed to download logs: %w", err) + } + defer func() { _ = httpResp.Body.Close() }() - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedActions - return tool -} + if httpResp.StatusCode != http.StatusOK { + return "", 0, httpResp, fmt.Errorf("failed to download logs: HTTP %d", httpResp.StatusCode) + } -// GetWorkflowRunUsage creates a tool to get usage metrics for a workflow run -func GetWorkflowRunUsage(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataActions, - mcp.Tool{ - Name: "get_workflow_run_usage", - Description: t("TOOL_GET_WORKFLOW_RUN_USAGE_DESCRIPTION", "Get usage metrics for a workflow run"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_GET_WORKFLOW_RUN_USAGE_USER_TITLE", "Get workflow usage"), - ReadOnlyHint: true, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner": { - Type: "string", - Description: DescriptionRepositoryOwner, - }, - "repo": { - Type: "string", - Description: DescriptionRepositoryName, - }, - "run_id": { - Type: "number", - Description: "The unique identifier of the workflow run", - }, - }, - Required: []string{"owner", "repo", 
"run_id"}, - }, - }, - []scopes.Scope{scopes.Repo}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultErrorFromErr("failed to get GitHub client", err), nil, nil - } + bufferSize := min(tailLines, maxLines) - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - repo, err := RequiredParam[string](args, "repo") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runIDInt, err := RequiredInt(args, "run_id") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - runID := int64(runIDInt) + processedInput, totalLines, httpResp, err := buffer.ProcessResponseAsRingBufferToEnd(httpResp, bufferSize) + if err != nil { + return "", 0, httpResp, fmt.Errorf("failed to process log content: %w", err) + } - usage, resp, err := client.Actions.GetWorkflowRunUsageByID(ctx, owner, repo, runID) - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, "failed to get workflow run usage", resp, err), nil, nil - } - defer func() { _ = resp.Body.Close() }() + lines := strings.Split(processedInput, "\n") + if len(lines) > tailLines { + lines = lines[len(lines)-tailLines:] + } + finalResult := strings.Join(lines, "\n") - r, err := json.Marshal(usage) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } + _ = finish(len(lines), int64(len(finalResult))) - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedActions - return tool + return finalResult, totalLines, httpResp, nil } // ActionsList returns the tool and handler for listing GitHub Actions resources. 
@@ -1631,7 +388,6 @@ Use this tool to list workflows in a repository, or list workflow runs, jobs, an } }, ) - tool.FeatureFlagDisable = FeatureFlagHoldbackConsolidatedActions return tool } @@ -1740,7 +496,6 @@ Use this tool to get details about individual workflows, workflow runs, jobs, an } }, ) - tool.FeatureFlagDisable = FeatureFlagHoldbackConsolidatedActions return tool } @@ -1819,9 +574,9 @@ func ActionsRunTrigger(t translations.TranslationHelperFunc) inventory.ServerToo runID, _ := OptionalIntParam(args, "run_id") // Get optional inputs parameter - var inputs map[string]interface{} + var inputs map[string]any if requestInputs, ok := args["inputs"]; ok { - if inputsMap, ok := requestInputs.(map[string]interface{}); ok { + if inputsMap, ok := requestInputs.(map[string]any); ok { inputs = inputsMap } } @@ -1859,7 +614,6 @@ func ActionsRunTrigger(t translations.TranslationHelperFunc) inventory.ServerToo } }, ) - tool.FeatureFlagDisable = FeatureFlagHoldbackConsolidatedActions return tool } @@ -1948,8 +702,8 @@ For single job logs, provide job_id. For all failed jobs in a run, provide run_i if err != nil { return utils.NewToolResultError(err.Error()), nil, nil } - // Default to 500 lines if not specified - if tailLines == 0 { + // Default to 500 lines if not specified or invalid + if tailLines <= 0 { tailLines = 500 } @@ -1977,7 +731,6 @@ For single job logs, provide job_id. 
For all failed jobs in a run, provide run_i return utils.NewToolResultError("Either job_id must be provided for single job logs, or run_id with failed_only=true for failed job logs"), nil, nil }, ) - tool.FeatureFlagDisable = FeatureFlagHoldbackConsolidatedActions return tool } @@ -2226,7 +979,7 @@ func getWorkflowRunUsage(ctx context.Context, client *github.Client, owner, repo return utils.NewToolResultText(string(r)), nil, nil } -func runWorkflow(ctx context.Context, client *github.Client, owner, repo, workflowID, ref string, inputs map[string]interface{}) (*mcp.CallToolResult, any, error) { +func runWorkflow(ctx context.Context, client *github.Client, owner, repo, workflowID, ref string, inputs map[string]any) (*mcp.CallToolResult, any, error) { event := github.CreateWorkflowDispatchEventRequest{ Ref: ref, Inputs: inputs, diff --git a/pkg/github/actions_test.go b/pkg/github/actions_test.go index 0d47236f66..fe960ed924 100644 --- a/pkg/github/actions_test.go +++ b/pkg/github/actions_test.go @@ -3,1823 +3,17 @@ package github import ( "context" "encoding/json" - "io" "net/http" - "net/http/httptest" - "os" - "runtime" - "runtime/debug" - "strings" "testing" - "github.com/github/github-mcp-server/internal/profiler" "github.com/github/github-mcp-server/internal/toolsnaps" - buffer "github.com/github/github-mcp-server/pkg/buffer" "github.com/github/github-mcp-server/pkg/translations" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func Test_ListWorkflows(t *testing.T) { - // Verify tool definition once - toolDef := ListWorkflows(translations.NullTranslationHelper) - require.NoError(t, toolsnaps.Test(toolDef.Tool.Name, toolDef.Tool)) - - assert.Equal(t, "list_workflows", toolDef.Tool.Name) - assert.NotEmpty(t, toolDef.Tool.Description) - inputSchema := toolDef.Tool.InputSchema.(*jsonschema.Schema) - 
assert.Contains(t, inputSchema.Properties, "owner") - assert.Contains(t, inputSchema.Properties, "repo") - assert.Contains(t, inputSchema.Properties, "perPage") - assert.Contains(t, inputSchema.Properties, "page") - assert.ElementsMatch(t, inputSchema.Required, []string{"owner", "repo"}) - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - }{ - { - name: "successful workflow listing", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetReposActionsWorkflowsByOwnerByRepo: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - workflows := &github.Workflows{ - TotalCount: github.Ptr(2), - Workflows: []*github.Workflow{ - { - ID: github.Ptr(int64(123)), - Name: github.Ptr("CI"), - Path: github.Ptr(".github/workflows/ci.yml"), - State: github.Ptr("active"), - CreatedAt: &github.Timestamp{}, - UpdatedAt: &github.Timestamp{}, - URL: github.Ptr("https://api.github.com/repos/owner/repo/actions/workflows/123"), - HTMLURL: github.Ptr("https://github.com/owner/repo/actions/workflows/ci.yml"), - BadgeURL: github.Ptr("https://github.com/owner/repo/workflows/CI/badge.svg"), - NodeID: github.Ptr("W_123"), - }, - { - ID: github.Ptr(int64(456)), - Name: github.Ptr("Deploy"), - Path: github.Ptr(".github/workflows/deploy.yml"), - State: github.Ptr("active"), - CreatedAt: &github.Timestamp{}, - UpdatedAt: &github.Timestamp{}, - URL: github.Ptr("https://api.github.com/repos/owner/repo/actions/workflows/456"), - HTMLURL: github.Ptr("https://github.com/owner/repo/actions/workflows/deploy.yml"), - BadgeURL: github.Ptr("https://github.com/owner/repo/workflows/Deploy/badge.svg"), - NodeID: github.Ptr("W_456"), - }, - }, - } - w.WriteHeader(http.StatusOK) - _ = json.NewEncoder(w).Encode(workflows) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - }, - expectError: false, - }, - { - name: "missing required parameter owner", - mockedClient: 
MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "repo": "repo", - }, - expectError: true, - expectedErrMsg: "missing required parameter: owner", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - // Setup client with mock - client := github.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := toolDef.Handler(deps) - - // Create call request - request := createMCPRequest(tc.requestArgs) - - // Call handler - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - require.Equal(t, tc.expectError, result.IsError) - - // Parse the result and get the text content if no error - textContent := getTextResult(t, result) - - if tc.expectedErrMsg != "" { - assert.Equal(t, tc.expectedErrMsg, textContent.Text) - return - } - - // Unmarshal and verify the result - var response github.Workflows - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - assert.NotNil(t, response.TotalCount) - assert.Greater(t, *response.TotalCount, 0) - assert.NotEmpty(t, response.Workflows) - }) - } -} - -func Test_RunWorkflow(t *testing.T) { - // Verify tool definition once - toolDef := RunWorkflow(translations.NullTranslationHelper) - require.NoError(t, toolsnaps.Test(toolDef.Tool.Name, toolDef.Tool)) - - assert.Equal(t, "run_workflow", toolDef.Tool.Name) - assert.NotEmpty(t, toolDef.Tool.Description) - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "owner") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "repo") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "workflow_id") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "ref") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "inputs") - assert.ElementsMatch(t, 
toolDef.Tool.InputSchema.(*jsonschema.Schema).Required, []string{"owner", "repo", "workflow_id", "ref"}) - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - }{ - { - name: "successful workflow run", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PostReposActionsWorkflowsDispatchesByOwnerByRepoByWorkflowID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusNoContent) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "workflow_id": "12345", - "ref": "main", - }, - expectError: false, - }, - { - name: "missing required parameter workflow_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "ref": "main", - }, - expectError: true, - expectedErrMsg: "missing required parameter: workflow_id", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - // Setup client with mock - client := github.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := toolDef.Handler(deps) - - // Create call request - request := createMCPRequest(tc.requestArgs) - - // Call handler - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - require.Equal(t, tc.expectError, result.IsError) - - // Parse the result and get the text content if no error - textContent := getTextResult(t, result) - - if tc.expectedErrMsg != "" { - assert.Equal(t, tc.expectedErrMsg, textContent.Text) - return - } - - // Unmarshal and verify the result - var response map[string]any - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - assert.Equal(t, "Workflow run has been queued", response["message"]) - assert.Contains(t, response, "workflow_type") - }) - } -} - -func Test_RunWorkflow_WithFilename(t 
*testing.T) { - // Test the unified RunWorkflow function with filenames - toolDef := RunWorkflow(translations.NullTranslationHelper) - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - }{ - { - name: "successful workflow run by filename", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PostReposActionsWorkflowsDispatchesByOwnerByRepoByWorkflowID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusNoContent) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "workflow_id": "ci.yml", - "ref": "main", - }, - expectError: false, - }, - { - name: "successful workflow run by numeric ID as string", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PostReposActionsWorkflowsDispatchesByOwnerByRepoByWorkflowID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusNoContent) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "workflow_id": "12345", - "ref": "main", - }, - expectError: false, - }, - { - name: "missing required parameter workflow_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "ref": "main", - }, - expectError: true, - expectedErrMsg: "missing required parameter: workflow_id", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - // Setup client with mock - client := github.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := toolDef.Handler(deps) - - // Create call request - request := createMCPRequest(tc.requestArgs) - - // Call handler - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - require.Equal(t, tc.expectError, result.IsError) - - // Parse the result and get the 
text content if no error - textContent := getTextResult(t, result) - - if tc.expectedErrMsg != "" { - assert.Equal(t, tc.expectedErrMsg, textContent.Text) - return - } - - // Unmarshal and verify the result - var response map[string]any - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - assert.Equal(t, "Workflow run has been queued", response["message"]) - assert.Contains(t, response, "workflow_type") - }) - } -} - -func Test_CancelWorkflowRun(t *testing.T) { - // Verify tool definition once - toolDef := CancelWorkflowRun(translations.NullTranslationHelper) - require.NoError(t, toolsnaps.Test(toolDef.Tool.Name, toolDef.Tool)) - - assert.Equal(t, "cancel_workflow_run", toolDef.Tool.Name) - assert.NotEmpty(t, toolDef.Tool.Description) - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "owner") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "repo") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "run_id") - assert.ElementsMatch(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Required, []string{"owner", "repo", "run_id"}) - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - }{ - { - name: "successful workflow run cancellation", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - "POST /repos/owner/repo/actions/runs/12345/cancel": http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusAccepted) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "run_id": float64(12345), - }, - expectError: false, - }, - { - name: "conflict when cancelling a workflow run", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - "POST /repos/owner/repo/actions/runs/12345/cancel": http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - 
w.WriteHeader(http.StatusConflict) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "run_id": float64(12345), - }, - expectError: true, - expectedErrMsg: "failed to cancel workflow run", - }, - { - name: "missing required parameter run_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - }, - expectError: true, - expectedErrMsg: "missing required parameter: run_id", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - // Setup client with mock - client := github.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := toolDef.Handler(deps) - - // Create call request - request := createMCPRequest(tc.requestArgs) - - // Call handler - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - require.Equal(t, tc.expectError, result.IsError) - - // Parse the result and get the text content - textContent := getTextResult(t, result) - - if tc.expectedErrMsg != "" { - assert.Contains(t, textContent.Text, tc.expectedErrMsg) - return - } - - // Unmarshal and verify the result - var response map[string]any - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - assert.Equal(t, "Workflow run has been cancelled", response["message"]) - assert.Equal(t, float64(12345), response["run_id"]) - }) - } -} - -func Test_ListWorkflowRunArtifacts(t *testing.T) { - // Verify tool definition once - toolDef := ListWorkflowRunArtifacts(translations.NullTranslationHelper) - require.NoError(t, toolsnaps.Test(toolDef.Tool.Name, toolDef.Tool)) - - assert.Equal(t, "list_workflow_run_artifacts", toolDef.Tool.Name) - assert.NotEmpty(t, toolDef.Tool.Description) - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "owner") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "repo") - 
assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "run_id") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "perPage") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "page") - assert.ElementsMatch(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Required, []string{"owner", "repo", "run_id"}) - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - }{ - { - name: "successful artifacts listing", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetReposActionsRunsArtifactsByOwnerByRepoByRunID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - artifacts := &github.ArtifactList{ - TotalCount: github.Ptr(int64(2)), - Artifacts: []*github.Artifact{ - { - ID: github.Ptr(int64(1)), - NodeID: github.Ptr("A_1"), - Name: github.Ptr("build-artifacts"), - SizeInBytes: github.Ptr(int64(1024)), - URL: github.Ptr("https://api.github.com/repos/owner/repo/actions/artifacts/1"), - ArchiveDownloadURL: github.Ptr("https://api.github.com/repos/owner/repo/actions/artifacts/1/zip"), - Expired: github.Ptr(false), - CreatedAt: &github.Timestamp{}, - UpdatedAt: &github.Timestamp{}, - ExpiresAt: &github.Timestamp{}, - WorkflowRun: &github.ArtifactWorkflowRun{ - ID: github.Ptr(int64(12345)), - RepositoryID: github.Ptr(int64(1)), - HeadRepositoryID: github.Ptr(int64(1)), - HeadBranch: github.Ptr("main"), - HeadSHA: github.Ptr("abc123"), - }, - }, - { - ID: github.Ptr(int64(2)), - NodeID: github.Ptr("A_2"), - Name: github.Ptr("test-results"), - SizeInBytes: github.Ptr(int64(512)), - URL: github.Ptr("https://api.github.com/repos/owner/repo/actions/artifacts/2"), - ArchiveDownloadURL: github.Ptr("https://api.github.com/repos/owner/repo/actions/artifacts/2/zip"), - Expired: github.Ptr(false), - CreatedAt: &github.Timestamp{}, - UpdatedAt: &github.Timestamp{}, - ExpiresAt: 
&github.Timestamp{}, - WorkflowRun: &github.ArtifactWorkflowRun{ - ID: github.Ptr(int64(12345)), - RepositoryID: github.Ptr(int64(1)), - HeadRepositoryID: github.Ptr(int64(1)), - HeadBranch: github.Ptr("main"), - HeadSHA: github.Ptr("abc123"), - }, - }, - }, - } - w.WriteHeader(http.StatusOK) - _ = json.NewEncoder(w).Encode(artifacts) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "run_id": float64(12345), - }, - expectError: false, - }, - { - name: "missing required parameter run_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - }, - expectError: true, - expectedErrMsg: "missing required parameter: run_id", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - // Setup client with mock - client := github.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := toolDef.Handler(deps) - - // Create call request - request := createMCPRequest(tc.requestArgs) - - // Call handler - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - require.Equal(t, tc.expectError, result.IsError) - - // Parse the result and get the text content if no error - textContent := getTextResult(t, result) - - if tc.expectedErrMsg != "" { - assert.Equal(t, tc.expectedErrMsg, textContent.Text) - return - } - - // Unmarshal and verify the result - var response github.ArtifactList - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - assert.NotNil(t, response.TotalCount) - assert.Greater(t, *response.TotalCount, int64(0)) - assert.NotEmpty(t, response.Artifacts) - }) - } -} - -func Test_DownloadWorkflowRunArtifact(t *testing.T) { - // Verify tool definition once - toolDef := DownloadWorkflowRunArtifact(translations.NullTranslationHelper) - require.NoError(t, toolsnaps.Test(toolDef.Tool.Name, toolDef.Tool)) - - assert.Equal(t, 
"download_workflow_run_artifact", toolDef.Tool.Name) - assert.NotEmpty(t, toolDef.Tool.Description) - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "owner") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "repo") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "artifact_id") - assert.ElementsMatch(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Required, []string{"owner", "repo", "artifact_id"}) - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - }{ - { - name: "successful artifact download URL", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - "GET /repos/owner/repo/actions/artifacts/123/zip": http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - // GitHub returns a 302 redirect to the download URL - w.Header().Set("Location", "https://api.github.com/repos/owner/repo/actions/artifacts/123/download") - w.WriteHeader(http.StatusFound) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "artifact_id": float64(123), - }, - expectError: false, - }, - { - name: "missing required parameter artifact_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - }, - expectError: true, - expectedErrMsg: "missing required parameter: artifact_id", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - // Setup client with mock - client := github.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := toolDef.Handler(deps) - - // Create call request - request := createMCPRequest(tc.requestArgs) - - // Call handler - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - require.Equal(t, tc.expectError, result.IsError) - - // Parse the result 
and get the text content if no error - textContent := getTextResult(t, result) - - if tc.expectedErrMsg != "" { - assert.Equal(t, tc.expectedErrMsg, textContent.Text) - return - } - - // Unmarshal and verify the result - var response map[string]any - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - assert.Contains(t, response, "download_url") - assert.Contains(t, response, "message") - assert.Equal(t, "Artifact is available for download", response["message"]) - assert.Equal(t, float64(123), response["artifact_id"]) - }) - } -} - -func Test_DeleteWorkflowRunLogs(t *testing.T) { - // Verify tool definition once - toolDef := DeleteWorkflowRunLogs(translations.NullTranslationHelper) - require.NoError(t, toolsnaps.Test(toolDef.Tool.Name, toolDef.Tool)) - - assert.Equal(t, "delete_workflow_run_logs", toolDef.Tool.Name) - assert.NotEmpty(t, toolDef.Tool.Description) - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "owner") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "repo") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "run_id") - assert.ElementsMatch(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Required, []string{"owner", "repo", "run_id"}) - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - }{ - { - name: "successful logs deletion", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - DeleteReposActionsRunsLogsByOwnerByRepoByRunID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusNoContent) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "run_id": float64(12345), - }, - expectError: false, - }, - { - name: "missing required parameter run_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - 
"owner": "owner", - "repo": "repo", - }, - expectError: true, - expectedErrMsg: "missing required parameter: run_id", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - // Setup client with mock - client := github.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := toolDef.Handler(deps) - - // Create call request - request := createMCPRequest(tc.requestArgs) - - // Call handler - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - require.Equal(t, tc.expectError, result.IsError) - - // Parse the result and get the text content if no error - textContent := getTextResult(t, result) - - if tc.expectedErrMsg != "" { - assert.Equal(t, tc.expectedErrMsg, textContent.Text) - return - } - - // Unmarshal and verify the result - var response map[string]any - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - assert.Equal(t, "Workflow run logs have been deleted", response["message"]) - assert.Equal(t, float64(12345), response["run_id"]) - }) - } -} - -func Test_GetWorkflowRunUsage(t *testing.T) { - // Verify tool definition once - toolDef := GetWorkflowRunUsage(translations.NullTranslationHelper) - require.NoError(t, toolsnaps.Test(toolDef.Tool.Name, toolDef.Tool)) - - assert.Equal(t, "get_workflow_run_usage", toolDef.Tool.Name) - assert.NotEmpty(t, toolDef.Tool.Description) - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "owner") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "repo") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "run_id") - assert.ElementsMatch(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Required, []string{"owner", "repo", "run_id"}) - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - }{ - { - name: "successful workflow run 
usage", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetReposActionsRunsTimingByOwnerByRepoByRunID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - usage := &github.WorkflowRunUsage{ - Billable: &github.WorkflowRunBillMap{ - "UBUNTU": &github.WorkflowRunBill{ - TotalMS: github.Ptr(int64(120000)), - Jobs: github.Ptr(2), - JobRuns: []*github.WorkflowRunJobRun{ - { - JobID: github.Ptr(1), - DurationMS: github.Ptr(int64(60000)), - }, - { - JobID: github.Ptr(2), - DurationMS: github.Ptr(int64(60000)), - }, - }, - }, - }, - RunDurationMS: github.Ptr(int64(120000)), - } - w.WriteHeader(http.StatusOK) - _ = json.NewEncoder(w).Encode(usage) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "run_id": float64(12345), - }, - expectError: false, - }, - { - name: "missing required parameter run_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - }, - expectError: true, - expectedErrMsg: "missing required parameter: run_id", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - // Setup client with mock - client := github.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := toolDef.Handler(deps) - - // Create call request - request := createMCPRequest(tc.requestArgs) - - // Call handler - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - require.Equal(t, tc.expectError, result.IsError) - - // Parse the result and get the text content if no error - textContent := getTextResult(t, result) - - if tc.expectedErrMsg != "" { - assert.Equal(t, tc.expectedErrMsg, textContent.Text) - return - } - - // Unmarshal and verify the result - var response github.WorkflowRunUsage - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - assert.NotNil(t, response.RunDurationMS) - 
assert.NotNil(t, response.Billable) - }) - } -} - -func Test_GetJobLogs(t *testing.T) { - // Verify tool definition once - toolDef := GetJobLogs(translations.NullTranslationHelper) - require.NoError(t, toolsnaps.Test(toolDef.Tool.Name, toolDef.Tool)) - - assert.Equal(t, "get_job_logs", toolDef.Tool.Name) - assert.NotEmpty(t, toolDef.Tool.Description) - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "owner") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "repo") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "job_id") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "run_id") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "failed_only") - assert.Contains(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Properties, "return_content") - assert.ElementsMatch(t, toolDef.Tool.InputSchema.(*jsonschema.Schema).Required, []string{"owner", "repo"}) - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - checkResponse func(t *testing.T, response map[string]any) - }{ - { - name: "successful single job logs with URL", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetReposActionsJobsLogsByOwnerByRepoByJobID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.Header().Set("Location", "https://github.com/logs/job/123") - w.WriteHeader(http.StatusFound) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "job_id": float64(123), - }, - expectError: false, - checkResponse: func(t *testing.T, response map[string]any) { - assert.Equal(t, float64(123), response["job_id"]) - assert.Contains(t, response, "logs_url") - assert.Equal(t, "Job logs are available for download", response["message"]) - assert.Contains(t, response, "note") - }, - }, - { - name: "successful failed jobs 
logs", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetReposActionsRunsJobsByOwnerByRepoByRunID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - jobs := &github.Jobs{ - TotalCount: github.Ptr(3), - Jobs: []*github.WorkflowJob{ - { - ID: github.Ptr(int64(1)), - Name: github.Ptr("test-job-1"), - Conclusion: github.Ptr("success"), - }, - { - ID: github.Ptr(int64(2)), - Name: github.Ptr("test-job-2"), - Conclusion: github.Ptr("failure"), - }, - { - ID: github.Ptr(int64(3)), - Name: github.Ptr("test-job-3"), - Conclusion: github.Ptr("failure"), - }, - }, - } - w.WriteHeader(http.StatusOK) - _ = json.NewEncoder(w).Encode(jobs) - }), - GetReposActionsJobsLogsByOwnerByRepoByJobID: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Header().Set("Location", "https://github.com/logs/job/"+r.URL.Path[len(r.URL.Path)-1:]) - w.WriteHeader(http.StatusFound) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "run_id": float64(456), - "failed_only": true, - }, - expectError: false, - checkResponse: func(t *testing.T, response map[string]any) { - assert.Equal(t, float64(456), response["run_id"]) - assert.Equal(t, float64(3), response["total_jobs"]) - assert.Equal(t, float64(2), response["failed_jobs"]) - assert.Contains(t, response, "logs") - assert.Equal(t, "Retrieved logs for 2 failed jobs", response["message"]) - - logs, ok := response["logs"].([]interface{}) - assert.True(t, ok) - assert.Len(t, logs, 2) - }, - }, - { - name: "no failed jobs found", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetReposActionsRunsJobsByOwnerByRepoByRunID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - jobs := &github.Jobs{ - TotalCount: github.Ptr(2), - Jobs: []*github.WorkflowJob{ - { - ID: github.Ptr(int64(1)), - Name: github.Ptr("test-job-1"), - Conclusion: github.Ptr("success"), - }, - { - ID: github.Ptr(int64(2)), - Name: github.Ptr("test-job-2"), - 
Conclusion: github.Ptr("success"), - }, - }, - } - w.WriteHeader(http.StatusOK) - _ = json.NewEncoder(w).Encode(jobs) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "run_id": float64(456), - "failed_only": true, - }, - expectError: false, - checkResponse: func(t *testing.T, response map[string]any) { - assert.Equal(t, "No failed jobs found in this workflow run", response["message"]) - assert.Equal(t, float64(456), response["run_id"]) - assert.Equal(t, float64(2), response["total_jobs"]) - assert.Equal(t, float64(0), response["failed_jobs"]) - }, - }, - { - name: "missing job_id when not using failed_only", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - }, - expectError: true, - expectedErrMsg: "job_id is required when failed_only is false", - }, - { - name: "missing run_id when using failed_only", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "failed_only": true, - }, - expectError: true, - expectedErrMsg: "run_id is required when failed_only is true", - }, - { - name: "missing required parameter owner", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "repo": "repo", - "job_id": float64(123), - }, - expectError: true, - expectedErrMsg: "missing required parameter: owner", - }, - { - name: "missing required parameter repo", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "owner", - "job_id": float64(123), - }, - expectError: true, - expectedErrMsg: "missing required parameter: repo", - }, - { - name: "API error when getting single job logs", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetReposActionsJobsLogsByOwnerByRepoByJobID: http.HandlerFunc(func(w http.ResponseWriter, _ 
*http.Request) { - w.WriteHeader(http.StatusNotFound) - _ = json.NewEncoder(w).Encode(map[string]string{ - "message": "Not Found", - }) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "job_id": float64(999), - }, - expectError: true, - }, - { - name: "API error when listing workflow jobs for failed_only", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetReposActionsRunsJobsByOwnerByRepoByRunID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusNotFound) - _ = json.NewEncoder(w).Encode(map[string]string{ - "message": "Not Found", - }) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "run_id": float64(999), - "failed_only": true, - }, - expectError: true, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - // Setup client with mock - client := github.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - ContentWindowSize: 5000, - } - handler := toolDef.Handler(deps) - - // Create call request - request := createMCPRequest(tc.requestArgs) - - // Call handler - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - require.Equal(t, tc.expectError, result.IsError) - - // Parse the result and get the text content - textContent := getTextResult(t, result) - - if tc.expectedErrMsg != "" { - assert.Equal(t, tc.expectedErrMsg, textContent.Text) - return - } - - if tc.expectError { - // For API errors, just verify we got an error - assert.True(t, result.IsError) - return - } - - // Unmarshal and verify the result - var response map[string]any - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - - if tc.checkResponse != nil { - tc.checkResponse(t, response) - } - }) - } -} - -func Test_GetJobLogs_WithContentReturn(t *testing.T) { - // Test the return_content functionality with a mock HTTP server - logContent := 
"2023-01-01T10:00:00.000Z Starting job...\n2023-01-01T10:00:01.000Z Running tests...\n2023-01-01T10:00:02.000Z Job completed successfully" - - // Create a test server to serve log content - testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusOK) - _, _ = w.Write([]byte(logContent)) - })) - defer testServer.Close() - - mockedClient := MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetReposActionsJobsLogsByOwnerByRepoByJobID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.Header().Set("Location", testServer.URL) - w.WriteHeader(http.StatusFound) - }), - }) - - client := github.NewClient(mockedClient) - toolDef := GetJobLogs(translations.NullTranslationHelper) - deps := BaseDeps{ - Client: client, - ContentWindowSize: 5000, - } - handler := toolDef.Handler(deps) - - request := createMCPRequest(map[string]any{ - "owner": "owner", - "repo": "repo", - "job_id": float64(123), - "return_content": true, - }) - - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - require.NoError(t, err) - require.False(t, result.IsError) - - textContent := getTextResult(t, result) - var response map[string]any - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - - assert.Equal(t, float64(123), response["job_id"]) - assert.Equal(t, logContent, response["logs_content"]) - assert.Equal(t, "Job logs content retrieved successfully", response["message"]) - assert.NotContains(t, response, "logs_url") // Should not have URL when returning content -} - -func Test_GetJobLogs_WithContentReturnAndTailLines(t *testing.T) { - // Test the return_content functionality with a mock HTTP server - logContent := "2023-01-01T10:00:00.000Z Starting job...\n2023-01-01T10:00:01.000Z Running tests...\n2023-01-01T10:00:02.000Z Job completed successfully" - expectedLogContent := "2023-01-01T10:00:02.000Z Job completed successfully" - - // Create a test 
server to serve log content - testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusOK) - _, _ = w.Write([]byte(logContent)) - })) - defer testServer.Close() - - mockedClient := MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetReposActionsJobsLogsByOwnerByRepoByJobID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.Header().Set("Location", testServer.URL) - w.WriteHeader(http.StatusFound) - }), - }) - - client := github.NewClient(mockedClient) - toolDef := GetJobLogs(translations.NullTranslationHelper) - deps := BaseDeps{ - Client: client, - ContentWindowSize: 5000, - } - handler := toolDef.Handler(deps) - - request := createMCPRequest(map[string]any{ - "owner": "owner", - "repo": "repo", - "job_id": float64(123), - "return_content": true, - "tail_lines": float64(1), // Requesting last 1 line - }) - - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - require.NoError(t, err) - require.False(t, result.IsError) - - textContent := getTextResult(t, result) - var response map[string]any - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - - assert.Equal(t, float64(123), response["job_id"]) - assert.Equal(t, float64(3), response["original_length"]) - assert.Equal(t, expectedLogContent, response["logs_content"]) - assert.Equal(t, "Job logs content retrieved successfully", response["message"]) - assert.NotContains(t, response, "logs_url") // Should not have URL when returning content -} - -func Test_GetJobLogs_WithContentReturnAndLargeTailLines(t *testing.T) { - logContent := "Line 1\nLine 2\nLine 3" - expectedLogContent := "Line 1\nLine 2\nLine 3" - - testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusOK) - _, _ = w.Write([]byte(logContent)) - })) - defer testServer.Close() - - mockedClient := 
MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetReposActionsJobsLogsByOwnerByRepoByJobID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.Header().Set("Location", testServer.URL) - w.WriteHeader(http.StatusFound) - }), - }) - - client := github.NewClient(mockedClient) - toolDef := GetJobLogs(translations.NullTranslationHelper) - deps := BaseDeps{ - Client: client, - ContentWindowSize: 5000, - } - handler := toolDef.Handler(deps) - - request := createMCPRequest(map[string]any{ - "owner": "owner", - "repo": "repo", - "job_id": float64(123), - "return_content": true, - "tail_lines": float64(100), - }) - - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - require.NoError(t, err) - require.False(t, result.IsError) - - textContent := getTextResult(t, result) - var response map[string]any - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - - assert.Equal(t, float64(123), response["job_id"]) - assert.Equal(t, float64(3), response["original_length"]) - assert.Equal(t, expectedLogContent, response["logs_content"]) - assert.Equal(t, "Job logs content retrieved successfully", response["message"]) - assert.NotContains(t, response, "logs_url") -} - -func Test_MemoryUsage_SlidingWindow_vs_NoWindow(t *testing.T) { - if testing.Short() { - t.Skip("Skipping memory profiling test in short mode") - } - - const logLines = 100000 - const bufferSize = 5000 - largeLogContent := strings.Repeat("log line with some content\n", logLines-1) + "final log line" - - testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusOK) - _, _ = w.Write([]byte(largeLogContent)) - })) - defer testServer.Close() - - os.Setenv("GITHUB_MCP_PROFILING_ENABLED", "true") - defer os.Unsetenv("GITHUB_MCP_PROFILING_ENABLED") - - profiler.InitFromEnv(nil) - ctx := context.Background() - - debug.SetGCPercent(-1) - defer debug.SetGCPercent(100) - - for i := 
0; i < 3; i++ { - runtime.GC() - } - - var baselineStats runtime.MemStats - runtime.ReadMemStats(&baselineStats) - - profile1, err1 := profiler.ProfileFuncWithMetrics(ctx, "sliding_window", func() (int, int64, error) { - resp1, err := http.Get(testServer.URL) - if err != nil { - return 0, 0, err - } - defer resp1.Body.Close() //nolint:bodyclose - content, totalLines, _, err := buffer.ProcessResponseAsRingBufferToEnd(resp1, bufferSize) //nolint:bodyclose - return totalLines, int64(len(content)), err - }) - require.NoError(t, err1) - - for i := 0; i < 3; i++ { - runtime.GC() - } - - profile2, err2 := profiler.ProfileFuncWithMetrics(ctx, "no_window", func() (int, int64, error) { - resp2, err := http.Get(testServer.URL) - if err != nil { - return 0, 0, err - } - defer resp2.Body.Close() //nolint:bodyclose - - allContent, err := io.ReadAll(resp2.Body) - if err != nil { - return 0, 0, err - } - - allLines := strings.Split(string(allContent), "\n") - var nonEmptyLines []string - for _, line := range allLines { - if line != "" { - nonEmptyLines = append(nonEmptyLines, line) - } - } - totalLines := len(nonEmptyLines) - - var resultLines []string - if totalLines > bufferSize { - resultLines = nonEmptyLines[totalLines-bufferSize:] - } else { - resultLines = nonEmptyLines - } - - result := strings.Join(resultLines, "\n") - return totalLines, int64(len(result)), nil - }) - require.NoError(t, err2) - - assert.Greater(t, profile2.MemoryDelta, profile1.MemoryDelta, - "Sliding window should use less memory than reading all into memory") - - assert.Equal(t, profile1.LinesCount, profile2.LinesCount, - "Both approaches should count the same number of input lines") - assert.InDelta(t, profile1.BytesCount, profile2.BytesCount, 100, - "Both approaches should produce similar output sizes (within 100 bytes)") - - memoryReduction := float64(profile2.MemoryDelta-profile1.MemoryDelta) / float64(profile2.MemoryDelta) * 100 - t.Logf("Memory reduction: %.1f%% (%.2f MB vs %.2f MB)", - 
memoryReduction, - float64(profile2.MemoryDelta)/1024/1024, - float64(profile1.MemoryDelta)/1024/1024) - - t.Logf("Baseline: %d bytes", baselineStats.Alloc) - t.Logf("Sliding window: %s", profile1.String()) - t.Logf("No window: %s", profile2.String()) -} - -func Test_ListWorkflowRuns(t *testing.T) { - // Verify tool definition once - toolDef := ListWorkflowRuns(translations.NullTranslationHelper) - require.NoError(t, toolsnaps.Test(toolDef.Tool.Name, toolDef.Tool)) - - assert.Equal(t, "list_workflow_runs", toolDef.Tool.Name) - assert.NotEmpty(t, toolDef.Tool.Description) - inputSchema := toolDef.Tool.InputSchema.(*jsonschema.Schema) - assert.Contains(t, inputSchema.Properties, "owner") - assert.Contains(t, inputSchema.Properties, "repo") - assert.Contains(t, inputSchema.Properties, "workflow_id") - assert.ElementsMatch(t, inputSchema.Required, []string{"owner", "repo", "workflow_id"}) -} - -func Test_GetWorkflowRun(t *testing.T) { - // Verify tool definition once - toolDef := GetWorkflowRun(translations.NullTranslationHelper) - require.NoError(t, toolsnaps.Test(toolDef.Tool.Name, toolDef.Tool)) - - assert.Equal(t, "get_workflow_run", toolDef.Tool.Name) - assert.NotEmpty(t, toolDef.Tool.Description) - inputSchema := toolDef.Tool.InputSchema.(*jsonschema.Schema) - assert.Contains(t, inputSchema.Properties, "owner") - assert.Contains(t, inputSchema.Properties, "repo") - assert.Contains(t, inputSchema.Properties, "run_id") - assert.ElementsMatch(t, inputSchema.Required, []string{"owner", "repo", "run_id"}) -} - -func Test_GetWorkflowRunLogs(t *testing.T) { - // Verify tool definition once - toolDef := GetWorkflowRunLogs(translations.NullTranslationHelper) - require.NoError(t, toolsnaps.Test(toolDef.Tool.Name, toolDef.Tool)) - - assert.Equal(t, "get_workflow_run_logs", toolDef.Tool.Name) - assert.NotEmpty(t, toolDef.Tool.Description) - inputSchema := toolDef.Tool.InputSchema.(*jsonschema.Schema) - assert.Contains(t, inputSchema.Properties, "owner") - assert.Contains(t, 
inputSchema.Properties, "repo") - assert.Contains(t, inputSchema.Properties, "run_id") - assert.ElementsMatch(t, inputSchema.Required, []string{"owner", "repo", "run_id"}) -} - -func Test_ListWorkflowJobs(t *testing.T) { - // Verify tool definition once - toolDef := ListWorkflowJobs(translations.NullTranslationHelper) - require.NoError(t, toolsnaps.Test(toolDef.Tool.Name, toolDef.Tool)) - - assert.Equal(t, "list_workflow_jobs", toolDef.Tool.Name) - assert.NotEmpty(t, toolDef.Tool.Description) - inputSchema := toolDef.Tool.InputSchema.(*jsonschema.Schema) - assert.Contains(t, inputSchema.Properties, "owner") - assert.Contains(t, inputSchema.Properties, "repo") - assert.Contains(t, inputSchema.Properties, "run_id") - assert.ElementsMatch(t, inputSchema.Required, []string{"owner", "repo", "run_id"}) -} - -func Test_RerunWorkflowRun(t *testing.T) { - // Verify tool definition once - toolDef := RerunWorkflowRun(translations.NullTranslationHelper) - require.NoError(t, toolsnaps.Test(toolDef.Tool.Name, toolDef.Tool)) - - assert.Equal(t, "rerun_workflow_run", toolDef.Tool.Name) - assert.NotEmpty(t, toolDef.Tool.Description) - inputSchema := toolDef.Tool.InputSchema.(*jsonschema.Schema) - assert.Contains(t, inputSchema.Properties, "owner") - assert.Contains(t, inputSchema.Properties, "repo") - assert.Contains(t, inputSchema.Properties, "run_id") - assert.ElementsMatch(t, inputSchema.Required, []string{"owner", "repo", "run_id"}) -} - -func Test_RerunFailedJobs(t *testing.T) { - // Verify tool definition once - toolDef := RerunFailedJobs(translations.NullTranslationHelper) - require.NoError(t, toolsnaps.Test(toolDef.Tool.Name, toolDef.Tool)) - - assert.Equal(t, "rerun_failed_jobs", toolDef.Tool.Name) - assert.NotEmpty(t, toolDef.Tool.Description) - inputSchema := toolDef.Tool.InputSchema.(*jsonschema.Schema) - assert.Contains(t, inputSchema.Properties, "owner") - assert.Contains(t, inputSchema.Properties, "repo") - assert.Contains(t, inputSchema.Properties, "run_id") - 
assert.ElementsMatch(t, inputSchema.Required, []string{"owner", "repo", "run_id"}) - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - }{ - { - name: "successful rerun of failed jobs", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PostReposActionsRunsRerunFailedJobsByOwnerByRepoByRunID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusCreated) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "run_id": float64(12345), - }, - expectError: false, - }, - { - name: "missing required parameter run_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - }, - expectError: true, - expectedErrMsg: "missing required parameter: run_id", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - client := github.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := toolDef.Handler(deps) - - request := createMCPRequest(tc.requestArgs) - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - require.Equal(t, tc.expectError, result.IsError) - - textContent := getTextResult(t, result) - - if tc.expectedErrMsg != "" { - assert.Equal(t, tc.expectedErrMsg, textContent.Text) - return - } - - var response map[string]any - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - assert.Equal(t, "Failed jobs have been queued for re-run", response["message"]) - assert.Equal(t, float64(12345), response["run_id"]) - }) - } -} - -func Test_RerunWorkflowRun_Behavioral(t *testing.T) { - toolDef := RerunWorkflowRun(translations.NullTranslationHelper) - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string 
- }{ - { - name: "successful rerun of workflow run", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PostReposActionsRunsRerunByOwnerByRepoByRunID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusCreated) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "run_id": float64(12345), - }, - expectError: false, - }, - { - name: "missing required parameter run_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - }, - expectError: true, - expectedErrMsg: "missing required parameter: run_id", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - client := github.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := toolDef.Handler(deps) - - request := createMCPRequest(tc.requestArgs) - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - require.Equal(t, tc.expectError, result.IsError) - - textContent := getTextResult(t, result) - - if tc.expectedErrMsg != "" { - assert.Equal(t, tc.expectedErrMsg, textContent.Text) - return - } - - var response map[string]any - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - assert.Equal(t, "Workflow run has been queued for re-run", response["message"]) - assert.Equal(t, float64(12345), response["run_id"]) - }) - } -} - -func Test_ListWorkflowRuns_Behavioral(t *testing.T) { - toolDef := ListWorkflowRuns(translations.NullTranslationHelper) - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - }{ - { - name: "successful workflow runs listing", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetReposActionsWorkflowsRunsByOwnerByRepoByWorkflowID: http.HandlerFunc(func(w http.ResponseWriter, _ 
*http.Request) { - runs := &github.WorkflowRuns{ - TotalCount: github.Ptr(2), - WorkflowRuns: []*github.WorkflowRun{ - { - ID: github.Ptr(int64(123)), - Name: github.Ptr("CI"), - Status: github.Ptr("completed"), - Conclusion: github.Ptr("success"), - }, - { - ID: github.Ptr(int64(456)), - Name: github.Ptr("CI"), - Status: github.Ptr("completed"), - Conclusion: github.Ptr("failure"), - }, - }, - } - w.WriteHeader(http.StatusOK) - _ = json.NewEncoder(w).Encode(runs) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "workflow_id": "ci.yml", - }, - expectError: false, - }, - { - name: "missing required parameter workflow_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - }, - expectError: true, - expectedErrMsg: "missing required parameter: workflow_id", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - client := github.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := toolDef.Handler(deps) - - request := createMCPRequest(tc.requestArgs) - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - require.Equal(t, tc.expectError, result.IsError) - - textContent := getTextResult(t, result) - - if tc.expectedErrMsg != "" { - assert.Equal(t, tc.expectedErrMsg, textContent.Text) - return - } - - var response github.WorkflowRuns - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - assert.NotNil(t, response.TotalCount) - assert.Greater(t, *response.TotalCount, 0) - }) - } -} - -func Test_GetWorkflowRun_Behavioral(t *testing.T) { - toolDef := GetWorkflowRun(translations.NullTranslationHelper) - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - }{ - { - name: "successful get workflow run", - mockedClient: 
MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetReposActionsRunsByOwnerByRepoByRunID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - run := &github.WorkflowRun{ - ID: github.Ptr(int64(12345)), - Name: github.Ptr("CI"), - Status: github.Ptr("completed"), - Conclusion: github.Ptr("success"), - } - w.WriteHeader(http.StatusOK) - _ = json.NewEncoder(w).Encode(run) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "run_id": float64(12345), - }, - expectError: false, - }, - { - name: "missing required parameter run_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - }, - expectError: true, - expectedErrMsg: "missing required parameter: run_id", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - client := github.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := toolDef.Handler(deps) - - request := createMCPRequest(tc.requestArgs) - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - require.Equal(t, tc.expectError, result.IsError) - - textContent := getTextResult(t, result) - - if tc.expectedErrMsg != "" { - assert.Equal(t, tc.expectedErrMsg, textContent.Text) - return - } - - var response github.WorkflowRun - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - assert.NotNil(t, response.ID) - assert.Equal(t, int64(12345), *response.ID) - }) - } -} - -func Test_GetWorkflowRunLogs_Behavioral(t *testing.T) { - toolDef := GetWorkflowRunLogs(translations.NullTranslationHelper) - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - }{ - { - name: "successful get workflow run logs", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - 
GetReposActionsRunsLogsByOwnerByRepoByRunID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.Header().Set("Location", "https://github.com/logs/run/12345") - w.WriteHeader(http.StatusFound) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "run_id": float64(12345), - }, - expectError: false, - }, - { - name: "missing required parameter run_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - }, - expectError: true, - expectedErrMsg: "missing required parameter: run_id", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - client := github.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := toolDef.Handler(deps) - - request := createMCPRequest(tc.requestArgs) - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - require.Equal(t, tc.expectError, result.IsError) - - textContent := getTextResult(t, result) - - if tc.expectedErrMsg != "" { - assert.Equal(t, tc.expectedErrMsg, textContent.Text) - return - } - - var response map[string]any - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - assert.Contains(t, response, "logs_url") - assert.Equal(t, "Workflow run logs are available for download", response["message"]) - }) - } -} - -func Test_ListWorkflowJobs_Behavioral(t *testing.T) { - toolDef := ListWorkflowJobs(translations.NullTranslationHelper) - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - }{ - { - name: "successful list workflow jobs", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetReposActionsRunsJobsByOwnerByRepoByRunID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - jobs := &github.Jobs{ - TotalCount: github.Ptr(2), - Jobs: 
[]*github.WorkflowJob{ - { - ID: github.Ptr(int64(1)), - Name: github.Ptr("build"), - Status: github.Ptr("completed"), - Conclusion: github.Ptr("success"), - }, - { - ID: github.Ptr(int64(2)), - Name: github.Ptr("test"), - Status: github.Ptr("completed"), - Conclusion: github.Ptr("failure"), - }, - }, - } - w.WriteHeader(http.StatusOK) - _ = json.NewEncoder(w).Encode(jobs) - }), - }), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - "run_id": float64(12345), - }, - expectError: false, - }, - { - name: "missing required parameter run_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "owner", - "repo": "repo", - }, - expectError: true, - expectedErrMsg: "missing required parameter: run_id", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - client := github.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := toolDef.Handler(deps) - - request := createMCPRequest(tc.requestArgs) - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - require.Equal(t, tc.expectError, result.IsError) - - textContent := getTextResult(t, result) - - if tc.expectedErrMsg != "" { - assert.Equal(t, tc.expectedErrMsg, textContent.Text) - return - } - - var response map[string]any - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - assert.Contains(t, response, "jobs") - }) - } -} - // Tests for consolidated actions tools func Test_ActionsList(t *testing.T) { diff --git a/pkg/github/code_scanning.go b/pkg/github/code_scanning.go index ccc00661a5..34249b2129 100644 --- a/pkg/github/code_scanning.go +++ b/pkg/github/code_scanning.go @@ -11,7 +11,7 @@ import ( "github.com/github/github-mcp-server/pkg/scopes" "github.com/github/github-mcp-server/pkg/translations" "github.com/github/github-mcp-server/pkg/utils" - "github.com/google/go-github/v79/github" + 
"github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/modelcontextprotocol/go-sdk/mcp" ) diff --git a/pkg/github/code_scanning_test.go b/pkg/github/code_scanning_test.go index 59972fe52d..7a3c16fd15 100644 --- a/pkg/github/code_scanning_test.go +++ b/pkg/github/code_scanning_test.go @@ -8,7 +8,7 @@ import ( "github.com/github/github-mcp-server/internal/toolsnaps" "github.com/github/github-mcp-server/pkg/translations" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -41,7 +41,7 @@ func Test_GetCodeScanningAlert(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedAlert *github.Alert expectedErrMsg string @@ -51,7 +51,7 @@ func Test_GetCodeScanningAlert(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposCodeScanningAlertsByOwnerByRepoByAlertNumber: mockResponse(t, http.StatusOK, mockAlert), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "alertNumber": float64(42), @@ -67,7 +67,7 @@ func Test_GetCodeScanningAlert(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "alertNumber": float64(9999), @@ -158,7 +158,7 @@ func Test_ListCodeScanningAlerts(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedAlerts []*github.Alert expectedErrMsg string @@ -175,7 +175,7 @@ func Test_ListCodeScanningAlerts(t *testing.T) { mockResponse(t, http.StatusOK, mockAlerts), ), }), - requestArgs: map[string]interface{}{ + requestArgs: 
map[string]any{ "owner": "owner", "repo": "repo", "ref": "main", @@ -194,7 +194,7 @@ func Test_ListCodeScanningAlerts(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Unauthorized access"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, diff --git a/pkg/github/context_tools.go b/pkg/github/context_tools.go index 29fa2925d4..902734481a 100644 --- a/pkg/github/context_tools.go +++ b/pkg/github/context_tools.go @@ -15,6 +15,9 @@ import ( "github.com/shurcooL/githubv4" ) +// GetMeUIResourceURI is the URI for the get_me tool's MCP App UI resource. +const GetMeUIResourceURI = "ui://github-mcp-server/get-me" + // UserDetails contains additional fields about a GitHub user not already // present in MinimalUser. Used by get_me context tool but omitted from search_users. type UserDetails struct { @@ -51,6 +54,11 @@ func GetMe(t translations.TranslationHelperFunc) inventory.ServerTool { // Use json.RawMessage to ensure "properties" is included even when empty. // OpenAI strict mode requires the properties field to be present. 
InputSchema: json.RawMessage(`{"type":"object","properties":{}}`), + Meta: mcp.Meta{ + "ui": map[string]any{ + "resourceUri": GetMeUIResourceURI, + }, + }, }, nil, func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, _ map[string]any) (*mcp.CallToolResult, any, error) { @@ -179,7 +187,7 @@ func GetTeams(t translations.TranslationHelperFunc) inventory.ServerTool { } `graphql:"organizations(first: 100)"` } `graphql:"user(login: $login)"` } - vars := map[string]interface{}{ + vars := map[string]any{ "login": githubv4.String(username), } if err := gqlClient.Query(ctx, &q, vars); err != nil { @@ -262,7 +270,7 @@ func GetTeamMembers(t translations.TranslationHelperFunc) inventory.ServerTool { } `graphql:"team(slug: $teamSlug)"` } `graphql:"organization(login: $org)"` } - vars := map[string]interface{}{ + vars := map[string]any{ "org": githubv4.String(org), "teamSlug": githubv4.String(teamSlug), } diff --git a/pkg/github/context_tools_test.go b/pkg/github/context_tools_test.go index 3f4261e719..3925019853 100644 --- a/pkg/github/context_tools_test.go +++ b/pkg/github/context_tools_test.go @@ -10,7 +10,7 @@ import ( "github.com/github/github-mcp-server/internal/githubv4mock" "github.com/github/github-mcp-server/internal/toolsnaps" "github.com/github/github-mcp-server/pkg/translations" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/shurcooL/githubv4" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -215,7 +215,7 @@ func Test_GetTeams(t *testing.T) { // to ensure each test gets a fresh client gqlClientForTestuser := func() *githubv4.Client { queryStr := "query($login:String!){user(login: $login){organizations(first: 100){nodes{login,teams(first: 100, userLogins: [$login]){nodes{name,slug,description}}}}}}" - vars := map[string]interface{}{ + vars := map[string]any{ "login": "testuser", } matcher := githubv4mock.NewQueryMatcher(queryStr, vars, mockTeamsResponse) @@ -225,7 
+225,7 @@ func Test_GetTeams(t *testing.T) { gqlClientForSpecificuser := func() *githubv4.Client { queryStr := "query($login:String!){user(login: $login){organizations(first: 100){nodes{login,teams(first: 100, userLogins: [$login]){nodes{name,slug,description}}}}}}" - vars := map[string]interface{}{ + vars := map[string]any{ "login": "specificuser", } matcher := githubv4mock.NewQueryMatcher(queryStr, vars, mockTeamsResponse) @@ -235,7 +235,7 @@ func Test_GetTeams(t *testing.T) { gqlClientNoTeams := func() *githubv4.Client { queryStr := "query($login:String!){user(login: $login){organizations(first: 100){nodes{login,teams(first: 100, userLogins: [$login]){nodes{name,slug,description}}}}}}" - vars := map[string]interface{}{ + vars := map[string]any{ "login": "testuser", } matcher := githubv4mock.NewQueryMatcher(queryStr, vars, mockNoTeamsResponse) @@ -419,7 +419,7 @@ func Test_GetTeamMembers(t *testing.T) { // Create GQL clients for different test scenarios gqlClientWithMembers := func() *githubv4.Client { queryStr := "query($org:String!$teamSlug:String!){organization(login: $org){team(slug: $teamSlug){members(first: 100){nodes{login}}}}}" - vars := map[string]interface{}{ + vars := map[string]any{ "org": "testorg", "teamSlug": "testteam", } @@ -430,7 +430,7 @@ func Test_GetTeamMembers(t *testing.T) { gqlClientNoMembers := func() *githubv4.Client { queryStr := "query($org:String!$teamSlug:String!){organization(login: $org){team(slug: $teamSlug){members(first: 100){nodes{login}}}}}" - vars := map[string]interface{}{ + vars := map[string]any{ "org": "testorg", "teamSlug": "emptyteam", } diff --git a/pkg/github/dependabot.go b/pkg/github/dependabot.go index b6b2eeaba7..6f0da1b208 100644 --- a/pkg/github/dependabot.go +++ b/pkg/github/dependabot.go @@ -12,7 +12,7 @@ import ( "github.com/github/github-mcp-server/pkg/scopes" "github.com/github/github-mcp-server/pkg/translations" "github.com/github/github-mcp-server/pkg/utils" - "github.com/google/go-github/v79/github" + 
"github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/modelcontextprotocol/go-sdk/mcp" ) diff --git a/pkg/github/dependabot_test.go b/pkg/github/dependabot_test.go index e57405a8cb..e20d2668ff 100644 --- a/pkg/github/dependabot_test.go +++ b/pkg/github/dependabot_test.go @@ -8,7 +8,7 @@ import ( "github.com/github/github-mcp-server/internal/toolsnaps" "github.com/github/github-mcp-server/pkg/translations" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -34,7 +34,7 @@ func Test_GetDependabotAlert(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedAlert *github.DependabotAlert expectedErrMsg string @@ -44,7 +44,7 @@ func Test_GetDependabotAlert(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposDependabotAlertsByOwnerByRepoByAlertNumber: mockResponse(t, http.StatusOK, mockAlert), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "alertNumber": float64(42), @@ -60,7 +60,7 @@ func Test_GetDependabotAlert(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "alertNumber": float64(9999), @@ -140,7 +140,7 @@ func Test_ListDependabotAlerts(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedAlerts []*github.DependabotAlert expectedErrMsg string @@ -154,7 +154,7 @@ func Test_ListDependabotAlerts(t *testing.T) { mockResponse(t, http.StatusOK, []*github.DependabotAlert{&criticalAlert}), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": 
"owner", "repo": "repo", "state": "open", @@ -171,7 +171,7 @@ func Test_ListDependabotAlerts(t *testing.T) { mockResponse(t, http.StatusOK, []*github.DependabotAlert{&highSeverityAlert}), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "severity": "high", @@ -186,7 +186,7 @@ func Test_ListDependabotAlerts(t *testing.T) { mockResponse(t, http.StatusOK, []*github.DependabotAlert{&criticalAlert, &highSeverityAlert}), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -201,7 +201,7 @@ func Test_ListDependabotAlerts(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Unauthorized access"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, diff --git a/pkg/github/dependencies.go b/pkg/github/dependencies.go index 15d807a249..f966c531e5 100644 --- a/pkg/github/dependencies.go +++ b/pkg/github/dependencies.go @@ -4,14 +4,18 @@ import ( "context" "errors" "fmt" + "net/http" "os" + ghcontext "github.com/github/github-mcp-server/pkg/context" + "github.com/github/github-mcp-server/pkg/http/transport" "github.com/github/github-mcp-server/pkg/inventory" "github.com/github/github-mcp-server/pkg/lockdown" "github.com/github/github-mcp-server/pkg/raw" "github.com/github/github-mcp-server/pkg/scopes" "github.com/github/github-mcp-server/pkg/translations" - gogithub "github.com/google/go-github/v79/github" + "github.com/github/github-mcp-server/pkg/utils" + gogithub "github.com/google/go-github/v82/github" "github.com/modelcontextprotocol/go-sdk/mcp" "github.com/shurcooL/githubv4" ) @@ -23,6 +27,14 @@ type depsContextKey struct{} // ErrDepsNotInContext is returned when ToolDependencies is not found in context. 
var ErrDepsNotInContext = errors.New("ToolDependencies not found in context; use ContextWithDeps to inject") +func InjectDepsMiddleware(deps ToolDependencies) mcp.Middleware { + return func(next mcp.MethodHandler) mcp.MethodHandler { + return func(ctx context.Context, method string, req mcp.Request) (result mcp.Result, err error) { + return next(ContextWithDeps(ctx, deps), method, req) + } + } +} + // ContextWithDeps returns a new context with the ToolDependencies stored in it. // This is used to inject dependencies at request time rather than at registration time, // avoiding expensive closure creation during server initialization. @@ -69,13 +81,13 @@ type ToolDependencies interface { GetRawClient(ctx context.Context) (*raw.Client, error) // GetRepoAccessCache returns the lockdown mode repo access cache - GetRepoAccessCache() *lockdown.RepoAccessCache + GetRepoAccessCache(ctx context.Context) (*lockdown.RepoAccessCache, error) // GetT returns the translation helper function GetT() translations.TranslationHelperFunc // GetFlags returns feature flags - GetFlags() FeatureFlags + GetFlags(ctx context.Context) FeatureFlags // GetContentWindowSize returns the content window size for log truncation GetContentWindowSize() int @@ -145,13 +157,15 @@ func (d BaseDeps) GetRawClient(_ context.Context) (*raw.Client, error) { } // GetRepoAccessCache implements ToolDependencies. -func (d BaseDeps) GetRepoAccessCache() *lockdown.RepoAccessCache { return d.RepoAccessCache } +func (d BaseDeps) GetRepoAccessCache(_ context.Context) (*lockdown.RepoAccessCache, error) { + return d.RepoAccessCache, nil +} // GetT implements ToolDependencies. func (d BaseDeps) GetT() translations.TranslationHelperFunc { return d.T } // GetFlags implements ToolDependencies. -func (d BaseDeps) GetFlags() FeatureFlags { return d.Flags } +func (d BaseDeps) GetFlags(_ context.Context) FeatureFlags { return d.Flags } // GetContentWindowSize implements ToolDependencies. 
func (d BaseDeps) GetContentWindowSize() int { return d.ContentWindowSize } @@ -221,3 +235,157 @@ func NewToolFromHandler( st.AcceptedScopes = scopes.ExpandScopes(requiredScopes...) return st } + +type RequestDeps struct { + // Static dependencies + apiHosts utils.APIHostResolver + version string + lockdownMode bool + RepoAccessOpts []lockdown.RepoAccessOption + T translations.TranslationHelperFunc + ContentWindowSize int + + // Feature flag checker for runtime checks + featureChecker inventory.FeatureFlagChecker +} + +// NewRequestDeps creates a RequestDeps with the provided clients and configuration. +func NewRequestDeps( + apiHosts utils.APIHostResolver, + version string, + lockdownMode bool, + repoAccessOpts []lockdown.RepoAccessOption, + t translations.TranslationHelperFunc, + contentWindowSize int, + featureChecker inventory.FeatureFlagChecker, +) *RequestDeps { + return &RequestDeps{ + apiHosts: apiHosts, + version: version, + lockdownMode: lockdownMode, + RepoAccessOpts: repoAccessOpts, + T: t, + ContentWindowSize: contentWindowSize, + featureChecker: featureChecker, + } +} + +// GetClient implements ToolDependencies. +func (d *RequestDeps) GetClient(ctx context.Context) (*gogithub.Client, error) { + // extract the token from the context + tokenInfo, ok := ghcontext.GetTokenInfo(ctx) + if !ok { + return nil, fmt.Errorf("no token info in context") + } + token := tokenInfo.Token + + baseRestURL, err := d.apiHosts.BaseRESTURL(ctx) + if err != nil { + return nil, fmt.Errorf("failed to get base REST URL: %w", err) + } + uploadURL, err := d.apiHosts.UploadURL(ctx) + if err != nil { + return nil, fmt.Errorf("failed to get upload URL: %w", err) + } + + // Construct REST client + restClient := gogithub.NewClient(nil).WithAuthToken(token) + restClient.UserAgent = fmt.Sprintf("github-mcp-server/%s", d.version) + restClient.BaseURL = baseRestURL + restClient.UploadURL = uploadURL + return restClient, nil +} + +// GetGQLClient implements ToolDependencies. 
+func (d *RequestDeps) GetGQLClient(ctx context.Context) (*githubv4.Client, error) { + // extract the token from the context + tokenInfo, ok := ghcontext.GetTokenInfo(ctx) + if !ok { + return nil, fmt.Errorf("no token info in context") + } + token := tokenInfo.Token + + // Construct GraphQL client + // We use NewEnterpriseClient unconditionally since we already parsed the API host + // Wrap transport with GraphQLFeaturesTransport to inject feature flags from context, + // matching the transport chain used by the remote server. + gqlHTTPClient := &http.Client{ + Transport: &transport.BearerAuthTransport{ + Transport: &transport.GraphQLFeaturesTransport{ + Transport: http.DefaultTransport, + }, + Token: token, + }, + } + + graphqlURL, err := d.apiHosts.GraphqlURL(ctx) + if err != nil { + return nil, fmt.Errorf("failed to get GraphQL URL: %w", err) + } + + gqlClient := githubv4.NewEnterpriseClient(graphqlURL.String(), gqlHTTPClient) + return gqlClient, nil +} + +// GetRawClient implements ToolDependencies. +func (d *RequestDeps) GetRawClient(ctx context.Context) (*raw.Client, error) { + client, err := d.GetClient(ctx) + if err != nil { + return nil, err + } + + rawURL, err := d.apiHosts.RawURL(ctx) + if err != nil { + return nil, fmt.Errorf("failed to get Raw URL: %w", err) + } + + rawClient := raw.NewClient(client, rawURL) + + return rawClient, nil +} + +// GetRepoAccessCache implements ToolDependencies. +func (d *RequestDeps) GetRepoAccessCache(ctx context.Context) (*lockdown.RepoAccessCache, error) { + if !d.lockdownMode { + return nil, nil + } + + gqlClient, err := d.GetGQLClient(ctx) + if err != nil { + return nil, err + } + + // Create repo access cache + instance := lockdown.GetInstance(gqlClient, d.RepoAccessOpts...) + return instance, nil +} + +// GetT implements ToolDependencies. +func (d *RequestDeps) GetT() translations.TranslationHelperFunc { return d.T } + +// GetFlags implements ToolDependencies. 
+func (d *RequestDeps) GetFlags(ctx context.Context) FeatureFlags { + return FeatureFlags{ + LockdownMode: d.lockdownMode && ghcontext.IsLockdownMode(ctx), + InsidersMode: ghcontext.IsInsidersMode(ctx), + } +} + +// GetContentWindowSize implements ToolDependencies. +func (d *RequestDeps) GetContentWindowSize() int { return d.ContentWindowSize } + +// IsFeatureEnabled checks if a feature flag is enabled. +func (d *RequestDeps) IsFeatureEnabled(ctx context.Context, flagName string) bool { + if d.featureChecker == nil || flagName == "" { + return false + } + + enabled, err := d.featureChecker(ctx, flagName) + if err != nil { + // Log error but don't fail the tool - treat as disabled + fmt.Fprintf(os.Stderr, "Feature flag check error for %q: %v\n", flagName, err) + return false + } + + return enabled +} diff --git a/pkg/github/discussions.go b/pkg/github/discussions.go index c036708187..6971bab076 100644 --- a/pkg/github/discussions.go +++ b/pkg/github/discussions.go @@ -10,7 +10,7 @@ import ( "github.com/github/github-mcp-server/pkg/translations" "github.com/github/github-mcp-server/pkg/utils" "github.com/go-viper/mapstructure/v2" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/modelcontextprotocol/go-sdk/mcp" "github.com/shurcooL/githubv4" @@ -214,7 +214,7 @@ func ListDiscussions(t translations.TranslationHelperFunc) inventory.ServerTool categoryID = &id } - vars := map[string]interface{}{ + vars := map[string]any{ "owner": githubv4.String(owner), "repo": githubv4.String(repo), "first": githubv4.Int(*paginationParams.First), @@ -256,9 +256,9 @@ func ListDiscussions(t translations.TranslationHelperFunc) inventory.ServerTool } // Create response with pagination info - response := map[string]interface{}{ + response := map[string]any{ "discussions": discussions, - "pageInfo": map[string]interface{}{ + "pageInfo": map[string]any{ "hasNextPage": pageInfo.HasNextPage, 
"hasPreviousPage": pageInfo.HasPreviousPage, "startCursor": string(pageInfo.StartCursor), @@ -338,7 +338,7 @@ func GetDiscussion(t translations.TranslationHelperFunc) inventory.ServerTool { } `graphql:"discussion(number: $discussionNumber)"` } `graphql:"repository(owner: $owner, name: $repo)"` } - vars := map[string]interface{}{ + vars := map[string]any{ "owner": githubv4.String(params.Owner), "repo": githubv4.String(params.Repo), "discussionNumber": githubv4.Int(params.DiscussionNumber), @@ -352,7 +352,7 @@ func GetDiscussion(t translations.TranslationHelperFunc) inventory.ServerTool { // The go-github library's Discussion type lacks isAnswered and answerChosenAt fields, // so we use map[string]interface{} for the response (consistent with other functions // like ListDiscussions and GetDiscussionComments). - response := map[string]interface{}{ + response := map[string]any{ "number": int(d.Number), "title": string(d.Title), "body": string(d.Body), @@ -360,7 +360,7 @@ func GetDiscussion(t translations.TranslationHelperFunc) inventory.ServerTool { "closed": bool(d.Closed), "isAnswered": bool(d.IsAnswered), "createdAt": d.CreatedAt.Time, - "category": map[string]interface{}{ + "category": map[string]any{ "name": string(d.Category.Name), }, } @@ -465,7 +465,7 @@ func GetDiscussionComments(t translations.TranslationHelperFunc) inventory.Serve } `graphql:"discussion(number: $discussionNumber)"` } `graphql:"repository(owner: $owner, name: $repo)"` } - vars := map[string]interface{}{ + vars := map[string]any{ "owner": githubv4.String(params.Owner), "repo": githubv4.String(params.Repo), "discussionNumber": githubv4.Int(params.DiscussionNumber), @@ -486,9 +486,9 @@ func GetDiscussionComments(t translations.TranslationHelperFunc) inventory.Serve } // Create response with pagination info - response := map[string]interface{}{ + response := map[string]any{ "comments": comments, - "pageInfo": map[string]interface{}{ + "pageInfo": map[string]any{ "hasNextPage": 
q.Repository.Discussion.Comments.PageInfo.HasNextPage, "hasPreviousPage": q.Repository.Discussion.Comments.PageInfo.HasPreviousPage, "startCursor": string(q.Repository.Discussion.Comments.PageInfo.StartCursor), @@ -570,7 +570,7 @@ func ListDiscussionCategories(t translations.TranslationHelperFunc) inventory.Se } `graphql:"discussionCategories(first: $first)"` } `graphql:"repository(owner: $owner, name: $repo)"` } - vars := map[string]interface{}{ + vars := map[string]any{ "owner": githubv4.String(owner), "repo": githubv4.String(repo), "first": githubv4.Int(25), @@ -588,9 +588,9 @@ func ListDiscussionCategories(t translations.TranslationHelperFunc) inventory.Se } // Create response with pagination info - response := map[string]interface{}{ + response := map[string]any{ "categories": categories, - "pageInfo": map[string]interface{}{ + "pageInfo": map[string]any{ "hasNextPage": q.Repository.DiscussionCategories.PageInfo.HasNextPage, "hasPreviousPage": q.Repository.DiscussionCategories.PageInfo.HasPreviousPage, "startCursor": string(q.Repository.DiscussionCategories.PageInfo.StartCursor), diff --git a/pkg/github/discussions_test.go b/pkg/github/discussions_test.go index 0ec9982805..998a6471bb 100644 --- a/pkg/github/discussions_test.go +++ b/pkg/github/discussions_test.go @@ -9,7 +9,7 @@ import ( "github.com/github/github-mcp-server/internal/githubv4mock" "github.com/github/github-mcp-server/internal/toolsnaps" "github.com/github/github-mcp-server/pkg/translations" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/shurcooL/githubv4" "github.com/stretchr/testify/assert" @@ -228,21 +228,21 @@ func Test_ListDiscussions(t *testing.T) { assert.ElementsMatch(t, schema.Required, []string{"owner"}) // Variables matching what GraphQL receives after JSON marshaling/unmarshaling - varsListAll := map[string]interface{}{ + varsListAll := map[string]any{ "owner": "owner", "repo": "repo", 
"first": float64(30), "after": (*string)(nil), } - varsRepoNotFound := map[string]interface{}{ + varsRepoNotFound := map[string]any{ "owner": "owner", "repo": "nonexistent-repo", "first": float64(30), "after": (*string)(nil), } - varsDiscussionsFiltered := map[string]interface{}{ + varsDiscussionsFiltered := map[string]any{ "owner": "owner", "repo": "repo", "categoryId": "DIC_kwDOABC123", @@ -250,7 +250,7 @@ func Test_ListDiscussions(t *testing.T) { "after": (*string)(nil), } - varsOrderByCreatedAsc := map[string]interface{}{ + varsOrderByCreatedAsc := map[string]any{ "owner": "owner", "repo": "repo", "orderByField": "CREATED_AT", @@ -259,7 +259,7 @@ func Test_ListDiscussions(t *testing.T) { "after": (*string)(nil), } - varsOrderByUpdatedDesc := map[string]interface{}{ + varsOrderByUpdatedDesc := map[string]any{ "owner": "owner", "repo": "repo", "orderByField": "UPDATED_AT", @@ -268,7 +268,7 @@ func Test_ListDiscussions(t *testing.T) { "after": (*string)(nil), } - varsCategoryWithOrder := map[string]interface{}{ + varsCategoryWithOrder := map[string]any{ "owner": "owner", "repo": "repo", "categoryId": "DIC_kwDOABC123", @@ -278,7 +278,7 @@ func Test_ListDiscussions(t *testing.T) { "after": (*string)(nil), } - varsOrgLevel := map[string]interface{}{ + varsOrgLevel := map[string]any{ "owner": "owner", "repo": ".github", // This is what gets set when repo is not provided "first": float64(30), @@ -287,7 +287,7 @@ func Test_ListDiscussions(t *testing.T) { tests := []struct { name string - reqParams map[string]interface{} + reqParams map[string]any expectError bool errContains string expectedCount int @@ -295,7 +295,7 @@ func Test_ListDiscussions(t *testing.T) { }{ { name: "list all discussions without category filter", - reqParams: map[string]interface{}{ + reqParams: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -304,7 +304,7 @@ func Test_ListDiscussions(t *testing.T) { }, { name: "filter by category ID", - reqParams: map[string]interface{}{ + reqParams: 
map[string]any{ "owner": "owner", "repo": "repo", "category": "DIC_kwDOABC123", @@ -314,7 +314,7 @@ func Test_ListDiscussions(t *testing.T) { }, { name: "order by created at ascending", - reqParams: map[string]interface{}{ + reqParams: map[string]any{ "owner": "owner", "repo": "repo", "orderBy": "CREATED_AT", @@ -332,7 +332,7 @@ func Test_ListDiscussions(t *testing.T) { }, { name: "order by updated at descending", - reqParams: map[string]interface{}{ + reqParams: map[string]any{ "owner": "owner", "repo": "repo", "orderBy": "UPDATED_AT", @@ -350,7 +350,7 @@ func Test_ListDiscussions(t *testing.T) { }, { name: "filter by category with order", - reqParams: map[string]interface{}{ + reqParams: map[string]any{ "owner": "owner", "repo": "repo", "category": "DIC_kwDOABC123", @@ -368,7 +368,7 @@ func Test_ListDiscussions(t *testing.T) { }, { name: "order by without direction (should not use ordering)", - reqParams: map[string]interface{}{ + reqParams: map[string]any{ "owner": "owner", "repo": "repo", "orderBy": "CREATED_AT", @@ -378,7 +378,7 @@ func Test_ListDiscussions(t *testing.T) { }, { name: "direction without order by (should not use ordering)", - reqParams: map[string]interface{}{ + reqParams: map[string]any{ "owner": "owner", "repo": "repo", "direction": "DESC", @@ -388,7 +388,7 @@ func Test_ListDiscussions(t *testing.T) { }, { name: "repository not found error", - reqParams: map[string]interface{}{ + reqParams: map[string]any{ "owner": "owner", "repo": "nonexistent-repo", }, @@ -397,7 +397,7 @@ func Test_ListDiscussions(t *testing.T) { }, { name: "list org-level discussions (no repo provided)", - reqParams: map[string]interface{}{ + reqParams: map[string]any{ "owner": "owner", // repo is not provided, it will default to ".github" }, @@ -511,7 +511,7 @@ func Test_GetDiscussion(t *testing.T) { // Use exact string query that matches implementation output qGetDiscussion := "query($discussionNumber:Int!$owner:String!$repo:String!){repository(owner: $owner, name: 
$repo){discussion(number: $discussionNumber){number,title,body,createdAt,closed,isAnswered,answerChosenAt,url,category{name}}}}" - vars := map[string]interface{}{ + vars := map[string]any{ "owner": "owner", "repo": "repo", "discussionNumber": float64(1), @@ -520,7 +520,7 @@ func Test_GetDiscussion(t *testing.T) { name string response githubv4mock.GQLResponse expectError bool - expected map[string]interface{} + expected map[string]any errContains string }{ { @@ -538,7 +538,7 @@ func Test_GetDiscussion(t *testing.T) { }}, }), expectError: false, - expected: map[string]interface{}{ + expected: map[string]any{ "number": float64(1), "title": "Test Discussion Title", "body": "This is a test discussion", @@ -562,7 +562,7 @@ func Test_GetDiscussion(t *testing.T) { deps := BaseDeps{GQLClient: gqlClient} handler := toolDef.Handler(deps) - reqParams := map[string]interface{}{"owner": "owner", "repo": "repo", "discussionNumber": int32(1)} + reqParams := map[string]any{"owner": "owner", "repo": "repo", "discussionNumber": int32(1)} req := createMCPRequest(reqParams) res, err := handler(ContextWithDeps(context.Background(), deps), &req) text := getTextResult(t, res).Text @@ -574,7 +574,7 @@ func Test_GetDiscussion(t *testing.T) { } require.NoError(t, err) - var out map[string]interface{} + var out map[string]any require.NoError(t, json.Unmarshal([]byte(text), &out)) assert.Equal(t, tc.expected["number"], out["number"]) assert.Equal(t, tc.expected["title"], out["title"]) @@ -583,7 +583,7 @@ func Test_GetDiscussion(t *testing.T) { assert.Equal(t, tc.expected["closed"], out["closed"]) assert.Equal(t, tc.expected["isAnswered"], out["isAnswered"]) // Check category is present - category, ok := out["category"].(map[string]interface{}) + category, ok := out["category"].(map[string]any) require.True(t, ok) assert.Equal(t, "General", category["name"]) }) @@ -609,7 +609,7 @@ func Test_GetDiscussionComments(t *testing.T) { qGetComments := 
"query($after:String$discussionNumber:Int!$first:Int!$owner:String!$repo:String!){repository(owner: $owner, name: $repo){discussion(number: $discussionNumber){comments(first: $first, after: $after){nodes{body},pageInfo{hasNextPage,hasPreviousPage,startCursor,endCursor},totalCount}}}}" // Variables matching what GraphQL receives after JSON marshaling/unmarshaling - vars := map[string]interface{}{ + vars := map[string]any{ "owner": "owner", "repo": "repo", "discussionNumber": float64(1), @@ -642,7 +642,7 @@ func Test_GetDiscussionComments(t *testing.T) { deps := BaseDeps{GQLClient: gqlClient} handler := toolDef.Handler(deps) - reqParams := map[string]interface{}{ + reqParams := map[string]any{ "owner": "owner", "repo": "repo", "discussionNumber": int32(1), @@ -693,14 +693,14 @@ func Test_ListDiscussionCategories(t *testing.T) { qListCategories := "query($first:Int!$owner:String!$repo:String!){repository(owner: $owner, name: $repo){discussionCategories(first: $first){nodes{id,name},pageInfo{hasNextPage,hasPreviousPage,startCursor,endCursor},totalCount}}}" // Variables for repository-level categories - varsRepo := map[string]interface{}{ + varsRepo := map[string]any{ "owner": "owner", "repo": "repo", "first": float64(25), } // Variables for organization-level categories (using .github repo) - varsOrg := map[string]interface{}{ + varsOrg := map[string]any{ "owner": "owner", "repo": ".github", "first": float64(25), @@ -745,8 +745,8 @@ func Test_ListDiscussionCategories(t *testing.T) { tests := []struct { name string - reqParams map[string]interface{} - vars map[string]interface{} + reqParams map[string]any + vars map[string]any mockResponse githubv4mock.GQLResponse expectError bool expectedCount int @@ -754,7 +754,7 @@ func Test_ListDiscussionCategories(t *testing.T) { }{ { name: "list repository-level discussion categories", - reqParams: map[string]interface{}{ + reqParams: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -769,7 +769,7 @@ func 
Test_ListDiscussionCategories(t *testing.T) { }, { name: "list org-level discussion categories (no repo provided)", - reqParams: map[string]interface{}{ + reqParams: map[string]any{ "owner": "owner", // repo is not provided, it will default to ".github" }, diff --git a/pkg/github/feature_flags_test.go b/pkg/github/feature_flags_test.go index 498c6e4876..2f0a435c95 100644 --- a/pkg/github/feature_flags_test.go +++ b/pkg/github/feature_flags_test.go @@ -45,7 +45,7 @@ func HelloWorldTool(t translations.TranslationHelperFunc) inventory.ServerTool { if deps.IsFeatureEnabled(ctx, RemoteMCPEnthusiasticGreeting) { greeting += " Welcome to the future of MCP! 🎉" } - if deps.GetFlags().InsidersMode { + if deps.GetFlags(ctx).InsidersMode { greeting += " Experimental features are enabled! 🚀" } diff --git a/pkg/github/gists.go b/pkg/github/gists.go index 0f43ebdf99..a0bc1b0855 100644 --- a/pkg/github/gists.go +++ b/pkg/github/gists.go @@ -12,7 +12,7 @@ import ( "github.com/github/github-mcp-server/pkg/scopes" "github.com/github/github-mcp-server/pkg/translations" "github.com/github/github-mcp-server/pkg/utils" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/modelcontextprotocol/go-sdk/mcp" ) diff --git a/pkg/github/gists_test.go b/pkg/github/gists_test.go index 0dd112afb2..74cd45d276 100644 --- a/pkg/github/gists_test.go +++ b/pkg/github/gists_test.go @@ -9,7 +9,7 @@ import ( "github.com/github/github-mcp-server/internal/toolsnaps" "github.com/github/github-mcp-server/pkg/translations" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -69,7 +69,7 @@ func Test_ListGists(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool 
expectedGists []*github.Gist expectedErrMsg string @@ -79,7 +79,7 @@ func Test_ListGists(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetGists: mockResponse(t, http.StatusOK, mockGists), }), - requestArgs: map[string]interface{}{}, + requestArgs: map[string]any{}, expectError: false, expectedGists: mockGists, }, @@ -88,7 +88,7 @@ func Test_ListGists(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetUsersGistsByUsername: mockResponse(t, http.StatusOK, mockGists), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "username": "testuser", }, expectError: false, @@ -105,7 +105,7 @@ func Test_ListGists(t *testing.T) { mockResponse(t, http.StatusOK, mockGists), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "since": "2023-01-01T00:00:00Z", "page": float64(2), "perPage": float64(5), @@ -118,7 +118,7 @@ func Test_ListGists(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetGists: mockResponse(t, http.StatusOK, mockGists), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "since": "invalid-date", }, expectError: true, @@ -132,7 +132,7 @@ func Test_ListGists(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Requires authentication"}`)) }), }), - requestArgs: map[string]interface{}{}, + requestArgs: map[string]any{}, expectError: true, expectedErrMsg: "failed to list gists", }, @@ -219,7 +219,7 @@ func Test_GetGist(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedGists github.Gist expectedErrMsg string @@ -229,7 +229,7 @@ func Test_GetGist(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetGistsByGistID: mockResponse(t, http.StatusOK, mockGist), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "gist_id": 
"gist1", }, expectError: false, @@ -243,7 +243,7 @@ func Test_GetGist(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Invalid Request"}`)) }), }), - requestArgs: map[string]interface{}{}, + requestArgs: map[string]any{}, expectError: true, expectedErrMsg: "missing required parameter: gist_id", }, @@ -332,7 +332,7 @@ func Test_CreateGist(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedErrMsg string expectedGist *github.Gist @@ -342,7 +342,7 @@ func Test_CreateGist(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PostGists: mockResponse(t, http.StatusCreated, createdGist), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "filename": "test.go", "content": "package main\n\nfunc main() {\n\tfmt.Println(\"Hello, Gist!\")\n}", "description": "Test Gist", @@ -354,7 +354,7 @@ func Test_CreateGist(t *testing.T) { { name: "missing required filename", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "content": "test content", "description": "Test Gist", }, @@ -364,7 +364,7 @@ func Test_CreateGist(t *testing.T) { { name: "missing required content", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "filename": "test.go", "description": "Test Gist", }, @@ -379,7 +379,7 @@ func Test_CreateGist(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Requires authentication"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "filename": "test.go", "content": "package main", "description": "Test Gist", @@ -471,7 +471,7 @@ func Test_UpdateGist(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError 
bool expectedErrMsg string expectedGist *github.Gist @@ -481,7 +481,7 @@ func Test_UpdateGist(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PatchGistsByGistID: mockResponse(t, http.StatusOK, updatedGist), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "gist_id": "existing-gist-id", "filename": "updated.go", "content": "package main\n\nfunc main() {\n\tfmt.Println(\"Updated Gist!\")\n}", @@ -493,7 +493,7 @@ func Test_UpdateGist(t *testing.T) { { name: "missing required gist_id", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "filename": "updated.go", "content": "updated content", "description": "Updated Test Gist", @@ -504,7 +504,7 @@ func Test_UpdateGist(t *testing.T) { { name: "missing required filename", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "gist_id": "existing-gist-id", "content": "updated content", "description": "Updated Test Gist", @@ -515,7 +515,7 @@ func Test_UpdateGist(t *testing.T) { { name: "missing required content", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "gist_id": "existing-gist-id", "filename": "updated.go", "description": "Updated Test Gist", @@ -531,7 +531,7 @@ func Test_UpdateGist(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "gist_id": "nonexistent-gist-id", "filename": "updated.go", "content": "package main", diff --git a/pkg/github/git.go b/pkg/github/git.go index ec7159b9bc..33a1f94efa 100644 --- a/pkg/github/git.go +++ b/pkg/github/git.go @@ -11,7 +11,7 @@ import ( "github.com/github/github-mcp-server/pkg/scopes" "github.com/github/github-mcp-server/pkg/translations" 
"github.com/github/github-mcp-server/pkg/utils" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/modelcontextprotocol/go-sdk/mcp" ) diff --git a/pkg/github/git_test.go b/pkg/github/git_test.go index d60aed0929..cef65c9ef4 100644 --- a/pkg/github/git_test.go +++ b/pkg/github/git_test.go @@ -9,7 +9,7 @@ import ( "github.com/github/github-mcp-server/internal/toolsnaps" "github.com/github/github-mcp-server/pkg/translations" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -63,7 +63,7 @@ func Test_GetRepositoryTree(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedErrMsg string }{ @@ -73,7 +73,7 @@ func Test_GetRepositoryTree(t *testing.T) { GetReposByOwnerByRepo: mockResponse(t, http.StatusOK, mockRepo), GetReposGitTreesByOwnerByRepoByTree: mockResponse(t, http.StatusOK, mockTree), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -84,7 +84,7 @@ func Test_GetRepositoryTree(t *testing.T) { GetReposByOwnerByRepo: mockResponse(t, http.StatusOK, mockRepo), GetReposGitTreesByOwnerByRepoByTree: mockResponse(t, http.StatusOK, mockTree), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path_filter": "src/", @@ -98,7 +98,7 @@ func Test_GetRepositoryTree(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "nonexistent", }, @@ -114,7 +114,7 @@ func Test_GetRepositoryTree(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) }), }), - requestArgs: 
map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -149,7 +149,7 @@ func Test_GetRepositoryTree(t *testing.T) { textContent := getTextResult(t, result) // Parse the JSON response - var treeResponse map[string]interface{} + var treeResponse map[string]any err := json.Unmarshal([]byte(textContent.Text), &treeResponse) require.NoError(t, err) @@ -163,9 +163,9 @@ func Test_GetRepositoryTree(t *testing.T) { // Check filtering if path_filter was provided if pathFilter, exists := tc.requestArgs["path_filter"]; exists { - tree := treeResponse["tree"].([]interface{}) + tree := treeResponse["tree"].([]any) for _, entry := range tree { - entryMap := entry.(map[string]interface{}) + entryMap := entry.(map[string]any) path := entryMap["path"].(string) assert.True(t, strings.HasPrefix(path, pathFilter.(string)), "Path %s should start with filter %s", path, pathFilter) diff --git a/pkg/github/helper_test.go b/pkg/github/helper_test.go index 0bb73008ec..ae6c644e2d 100644 --- a/pkg/github/helper_test.go +++ b/pkg/github/helper_test.go @@ -2,6 +2,7 @@ package github import ( "bytes" + "context" "encoding/json" "io" "net/http" @@ -72,6 +73,7 @@ const ( PutReposPullsMergeByOwnerByRepoByPullNumber = "PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge" PutReposPullsUpdateBranchByOwnerByRepoByPullNumber = "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" PostReposPullsRequestedReviewersByOwnerByRepoByPullNumber = "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + PostReposPullsCommentsByOwnerByRepoByPullNumber = "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" // Notifications endpoints GetNotifications = "GET /notifications" @@ -249,7 +251,7 @@ func (p *partialMock) andThen(responseHandler http.HandlerFunc) http.HandlerFunc // mockResponse is a helper function to create a mock HTTP response handler // that returns a specified status code and marshaled body. 
-func mockResponse(t *testing.T, code int, body interface{}) http.HandlerFunc { +func mockResponse(t *testing.T, code int, body any) http.HandlerFunc { t.Helper() return func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(code) @@ -270,9 +272,9 @@ func mockResponse(t *testing.T, code int, body interface{}) http.HandlerFunc { // createMCPRequest is a helper function to create a MCP request with the given arguments. func createMCPRequest(args any) mcp.CallToolRequest { // convert args to map[string]interface{} and serialize to JSON - argsMap, ok := args.(map[string]interface{}) + argsMap, ok := args.(map[string]any) if !ok { - argsMap = make(map[string]interface{}) + argsMap = make(map[string]any) } argsJSON, err := json.Marshal(argsMap) @@ -289,6 +291,44 @@ func createMCPRequest(args any) mcp.CallToolRequest { } } +// createMCPRequestWithSession creates a CallToolRequest with a ServerSession +// that has the given client name in its InitializeParams. This is used to test +// UI capability detection based on ClientInfo.Name. 
+func createMCPRequestWithSession(t *testing.T, clientName string, args any) mcp.CallToolRequest { + t.Helper() + + argsMap, ok := args.(map[string]any) + if !ok { + argsMap = make(map[string]any) + } + argsJSON, err := json.Marshal(argsMap) + require.NoError(t, err) + + srv := mcp.NewServer(&mcp.Implementation{Name: "test"}, nil) + + st, _ := mcp.NewInMemoryTransports() + session, err := srv.Connect(context.Background(), st, &mcp.ServerSessionOptions{ + State: &mcp.ServerSessionState{ + InitializeParams: &mcp.InitializeParams{ + ClientInfo: &mcp.Implementation{Name: clientName}, + }, + }, + }) + require.NoError(t, err) + + // Close the unused client-side transport and session + t.Cleanup(func() { + _ = session.Close() + }) + + return mcp.CallToolRequest{ + Session: session, + Params: &mcp.CallToolParamsRaw{ + Arguments: json.RawMessage(argsJSON), + }, + } +} + // getTextResult is a helper function that returns a text result from a tool call. func getTextResult(t *testing.T, result *mcp.CallToolResult) *mcp.TextContent { t.Helper() @@ -312,16 +352,16 @@ func getErrorResult(t *testing.T, result *mcp.CallToolResult) *mcp.TextContent { func TestOptionalParamOK(t *testing.T) { tests := []struct { name string - args map[string]interface{} + args map[string]any paramName string - expectedVal interface{} + expectedVal any expectedOk bool expectError bool errorMsg string }{ { name: "present and correct type (string)", - args: map[string]interface{}{"myParam": "hello"}, + args: map[string]any{"myParam": "hello"}, paramName: "myParam", expectedVal: "hello", expectedOk: true, @@ -329,7 +369,7 @@ func TestOptionalParamOK(t *testing.T) { }, { name: "present and correct type (bool)", - args: map[string]interface{}{"myParam": true}, + args: map[string]any{"myParam": true}, paramName: "myParam", expectedVal: true, expectedOk: true, @@ -337,7 +377,7 @@ func TestOptionalParamOK(t *testing.T) { }, { name: "present and correct type (number)", - args: map[string]interface{}{"myParam": 
float64(123)}, + args: map[string]any{"myParam": float64(123)}, paramName: "myParam", expectedVal: float64(123), expectedOk: true, @@ -345,7 +385,7 @@ func TestOptionalParamOK(t *testing.T) { }, { name: "present but wrong type (string expected, got bool)", - args: map[string]interface{}{"myParam": true}, + args: map[string]any{"myParam": true}, paramName: "myParam", expectedVal: "", // Zero value for string expectedOk: true, // ok is true because param exists @@ -354,7 +394,7 @@ func TestOptionalParamOK(t *testing.T) { }, { name: "present but wrong type (bool expected, got string)", - args: map[string]interface{}{"myParam": "true"}, + args: map[string]any{"myParam": "true"}, paramName: "myParam", expectedVal: false, // Zero value for bool expectedOk: true, // ok is true because param exists @@ -363,7 +403,7 @@ func TestOptionalParamOK(t *testing.T) { }, { name: "parameter not present", - args: map[string]interface{}{"anotherParam": "value"}, + args: map[string]any{"anotherParam": "value"}, paramName: "myParam", expectedVal: "", // Zero value for string expectedOk: false, @@ -531,7 +571,7 @@ func matchPath(pattern, path string) bool { if len(pathParts) < len(patternParts)-1 { return false } - for i := 0; i < len(patternParts)-1; i++ { + for i := range len(patternParts) - 1 { if strings.HasPrefix(patternParts[i], "{") && strings.HasSuffix(patternParts[i], "}") { continue // Path parameter matches anything } diff --git a/pkg/github/issues.go b/pkg/github/issues.go index 62e1a0bacf..cd70855507 100644 --- a/pkg/github/issues.go +++ b/pkg/github/issues.go @@ -9,16 +9,16 @@ import ( "strings" "time" + ghcontext "github.com/github/github-mcp-server/pkg/context" ghErrors "github.com/github/github-mcp-server/pkg/errors" "github.com/github/github-mcp-server/pkg/inventory" - "github.com/github/github-mcp-server/pkg/lockdown" "github.com/github/github-mcp-server/pkg/octicons" "github.com/github/github-mcp-server/pkg/sanitize" "github.com/github/github-mcp-server/pkg/scopes" 
"github.com/github/github-mcp-server/pkg/translations" "github.com/github/github-mcp-server/pkg/utils" "github.com/go-viper/mapstructure/v2" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/modelcontextprotocol/go-sdk/mcp" "github.com/shurcooL/githubv4" @@ -48,7 +48,7 @@ const ( // When duplicateOf is non-zero, it fetches both the main issue and duplicate issue IDs in a single query. func fetchIssueIDs(ctx context.Context, gqlClient *githubv4.Client, owner, repo string, issueNumber int, duplicateOf int) (githubv4.ID, githubv4.ID, error) { // Build query variables common to both cases - vars := map[string]interface{}{ + vars := map[string]any{ "owner": githubv4.String(owner), "repo": githubv4.String(repo), "issueNumber": githubv4.Int(issueNumber), // #nosec G115 - issue numbers are always small positive integers @@ -312,13 +312,13 @@ Options are: switch method { case "get": - result, err := GetIssue(ctx, client, deps.GetRepoAccessCache(), owner, repo, issueNumber, deps.GetFlags()) + result, err := GetIssue(ctx, client, deps, owner, repo, issueNumber) return result, nil, err case "get_comments": - result, err := GetIssueComments(ctx, client, deps.GetRepoAccessCache(), owner, repo, issueNumber, pagination, deps.GetFlags()) + result, err := GetIssueComments(ctx, client, deps, owner, repo, issueNumber, pagination) return result, nil, err case "get_sub_issues": - result, err := GetSubIssues(ctx, client, deps.GetRepoAccessCache(), owner, repo, issueNumber, pagination, deps.GetFlags()) + result, err := GetSubIssues(ctx, client, deps, owner, repo, issueNumber, pagination) return result, nil, err case "get_labels": result, err := GetIssueLabels(ctx, gqlClient, owner, repo, issueNumber) @@ -329,7 +329,13 @@ Options are: }) } -func GetIssue(ctx context.Context, client *github.Client, cache *lockdown.RepoAccessCache, owner string, repo string, issueNumber int, flags FeatureFlags) 
(*mcp.CallToolResult, error) { +func GetIssue(ctx context.Context, client *github.Client, deps ToolDependencies, owner string, repo string, issueNumber int) (*mcp.CallToolResult, error) { + cache, err := deps.GetRepoAccessCache(ctx) + if err != nil { + return nil, fmt.Errorf("failed to get repo access cache: %w", err) + } + flags := deps.GetFlags(ctx) + issue, resp, err := client.Issues.Get(ctx, owner, repo, issueNumber) if err != nil { return nil, fmt.Errorf("failed to get issue: %w", err) @@ -370,15 +376,18 @@ func GetIssue(ctx context.Context, client *github.Client, cache *lockdown.RepoAc } } - r, err := json.Marshal(issue) - if err != nil { - return nil, fmt.Errorf("failed to marshal issue: %w", err) - } + minimalIssue := convertToMinimalIssue(issue) - return utils.NewToolResultText(string(r)), nil + return MarshalledTextResult(minimalIssue), nil } -func GetIssueComments(ctx context.Context, client *github.Client, cache *lockdown.RepoAccessCache, owner string, repo string, issueNumber int, pagination PaginationParams, flags FeatureFlags) (*mcp.CallToolResult, error) { +func GetIssueComments(ctx context.Context, client *github.Client, deps ToolDependencies, owner string, repo string, issueNumber int, pagination PaginationParams) (*mcp.CallToolResult, error) { + cache, err := deps.GetRepoAccessCache(ctx) + if err != nil { + return nil, fmt.Errorf("failed to get repo access cache: %w", err) + } + flags := deps.GetFlags(ctx) + opts := &github.IssueListCommentsOptions{ ListOptions: github.ListOptions{ Page: pagination.Page, @@ -424,15 +433,21 @@ func GetIssueComments(ctx context.Context, client *github.Client, cache *lockdow comments = filteredComments } - r, err := json.Marshal(comments) - if err != nil { - return nil, fmt.Errorf("failed to marshal response: %w", err) + minimalComments := make([]MinimalIssueComment, 0, len(comments)) + for _, comment := range comments { + minimalComments = append(minimalComments, convertToMinimalIssueComment(comment)) } - return 
utils.NewToolResultText(string(r)), nil + return MarshalledTextResult(minimalComments), nil } -func GetSubIssues(ctx context.Context, client *github.Client, cache *lockdown.RepoAccessCache, owner string, repo string, issueNumber int, pagination PaginationParams, featureFlags FeatureFlags) (*mcp.CallToolResult, error) { +func GetSubIssues(ctx context.Context, client *github.Client, deps ToolDependencies, owner string, repo string, issueNumber int, pagination PaginationParams) (*mcp.CallToolResult, error) { + cache, err := deps.GetRepoAccessCache(ctx) + if err != nil { + return nil, fmt.Errorf("failed to get repo access cache: %w", err) + } + featureFlags := deps.GetFlags(ctx) + opts := &github.IssueListOptions{ ListOptions: github.ListOptions{ Page: pagination.Page, @@ -976,6 +991,9 @@ func SearchIssues(t translations.TranslationHelperFunc) inventory.ServerTool { } // IssueWrite creates a tool to create a new or update an existing issue in a GitHub repository. +// IssueWriteUIResourceURI is the URI for the issue_write tool's MCP App UI resource. 
+const IssueWriteUIResourceURI = "ui://github-mcp-server/issue-write" + func IssueWrite(t translations.TranslationHelperFunc) inventory.ServerTool { return NewTool( ToolsetMetadataIssues, @@ -986,6 +1004,12 @@ func IssueWrite(t translations.TranslationHelperFunc) inventory.ServerTool { Title: t("TOOL_ISSUE_WRITE_USER_TITLE", "Create or update issue."), ReadOnlyHint: false, }, + Meta: mcp.Meta{ + "ui": map[string]any{ + "resourceUri": IssueWriteUIResourceURI, + "visibility": []string{"model", "app"}, + }, + }, InputSchema: &jsonschema.Schema{ Type: "object", Properties: map[string]*jsonschema.Schema{ @@ -1059,7 +1083,7 @@ Options are: }, }, []scopes.Scope{scopes.Repo}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { + func(ctx context.Context, deps ToolDependencies, req *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { method, err := RequiredParam[string](args, "method") if err != nil { return utils.NewToolResultError(err.Error()), nil, nil @@ -1073,6 +1097,23 @@ Options are: if err != nil { return utils.NewToolResultError(err.Error()), nil, nil } + + // When insiders mode is enabled and the client supports MCP Apps UI, + // check if this is a UI form submission. The UI sends _ui_submitted=true + // to distinguish form submissions from LLM calls. + uiSubmitted, _ := OptionalParam[bool](args, "_ui_submitted") + + if deps.GetFlags(ctx).InsidersMode && clientSupportsUI(req) && !uiSubmitted { + if method == "update" { + issueNumber, numErr := RequiredInt(args, "issue_number") + if numErr != nil { + return utils.NewToolResultError("issue_number is required for update method"), nil, nil + } + return utils.NewToolResultText(fmt.Sprintf("Ready to update issue #%d in %s/%s. The user will review and confirm via the interactive form.", issueNumber, owner, repo)), nil, nil + } + return utils.NewToolResultText(fmt.Sprintf("Ready to create an issue in %s/%s. 
The user will review and confirm via the interactive form.", owner, repo)), nil, nil + } + title, err := OptionalParam[string](args, "title") if err != nil { return utils.NewToolResultError(err.Error()), nil, nil @@ -1502,7 +1543,7 @@ func ListIssues(t translations.TranslationHelperFunc) inventory.ServerTool { return utils.NewToolResultError(fmt.Sprintf("failed to get GitHub GQL client: %v", err)), nil, nil } - vars := map[string]interface{}{ + vars := map[string]any{ "owner": githubv4.String(owner), "repo": githubv4.String(repo), "states": states, @@ -1561,9 +1602,9 @@ func ListIssues(t translations.TranslationHelperFunc) inventory.ServerTool { } // Create response with issues - response := map[string]interface{}{ + response := map[string]any{ "issues": issues, - "pageInfo": map[string]interface{}{ + "pageInfo": map[string]any{ "hasNextPage": pageInfo.HasNextPage, "hasPreviousPage": pageInfo.HasPreviousPage, "startCursor": string(pageInfo.StartCursor), @@ -1898,7 +1939,7 @@ func AssignCopilotToIssue(t translations.TranslationHelperFunc) inventory.Server // Add the GraphQL-Features header for the agent assignment API // The header will be read by the HTTP transport if it's configured to do so - ctxWithFeatures := withGraphQLFeatures(ctx, "issues_copilot_assignment_api_support") + ctxWithFeatures := ghcontext.WithGraphQLFeatures(ctx, "issues_copilot_assignment_api_support") // Capture the time before assignment to filter out older PRs during polling assignmentTime := time.Now().UTC() @@ -2096,19 +2137,3 @@ func AssignCodingAgentPrompt(t translations.TranslationHelperFunc) inventory.Ser }, ) } - -// graphQLFeaturesKey is a context key for GraphQL feature flags -type graphQLFeaturesKey struct{} - -// withGraphQLFeatures adds GraphQL feature flags to the context -func withGraphQLFeatures(ctx context.Context, features ...string) context.Context { - return context.WithValue(ctx, graphQLFeaturesKey{}, features) -} - -// GetGraphQLFeatures retrieves GraphQL feature flags 
from the context -func GetGraphQLFeatures(ctx context.Context) []string { - if features, ok := ctx.Value(graphQLFeaturesKey{}).([]string); ok { - return features - } - return nil -} diff --git a/pkg/github/issues_test.go b/pkg/github/issues_test.go index a338efcbab..512ba8a6b2 100644 --- a/pkg/github/issues_test.go +++ b/pkg/github/issues_test.go @@ -15,7 +15,7 @@ import ( "github.com/github/github-mcp-server/internal/toolsnaps" "github.com/github/github-mcp-server/pkg/lockdown" "github.com/github/github-mcp-server/pkg/translations" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/shurcooL/githubv4" "github.com/stretchr/testify/assert" @@ -171,7 +171,7 @@ func Test_GetIssue(t *testing.T) { name string mockedClient *http.Client gqlHTTPClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectHandlerError bool expectResultError bool expectedIssue *github.Issue @@ -183,7 +183,7 @@ func Test_GetIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposIssuesByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusOK, mockIssue), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get", "owner": "owner2", "repo": "repo2", @@ -196,7 +196,7 @@ func Test_GetIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposIssuesByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusNotFound, `{"message": "Issue not found"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get", "owner": "owner", "repo": "repo", @@ -240,7 +240,7 @@ func Test_GetIssue(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get", "owner": "owner2", "repo": "repo2", @@ -291,7 +291,7 @@ func Test_GetIssue(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + 
requestArgs: map[string]any{ "method": "get", "owner": "owner", "repo": "repo", @@ -345,15 +345,15 @@ func Test_GetIssue(t *testing.T) { textContent := getTextResult(t, result) - var returnedIssue github.Issue + var returnedIssue MinimalIssue err = json.Unmarshal([]byte(textContent.Text), &returnedIssue) require.NoError(t, err) - assert.Equal(t, *tc.expectedIssue.Number, *returnedIssue.Number) - assert.Equal(t, *tc.expectedIssue.Title, *returnedIssue.Title) - assert.Equal(t, *tc.expectedIssue.Body, *returnedIssue.Body) - assert.Equal(t, *tc.expectedIssue.State, *returnedIssue.State) - assert.Equal(t, *tc.expectedIssue.HTMLURL, *returnedIssue.HTMLURL) - assert.Equal(t, *tc.expectedIssue.User.Login, *returnedIssue.User.Login) + assert.Equal(t, tc.expectedIssue.GetNumber(), returnedIssue.Number) + assert.Equal(t, tc.expectedIssue.GetTitle(), returnedIssue.Title) + assert.Equal(t, tc.expectedIssue.GetBody(), returnedIssue.Body) + assert.Equal(t, tc.expectedIssue.GetState(), returnedIssue.State) + assert.Equal(t, tc.expectedIssue.GetHTMLURL(), returnedIssue.HTMLURL) + assert.Equal(t, tc.expectedIssue.GetUser().GetLogin(), returnedIssue.User.Login) }) } } @@ -386,7 +386,7 @@ func Test_AddIssueComment(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedComment *github.IssueComment expectedErrMsg string @@ -396,7 +396,7 @@ func Test_AddIssueComment(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PostReposIssuesCommentsByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusCreated, mockComment), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "issue_number": float64(42), @@ -413,7 +413,7 @@ func Test_AddIssueComment(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Invalid request"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ 
"owner": "owner", "repo": "repo", "issue_number": float64(42), @@ -520,7 +520,7 @@ func Test_SearchIssues(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedResult *github.IssuesSearchResult expectedErrMsg string @@ -541,7 +541,7 @@ func Test_SearchIssues(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "repo:owner/repo is:open", "sort": "created", "order": "desc", @@ -567,7 +567,7 @@ func Test_SearchIssues(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "is:open", "owner": "test-owner", "repo": "test-repo", @@ -591,7 +591,7 @@ func Test_SearchIssues(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "bug", "owner": "test-owner", }, @@ -612,7 +612,7 @@ func Test_SearchIssues(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "feature", "repo": "test-repo", }, @@ -624,7 +624,7 @@ func Test_SearchIssues(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetSearchIssues: mockResponse(t, http.StatusOK, mockSearchResult), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "is:issue repo:owner/repo is:open", }, expectError: false, @@ -644,7 +644,7 @@ func Test_SearchIssues(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "repo:github/github-mcp-server is:issue is:open (label:critical OR label:urgent)", }, expectError: false, @@ -664,7 +664,7 @@ func Test_SearchIssues(t *testing.T) { 
mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "repo:github/github-mcp-server critical", "owner": "different-owner", "repo": "different-repo", @@ -686,7 +686,7 @@ func Test_SearchIssues(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "is:issue repo:octocat/Hello-World bug", }, expectError: false, @@ -706,7 +706,7 @@ func Test_SearchIssues(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "repo:github/github-mcp-server is:issue (label:critical OR label:urgent OR label:high-priority OR label:blocker)", }, expectError: false, @@ -720,7 +720,7 @@ func Test_SearchIssues(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Validation Failed"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "invalid:query", }, expectError: true, @@ -812,7 +812,7 @@ func Test_CreateIssue(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedIssue *github.Issue expectedErrMsg string @@ -831,7 +831,7 @@ func Test_CreateIssue(t *testing.T) { mockResponse(t, http.StatusCreated, mockIssue), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "create", "owner": "owner", "repo": "repo", @@ -855,7 +855,7 @@ func Test_CreateIssue(t *testing.T) { State: github.Ptr("open"), }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "create", "owner": "owner", "repo": "repo", @@ -878,7 +878,7 @@ func Test_CreateIssue(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Validation failed"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "create", "owner": 
"owner", "repo": "repo", @@ -933,6 +933,76 @@ func Test_CreateIssue(t *testing.T) { } } +// Test_IssueWrite_InsidersMode_UIGate verifies the insiders mode UI gate +// behavior: UI clients get a form message, non-UI clients execute directly. +func Test_IssueWrite_InsidersMode_UIGate(t *testing.T) { + t.Parallel() + + mockIssue := &github.Issue{ + Number: github.Ptr(1), + Title: github.Ptr("Test"), + HTMLURL: github.Ptr("https://github.com/owner/repo/issues/1"), + } + + serverTool := IssueWrite(translations.NullTranslationHelper) + + client := github.NewClient(MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ + PostReposIssuesByOwnerByRepo: mockResponse(t, http.StatusCreated, mockIssue), + })) + + deps := BaseDeps{ + Client: client, + GQLClient: githubv4.NewClient(nil), + Flags: FeatureFlags{InsidersMode: true}, + } + handler := serverTool.Handler(deps) + + t.Run("UI client without _ui_submitted returns form message", func(t *testing.T) { + request := createMCPRequestWithSession(t, "Visual Studio Code - Insiders", map[string]any{ + "method": "create", + "owner": "owner", + "repo": "repo", + "title": "Test", + }) + result, err := handler(ContextWithDeps(context.Background(), deps), &request) + require.NoError(t, err) + + textContent := getTextResult(t, result) + assert.Contains(t, textContent.Text, "Ready to create an issue") + }) + + t.Run("UI client with _ui_submitted executes directly", func(t *testing.T) { + request := createMCPRequestWithSession(t, "Visual Studio Code - Insiders", map[string]any{ + "method": "create", + "owner": "owner", + "repo": "repo", + "title": "Test", + "_ui_submitted": true, + }) + result, err := handler(ContextWithDeps(context.Background(), deps), &request) + require.NoError(t, err) + + textContent := getTextResult(t, result) + assert.Contains(t, textContent.Text, "https://github.com/owner/repo/issues/1", + "tool should return the created issue URL") + }) + + t.Run("non-UI client executes directly without _ui_submitted", func(t 
*testing.T) { + request := createMCPRequest(map[string]any{ + "method": "create", + "owner": "owner", + "repo": "repo", + "title": "Test", + }) + result, err := handler(ContextWithDeps(context.Background(), deps), &request) + require.NoError(t, err) + + textContent := getTextResult(t, result) + assert.Contains(t, textContent.Text, "https://github.com/owner/repo/issues/1", + "non-UI client should execute directly") + }) +} + func Test_ListIssues(t *testing.T) { // Verify tool definition serverTool := ListIssues(translations.NullTranslationHelper) @@ -1061,51 +1131,51 @@ func Test_ListIssues(t *testing.T) { mockErrorRepoNotFound := githubv4mock.ErrorResponse("repository not found") // Variables matching what GraphQL receives after JSON marshaling/unmarshaling - varsListAll := map[string]interface{}{ + varsListAll := map[string]any{ "owner": "owner", "repo": "repo", - "states": []interface{}{"OPEN", "CLOSED"}, + "states": []any{"OPEN", "CLOSED"}, "orderBy": "CREATED_AT", "direction": "DESC", "first": float64(30), "after": (*string)(nil), } - varsOpenOnly := map[string]interface{}{ + varsOpenOnly := map[string]any{ "owner": "owner", "repo": "repo", - "states": []interface{}{"OPEN"}, + "states": []any{"OPEN"}, "orderBy": "CREATED_AT", "direction": "DESC", "first": float64(30), "after": (*string)(nil), } - varsClosedOnly := map[string]interface{}{ + varsClosedOnly := map[string]any{ "owner": "owner", "repo": "repo", - "states": []interface{}{"CLOSED"}, + "states": []any{"CLOSED"}, "orderBy": "CREATED_AT", "direction": "DESC", "first": float64(30), "after": (*string)(nil), } - varsWithLabels := map[string]interface{}{ + varsWithLabels := map[string]any{ "owner": "owner", "repo": "repo", - "states": []interface{}{"OPEN", "CLOSED"}, - "labels": []interface{}{"bug", "enhancement"}, + "states": []any{"OPEN", "CLOSED"}, + "labels": []any{"bug", "enhancement"}, "orderBy": "CREATED_AT", "direction": "DESC", "first": float64(30), "after": (*string)(nil), } - varsRepoNotFound := 
map[string]interface{}{ + varsRepoNotFound := map[string]any{ "owner": "owner", "repo": "nonexistent-repo", - "states": []interface{}{"OPEN", "CLOSED"}, + "states": []any{"OPEN", "CLOSED"}, "orderBy": "CREATED_AT", "direction": "DESC", "first": float64(30), @@ -1114,7 +1184,7 @@ func Test_ListIssues(t *testing.T) { tests := []struct { name string - reqParams map[string]interface{} + reqParams map[string]any expectError bool errContains string expectedCount int @@ -1122,7 +1192,7 @@ func Test_ListIssues(t *testing.T) { }{ { name: "list all issues", - reqParams: map[string]interface{}{ + reqParams: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -1131,7 +1201,7 @@ func Test_ListIssues(t *testing.T) { }, { name: "filter by open state", - reqParams: map[string]interface{}{ + reqParams: map[string]any{ "owner": "owner", "repo": "repo", "state": "OPEN", @@ -1141,7 +1211,7 @@ func Test_ListIssues(t *testing.T) { }, { name: "filter by open state - lc", - reqParams: map[string]interface{}{ + reqParams: map[string]any{ "owner": "owner", "repo": "repo", "state": "open", @@ -1151,7 +1221,7 @@ func Test_ListIssues(t *testing.T) { }, { name: "filter by closed state", - reqParams: map[string]interface{}{ + reqParams: map[string]any{ "owner": "owner", "repo": "repo", "state": "CLOSED", @@ -1161,7 +1231,7 @@ func Test_ListIssues(t *testing.T) { }, { name: "filter by labels", - reqParams: map[string]interface{}{ + reqParams: map[string]any{ "owner": "owner", "repo": "repo", "labels": []any{"bug", "enhancement"}, @@ -1171,7 +1241,7 @@ func Test_ListIssues(t *testing.T) { }, { name: "repository not found error", - reqParams: map[string]interface{}{ + reqParams: map[string]any{ "owner": "owner", "repo": "nonexistent-repo", }, @@ -1362,7 +1432,7 @@ func Test_UpdateIssue(t *testing.T) { name string mockedRESTClient *http.Client mockedGQLClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedIssue *github.Issue expectedErrMsg 
string @@ -1370,7 +1440,7 @@ func Test_UpdateIssue(t *testing.T) { { name: "partial update of non-state fields only", mockedRESTClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PatchReposIssuesByOwnerByRepoByIssueNumber: expectRequestBody(t, map[string]interface{}{ + PatchReposIssuesByOwnerByRepoByIssueNumber: expectRequestBody(t, map[string]any{ "title": "Updated Title", "body": "Updated Description", }).andThen( @@ -1378,7 +1448,7 @@ func Test_UpdateIssue(t *testing.T) { ), }), mockedGQLClient: githubv4mock.NewMockedHTTPClient(), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "update", "owner": "owner", "repo": "repo", @@ -1398,7 +1468,7 @@ func Test_UpdateIssue(t *testing.T) { }), }), mockedGQLClient: githubv4mock.NewMockedHTTPClient(), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "update", "owner": "owner", "repo": "repo", @@ -1453,7 +1523,7 @@ func Test_UpdateIssue(t *testing.T) { closeSuccessResponse, ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "update", "owner": "owner", "repo": "repo", @@ -1504,7 +1574,7 @@ func Test_UpdateIssue(t *testing.T) { reopenSuccessResponse, ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "update", "owner": "owner", "repo": "repo", @@ -1536,7 +1606,7 @@ func Test_UpdateIssue(t *testing.T) { githubv4mock.ErrorResponse("Could not resolve to an Issue with the number of 999."), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "update", "owner": "owner", "repo": "repo", @@ -1573,7 +1643,7 @@ func Test_UpdateIssue(t *testing.T) { githubv4mock.ErrorResponse("Could not resolve to an Issue with the number of 999."), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "update", "owner": "owner", "repo": "repo", @@ -1588,7 +1658,7 @@ func Test_UpdateIssue(t *testing.T) { { name: "close as duplicate with 
combined non-state updates", mockedRESTClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PatchReposIssuesByOwnerByRepoByIssueNumber: expectRequestBody(t, map[string]interface{}{ + PatchReposIssuesByOwnerByRepoByIssueNumber: expectRequestBody(t, map[string]any{ "title": "Updated Title", "body": "Updated Description", "labels": []any{"bug", "priority"}, @@ -1649,7 +1719,7 @@ func Test_UpdateIssue(t *testing.T) { closeSuccessResponse, ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "update", "owner": "owner", "repo": "repo", @@ -1671,7 +1741,7 @@ func Test_UpdateIssue(t *testing.T) { name: "duplicate_of without duplicate state_reason should fail", mockedRESTClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), mockedGQLClient: githubv4mock.NewMockedHTTPClient(), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "update", "owner": "owner", "repo": "repo", @@ -1823,7 +1893,7 @@ func Test_GetIssueComments(t *testing.T) { name string mockedClient *http.Client gqlHTTPClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedComments []*github.IssueComment expectedErrMsg string @@ -1834,7 +1904,7 @@ func Test_GetIssueComments(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposIssuesCommentsByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusOK, mockComments), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_comments", "owner": "owner", "repo": "repo", @@ -1853,7 +1923,7 @@ func Test_GetIssueComments(t *testing.T) { mockResponse(t, http.StatusOK, mockComments), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_comments", "owner": "owner", "repo": "repo", @@ -1869,7 +1939,7 @@ func Test_GetIssueComments(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ 
GetReposIssuesCommentsByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusNotFound, `{"message": "Issue not found"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_comments", "owner": "owner", "repo": "repo", @@ -1895,7 +1965,7 @@ func Test_GetIssueComments(t *testing.T) { }), }), gqlHTTPClient: newRepoAccessHTTPClient(), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_comments", "owner": "owner", "repo": "repo", @@ -1950,16 +2020,16 @@ func Test_GetIssueComments(t *testing.T) { textContent := getTextResult(t, result) // Unmarshal and verify the result - var returnedComments []*github.IssueComment + var returnedComments []MinimalIssueComment err = json.Unmarshal([]byte(textContent.Text), &returnedComments) require.NoError(t, err) assert.Equal(t, len(tc.expectedComments), len(returnedComments)) for i := range tc.expectedComments { require.NotNil(t, tc.expectedComments[i].User) require.NotNil(t, returnedComments[i].User) - assert.Equal(t, tc.expectedComments[i].GetID(), returnedComments[i].GetID()) - assert.Equal(t, tc.expectedComments[i].GetBody(), returnedComments[i].GetBody()) - assert.Equal(t, tc.expectedComments[i].GetUser().GetLogin(), returnedComments[i].GetUser().GetLogin()) + assert.Equal(t, tc.expectedComments[i].GetID(), returnedComments[i].ID) + assert.Equal(t, tc.expectedComments[i].GetBody(), returnedComments[i].Body) + assert.Equal(t, tc.expectedComments[i].GetUser().GetLogin(), returnedComments[i].User.Login) } }) } @@ -2834,7 +2904,7 @@ func Test_AddSubIssue(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedIssue *github.Issue expectedErrMsg string @@ -2844,7 +2914,7 @@ func Test_AddSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PostReposIssuesSubIssuesByOwnerByRepoByIssueNumber: mockResponse(t, 
http.StatusCreated, mockIssue), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "add", "owner": "owner", "repo": "repo", @@ -2860,7 +2930,7 @@ func Test_AddSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PostReposIssuesSubIssuesByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusCreated, mockIssue), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "add", "owner": "owner", "repo": "repo", @@ -2875,7 +2945,7 @@ func Test_AddSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PostReposIssuesSubIssuesByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusCreated, mockIssue), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "add", "owner": "owner", "repo": "repo", @@ -2891,7 +2961,7 @@ func Test_AddSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PostReposIssuesSubIssuesByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusNotFound, `{"message": "Parent issue not found"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "add", "owner": "owner", "repo": "repo", @@ -2906,7 +2976,7 @@ func Test_AddSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PostReposIssuesSubIssuesByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusNotFound, `{"message": "Sub-issue not found"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "add", "owner": "owner", "repo": "repo", @@ -2921,7 +2991,7 @@ func Test_AddSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PostReposIssuesSubIssuesByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusUnprocessableEntity, `{"message": "Validation failed", "errors": [{"message": "Sub-issue cannot be a parent of itself"}]}`), }), - requestArgs: 
map[string]interface{}{ + requestArgs: map[string]any{ "method": "add", "owner": "owner", "repo": "repo", @@ -2936,7 +3006,7 @@ func Test_AddSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PostReposIssuesSubIssuesByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusForbidden, `{"message": "Must have write access to repository"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "add", "owner": "owner", "repo": "repo", @@ -2949,7 +3019,7 @@ func Test_AddSubIssue(t *testing.T) { { name: "missing required parameter owner", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "add", "repo": "repo", "issue_number": float64(42), @@ -2961,7 +3031,7 @@ func Test_AddSubIssue(t *testing.T) { { name: "missing required parameter sub_issue_id", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "add", "owner": "owner", "repo": "repo", @@ -3073,7 +3143,7 @@ func Test_GetSubIssues(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedSubIssues []*github.Issue expectedErrMsg string @@ -3083,7 +3153,7 @@ func Test_GetSubIssues(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposIssuesSubIssuesByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusOK, mockSubIssues), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_sub_issues", "owner": "owner", "repo": "repo", @@ -3102,7 +3172,7 @@ func Test_GetSubIssues(t *testing.T) { mockResponse(t, http.StatusOK, mockSubIssues), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_sub_issues", "owner": "owner", "repo": "repo", @@ 
-3118,7 +3188,7 @@ func Test_GetSubIssues(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposIssuesSubIssuesByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusOK, []*github.Issue{}), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_sub_issues", "owner": "owner", "repo": "repo", @@ -3132,7 +3202,7 @@ func Test_GetSubIssues(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposIssuesSubIssuesByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusNotFound, `{"message": "Not Found"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_sub_issues", "owner": "owner", "repo": "repo", @@ -3146,7 +3216,7 @@ func Test_GetSubIssues(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposIssuesSubIssuesByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusNotFound, `{"message": "Not Found"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_sub_issues", "owner": "nonexistent", "repo": "repo", @@ -3160,7 +3230,7 @@ func Test_GetSubIssues(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposIssuesSubIssuesByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusGone, `{"message": "This feature has been deprecated"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_sub_issues", "owner": "owner", "repo": "repo", @@ -3172,7 +3242,7 @@ func Test_GetSubIssues(t *testing.T) { { name: "missing required parameter owner", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_sub_issues", "repo": "repo", "issue_number": float64(42), @@ -3183,7 +3253,7 @@ func Test_GetSubIssues(t *testing.T) { { name: "missing required parameter issue_number", mockedClient: 
MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_sub_issues", "owner": "owner", "repo": "repo", @@ -3291,7 +3361,7 @@ func Test_RemoveSubIssue(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedIssue *github.Issue expectedErrMsg string @@ -3301,7 +3371,7 @@ func Test_RemoveSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ DeleteReposIssuesSubIssueByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusOK, mockIssue), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "remove", "owner": "owner", "repo": "repo", @@ -3316,7 +3386,7 @@ func Test_RemoveSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ DeleteReposIssuesSubIssueByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusNotFound, `{"message": "Not Found"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "remove", "owner": "owner", "repo": "repo", @@ -3331,7 +3401,7 @@ func Test_RemoveSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ DeleteReposIssuesSubIssueByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusNotFound, `{"message": "Sub-issue not found"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "remove", "owner": "owner", "repo": "repo", @@ -3346,7 +3416,7 @@ func Test_RemoveSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ DeleteReposIssuesSubIssueByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusBadRequest, `{"message": "Invalid sub_issue_id"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "remove", "owner": "owner", "repo": "repo", @@ -3361,7 +3431,7 @@ func 
Test_RemoveSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ DeleteReposIssuesSubIssueByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusNotFound, `{"message": "Not Found"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "remove", "owner": "nonexistent", "repo": "repo", @@ -3376,7 +3446,7 @@ func Test_RemoveSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ DeleteReposIssuesSubIssueByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusForbidden, `{"message": "Must have write access to repository"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "remove", "owner": "owner", "repo": "repo", @@ -3389,7 +3459,7 @@ func Test_RemoveSubIssue(t *testing.T) { { name: "missing required parameter owner", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "remove", "repo": "repo", "issue_number": float64(42), @@ -3401,7 +3471,7 @@ func Test_RemoveSubIssue(t *testing.T) { { name: "missing required parameter sub_issue_id", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "remove", "owner": "owner", "repo": "repo", @@ -3499,7 +3569,7 @@ func Test_ReprioritizeSubIssue(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedIssue *github.Issue expectedErrMsg string @@ -3509,7 +3579,7 @@ func Test_ReprioritizeSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PatchReposIssuesSubIssuesPriorityByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusOK, mockIssue), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "reprioritize", 
"owner": "owner", "repo": "repo", @@ -3525,7 +3595,7 @@ func Test_ReprioritizeSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PatchReposIssuesSubIssuesPriorityByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusOK, mockIssue), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "reprioritize", "owner": "owner", "repo": "repo", @@ -3539,7 +3609,7 @@ func Test_ReprioritizeSubIssue(t *testing.T) { { name: "validation error - neither after_id nor before_id specified", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "reprioritize", "owner": "owner", "repo": "repo", @@ -3552,7 +3622,7 @@ func Test_ReprioritizeSubIssue(t *testing.T) { { name: "validation error - both after_id and before_id specified", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "reprioritize", "owner": "owner", "repo": "repo", @@ -3569,7 +3639,7 @@ func Test_ReprioritizeSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PatchReposIssuesSubIssuesPriorityByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusNotFound, `{"message": "Not Found"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "reprioritize", "owner": "owner", "repo": "repo", @@ -3585,7 +3655,7 @@ func Test_ReprioritizeSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PatchReposIssuesSubIssuesPriorityByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusNotFound, `{"message": "Sub-issue not found"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "reprioritize", "owner": "owner", "repo": "repo", @@ -3601,7 +3671,7 @@ func Test_ReprioritizeSubIssue(t *testing.T) { mockedClient: 
MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PatchReposIssuesSubIssuesPriorityByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusUnprocessableEntity, `{"message": "Validation failed", "errors": [{"message": "Positioning sub-issue not found"}]}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "reprioritize", "owner": "owner", "repo": "repo", @@ -3617,7 +3687,7 @@ func Test_ReprioritizeSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PatchReposIssuesSubIssuesPriorityByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusForbidden, `{"message": "Must have write access to repository"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "reprioritize", "owner": "owner", "repo": "repo", @@ -3633,7 +3703,7 @@ func Test_ReprioritizeSubIssue(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PatchReposIssuesSubIssuesPriorityByOwnerByRepoByIssueNumber: mockResponse(t, http.StatusServiceUnavailable, `{"message": "Service Unavailable"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "reprioritize", "owner": "owner", "repo": "repo", @@ -3647,7 +3717,7 @@ func Test_ReprioritizeSubIssue(t *testing.T) { { name: "missing required parameter owner", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "reprioritize", "repo": "repo", "issue_number": float64(42), @@ -3660,7 +3730,7 @@ func Test_ReprioritizeSubIssue(t *testing.T) { { name: "missing required parameter sub_issue_id", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "reprioritize", "owner": "owner", "repo": "repo", @@ -3750,7 +3820,7 @@ func Test_ListIssueTypes(t *testing.T) { tests := []struct { name string 
mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedIssueTypes []*github.IssueType expectedErrMsg string @@ -3760,7 +3830,7 @@ func Test_ListIssueTypes(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ "GET /orgs/testorg/issue-types": mockResponse(t, http.StatusOK, mockIssueTypes), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "testorg", }, expectError: false, @@ -3771,7 +3841,7 @@ func Test_ListIssueTypes(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ "GET /orgs/nonexistent/issue-types": mockResponse(t, http.StatusNotFound, `{"message": "Organization not found"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "nonexistent", }, expectError: true, @@ -3782,7 +3852,7 @@ func Test_ListIssueTypes(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ "GET /orgs/testorg/issue-types": mockResponse(t, http.StatusOK, mockIssueTypes), }), - requestArgs: map[string]interface{}{}, + requestArgs: map[string]any{}, expectError: false, // This should be handled by parameter validation, error returned in result expectedErrMsg: "missing required parameter: owner", }, diff --git a/pkg/github/minimal_types.go b/pkg/github/minimal_types.go index c6a0ea8499..f8c82d78e5 100644 --- a/pkg/github/minimal_types.go +++ b/pkg/github/minimal_types.go @@ -1,7 +1,9 @@ package github import ( - "github.com/google/go-github/v79/github" + "time" + + "github.com/google/go-github/v82/github" ) // MinimalUser is the output type for user and organization search results. @@ -134,8 +136,355 @@ type MinimalProject struct { OwnerType string `json:"owner_type,omitempty"` } +// MinimalReactions is the trimmed output type for reaction summaries, dropping the API URL. 
+type MinimalReactions struct { + TotalCount int `json:"total_count"` + PlusOne int `json:"+1"` + MinusOne int `json:"-1"` + Laugh int `json:"laugh"` + Confused int `json:"confused"` + Heart int `json:"heart"` + Hooray int `json:"hooray"` + Rocket int `json:"rocket"` + Eyes int `json:"eyes"` +} + +// MinimalIssue is the trimmed output type for issue objects to reduce verbosity. +type MinimalIssue struct { + Number int `json:"number"` + Title string `json:"title"` + Body string `json:"body,omitempty"` + State string `json:"state"` + StateReason string `json:"state_reason,omitempty"` + Draft bool `json:"draft,omitempty"` + Locked bool `json:"locked,omitempty"` + HTMLURL string `json:"html_url"` + User *MinimalUser `json:"user,omitempty"` + AuthorAssociation string `json:"author_association,omitempty"` + Labels []string `json:"labels,omitempty"` + Assignees []string `json:"assignees,omitempty"` + Milestone string `json:"milestone,omitempty"` + Comments int `json:"comments,omitempty"` + Reactions *MinimalReactions `json:"reactions,omitempty"` + CreatedAt string `json:"created_at,omitempty"` + UpdatedAt string `json:"updated_at,omitempty"` + ClosedAt string `json:"closed_at,omitempty"` + ClosedBy string `json:"closed_by,omitempty"` + IssueType string `json:"issue_type,omitempty"` +} + +// MinimalIssueComment is the trimmed output type for issue comment objects to reduce verbosity. +type MinimalIssueComment struct { + ID int64 `json:"id"` + Body string `json:"body,omitempty"` + HTMLURL string `json:"html_url"` + User *MinimalUser `json:"user,omitempty"` + AuthorAssociation string `json:"author_association,omitempty"` + Reactions *MinimalReactions `json:"reactions,omitempty"` + CreatedAt string `json:"created_at,omitempty"` + UpdatedAt string `json:"updated_at,omitempty"` +} + +// MinimalFileContentResponse is the trimmed output type for create/update/delete file responses. 
+type MinimalFileContentResponse struct { + Content *MinimalFileContent `json:"content,omitempty"` + Commit *MinimalFileCommit `json:"commit,omitempty"` +} + +// MinimalFileContent is the trimmed content portion of a file operation response. +type MinimalFileContent struct { + Name string `json:"name"` + Path string `json:"path"` + SHA string `json:"sha"` + Size int `json:"size,omitempty"` + HTMLURL string `json:"html_url"` +} + +// MinimalFileCommit is the trimmed commit portion of a file operation response. +type MinimalFileCommit struct { + SHA string `json:"sha"` + Message string `json:"message,omitempty"` + HTMLURL string `json:"html_url,omitempty"` + Author *MinimalCommitAuthor `json:"author,omitempty"` +} + +// MinimalPullRequest is the trimmed output type for pull request objects to reduce verbosity. +type MinimalPullRequest struct { + Number int `json:"number"` + Title string `json:"title"` + Body string `json:"body,omitempty"` + State string `json:"state"` + Draft bool `json:"draft"` + Merged bool `json:"merged"` + MergeableState string `json:"mergeable_state,omitempty"` + HTMLURL string `json:"html_url"` + User *MinimalUser `json:"user,omitempty"` + Labels []string `json:"labels,omitempty"` + Assignees []string `json:"assignees,omitempty"` + RequestedReviewers []string `json:"requested_reviewers,omitempty"` + MergedBy string `json:"merged_by,omitempty"` + Head *MinimalPRBranch `json:"head,omitempty"` + Base *MinimalPRBranch `json:"base,omitempty"` + Additions int `json:"additions,omitempty"` + Deletions int `json:"deletions,omitempty"` + ChangedFiles int `json:"changed_files,omitempty"` + Commits int `json:"commits,omitempty"` + Comments int `json:"comments,omitempty"` + CreatedAt string `json:"created_at,omitempty"` + UpdatedAt string `json:"updated_at,omitempty"` + ClosedAt string `json:"closed_at,omitempty"` + MergedAt string `json:"merged_at,omitempty"` + Milestone string `json:"milestone,omitempty"` +} + +// MinimalPRBranch is the trimmed output 
type for pull request branch references.
type MinimalPRBranch struct {
	Ref  string               `json:"ref"`
	SHA  string               `json:"sha"`
	Repo *MinimalPRBranchRepo `json:"repo,omitempty"`
}

// MinimalPRBranchRepo is the trimmed repo info nested inside a PR branch.
type MinimalPRBranchRepo struct {
	FullName    string `json:"full_name"`
	Description string `json:"description,omitempty"`
}

// MinimalProjectStatusUpdate is the trimmed output type for project status updates.
type MinimalProjectStatusUpdate struct {
	ID         string       `json:"id"`
	Body       string       `json:"body,omitempty"`
	Status     string       `json:"status,omitempty"`
	CreatedAt  string       `json:"created_at,omitempty"`
	StartDate  string       `json:"start_date,omitempty"`
	TargetDate string       `json:"target_date,omitempty"`
	Creator    *MinimalUser `json:"creator,omitempty"`
}

// Helper functions

// convertToMinimalIssue flattens a go-github Issue into a MinimalIssue:
// timestamps become RFC 3339 strings, labels/assignees are reduced to their
// names/logins, and nested users are trimmed via convertToMinimalUser.
// NOTE(review): issue must be non-nil — the direct field accesses
// (issue.CreatedAt, issue.Labels, issue.Reactions) panic on a nil pointer,
// unlike convertToMinimalFileContentResponse which guards nil input.
func convertToMinimalIssue(issue *github.Issue) MinimalIssue {
	m := MinimalIssue{
		Number:            issue.GetNumber(),
		Title:             issue.GetTitle(),
		Body:              issue.GetBody(),
		State:             issue.GetState(),
		StateReason:       issue.GetStateReason(),
		Draft:             issue.GetDraft(),
		Locked:            issue.GetLocked(),
		HTMLURL:           issue.GetHTMLURL(),
		User:              convertToMinimalUser(issue.GetUser()),
		AuthorAssociation: issue.GetAuthorAssociation(),
		Comments:          issue.GetComments(),
	}

	// Only emit timestamps that are actually set; the zero string is omitted by omitempty.
	if issue.CreatedAt != nil {
		m.CreatedAt = issue.CreatedAt.Format(time.RFC3339)
	}
	if issue.UpdatedAt != nil {
		m.UpdatedAt = issue.UpdatedAt.Format(time.RFC3339)
	}
	if issue.ClosedAt != nil {
		m.ClosedAt = issue.ClosedAt.Format(time.RFC3339)
	}

	for _, label := range issue.Labels {
		if label != nil {
			m.Labels = append(m.Labels, label.GetName())
		}
	}

	for _, assignee := range issue.Assignees {
		if assignee != nil {
			m.Assignees = append(m.Assignees, assignee.GetLogin())
		}
	}

	if closedBy := issue.GetClosedBy(); closedBy != nil {
		m.ClosedBy = closedBy.GetLogin()
	}

	if milestone := issue.GetMilestone(); milestone != nil {
		m.Milestone = milestone.GetTitle()
	}

	if issueType := issue.GetType(); issueType != nil {
		m.IssueType = issueType.GetName()
	}

	if r := issue.Reactions; r != nil {
		m.Reactions = &MinimalReactions{
			TotalCount: r.GetTotalCount(),
			PlusOne:    r.GetPlusOne(),
			MinusOne:   r.GetMinusOne(),
			Laugh:      r.GetLaugh(),
			Confused:   r.GetConfused(),
			Heart:      r.GetHeart(),
			Hooray:     r.GetHooray(),
			Rocket:     r.GetRocket(),
			Eyes:       r.GetEyes(),
		}
	}

	return m
}

// convertToMinimalIssueComment flattens a go-github IssueComment into a
// MinimalIssueComment (RFC 3339 timestamps, trimmed user and reactions).
// NOTE(review): comment must be non-nil for the same reason as convertToMinimalIssue.
func convertToMinimalIssueComment(comment *github.IssueComment) MinimalIssueComment {
	m := MinimalIssueComment{
		ID:                comment.GetID(),
		Body:              comment.GetBody(),
		HTMLURL:           comment.GetHTMLURL(),
		User:              convertToMinimalUser(comment.GetUser()),
		AuthorAssociation: comment.GetAuthorAssociation(),
	}

	if comment.CreatedAt != nil {
		m.CreatedAt = comment.CreatedAt.Format(time.RFC3339)
	}
	if comment.UpdatedAt != nil {
		m.UpdatedAt = comment.UpdatedAt.Format(time.RFC3339)
	}

	if r := comment.Reactions; r != nil {
		m.Reactions = &MinimalReactions{
			TotalCount: r.GetTotalCount(),
			PlusOne:    r.GetPlusOne(),
			MinusOne:   r.GetMinusOne(),
			Laugh:      r.GetLaugh(),
			Confused:   r.GetConfused(),
			Heart:      r.GetHeart(),
			Hooray:     r.GetHooray(),
			Rocket:     r.GetRocket(),
			Eyes:       r.GetEyes(),
		}
	}

	return m
}

// convertToMinimalFileContentResponse trims a RepositoryContentResponse down to
// the content and commit fields clients need. A nil resp yields the zero value.
func convertToMinimalFileContentResponse(resp *github.RepositoryContentResponse) MinimalFileContentResponse {
	m := MinimalFileContentResponse{}

	if resp == nil {
		return m
	}

	if c := resp.Content; c != nil {
		m.Content = &MinimalFileContent{
			Name:    c.GetName(),
			Path:    c.GetPath(),
			SHA:     c.GetSHA(),
			Size:    c.GetSize(),
			HTMLURL: c.GetHTMLURL(),
		}
	}

	// NOTE(review): Commit is populated unconditionally, so an empty commit
	// still serializes as an object with empty fields — confirm intended.
	m.Commit = &MinimalFileCommit{
		SHA:     resp.Commit.GetSHA(),
		Message: resp.Commit.GetMessage(),
		HTMLURL: resp.Commit.GetHTMLURL(),
	}

	if author := resp.Commit.Author; author != nil {
		m.Commit.Author = &MinimalCommitAuthor{
			Name:  author.GetName(),
			Email: author.GetEmail(),
		}
		if author.Date != nil {
			m.Commit.Author.Date = author.Date.Format(time.RFC3339)
		}
	}

	return m
}

// convertToMinimalPullRequest flattens a go-github PullRequest into a
// MinimalPullRequest (RFC 3339 timestamps; labels/assignees/reviewers reduced
// to names/logins; head/base trimmed via convertToMinimalPRBranch).
// NOTE(review): pr must be non-nil — direct field accesses panic otherwise.
func convertToMinimalPullRequest(pr *github.PullRequest) MinimalPullRequest {
	m := MinimalPullRequest{
		Number:         pr.GetNumber(),
		Title:          pr.GetTitle(),
		Body:           pr.GetBody(),
		State:          pr.GetState(),
		Draft:          pr.GetDraft(),
		Merged:         pr.GetMerged(),
		MergeableState: pr.GetMergeableState(),
		HTMLURL:        pr.GetHTMLURL(),
		User:           convertToMinimalUser(pr.GetUser()),
		Additions:      pr.GetAdditions(),
		Deletions:      pr.GetDeletions(),
		ChangedFiles:   pr.GetChangedFiles(),
		Commits:        pr.GetCommits(),
		Comments:       pr.GetComments(),
	}

	if pr.CreatedAt != nil {
		m.CreatedAt = pr.CreatedAt.Format(time.RFC3339)
	}
	if pr.UpdatedAt != nil {
		m.UpdatedAt = pr.UpdatedAt.Format(time.RFC3339)
	}
	if pr.ClosedAt != nil {
		m.ClosedAt = pr.ClosedAt.Format(time.RFC3339)
	}
	if pr.MergedAt != nil {
		m.MergedAt = pr.MergedAt.Format(time.RFC3339)
	}

	for _, label := range pr.Labels {
		if label != nil {
			m.Labels = append(m.Labels, label.GetName())
		}
	}

	for _, assignee := range pr.Assignees {
		if assignee != nil {
			m.Assignees = append(m.Assignees, assignee.GetLogin())
		}
	}

	for _, reviewer := range pr.RequestedReviewers {
		if reviewer != nil {
			m.RequestedReviewers = append(m.RequestedReviewers, reviewer.GetLogin())
		}
	}

	if mergedBy := pr.GetMergedBy(); mergedBy != nil {
		m.MergedBy = mergedBy.GetLogin()
	}

	if head := pr.Head; head != nil {
		m.Head = convertToMinimalPRBranch(head)
	}

	if base := pr.Base; base != nil {
		m.Base = convertToMinimalPRBranch(base)
	}

	if milestone := pr.GetMilestone(); milestone != nil {
		m.Milestone = milestone.GetTitle()
	}

	return m
}

// convertToMinimalPRBranch trims a PR branch reference to ref/SHA plus, when
// present, the repo's full name and description. Returns nil for nil input.
func convertToMinimalPRBranch(branch *github.PullRequestBranch) *MinimalPRBranch {
	if branch == nil {
		return nil
	}

	b := &MinimalPRBranch{
		Ref: branch.GetRef(),
		SHA: branch.GetSHA(),
	}

	if repo := branch.GetRepo(); repo != nil {
		b.Repo = &MinimalPRBranchRepo{
			FullName:    repo.GetFullName(),
			Description: repo.GetDescription(),
		}
	}

	return b
}

func
convertToMinimalProject(fullProject *github.ProjectV2) *MinimalProject { if fullProject == nil { return nil @@ -190,7 +539,7 @@ func convertToMinimalCommit(commit *github.RepositoryCommit, includeDiffs bool) Email: commit.Commit.Author.GetEmail(), } if commit.Commit.Author.Date != nil { - minimalCommit.Commit.Author.Date = commit.Commit.Author.Date.Format("2006-01-02T15:04:05Z") + minimalCommit.Commit.Author.Date = commit.Commit.Author.Date.Format(time.RFC3339) } } @@ -200,7 +549,7 @@ func convertToMinimalCommit(commit *github.RepositoryCommit, includeDiffs bool) Email: commit.Commit.Committer.GetEmail(), } if commit.Commit.Committer.Date != nil { - minimalCommit.Commit.Committer.Date = commit.Commit.Committer.Date.Format("2006-01-02T15:04:05Z") + minimalCommit.Commit.Committer.Date = commit.Commit.Committer.Date.Format(time.RFC3339) } } } diff --git a/pkg/github/notifications.go b/pkg/github/notifications.go index 1d695beb3b..ddd3023932 100644 --- a/pkg/github/notifications.go +++ b/pkg/github/notifications.go @@ -14,7 +14,7 @@ import ( "github.com/github/github-mcp-server/pkg/scopes" "github.com/github/github-mcp-server/pkg/translations" "github.com/github/github-mcp-server/pkg/utils" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/modelcontextprotocol/go-sdk/mcp" ) diff --git a/pkg/github/notifications_test.go b/pkg/github/notifications_test.go index d2124ae3d8..030367d067 100644 --- a/pkg/github/notifications_test.go +++ b/pkg/github/notifications_test.go @@ -8,7 +8,7 @@ import ( "github.com/github/github-mcp-server/internal/toolsnaps" "github.com/github/github-mcp-server/pkg/translations" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -42,7 +42,7 @@ func Test_ListNotifications(t *testing.T) { tests := 
[]struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedResult []*github.Notification expectedErrMsg string @@ -52,7 +52,7 @@ func Test_ListNotifications(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetNotifications: mockResponse(t, http.StatusOK, []*github.Notification{mockNotification}), }), - requestArgs: map[string]interface{}{}, + requestArgs: map[string]any{}, expectError: false, expectedResult: []*github.Notification{mockNotification}, }, @@ -61,7 +61,7 @@ func Test_ListNotifications(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetNotifications: mockResponse(t, http.StatusOK, []*github.Notification{mockNotification}), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "filter": "include_read_notifications", }, expectError: false, @@ -72,7 +72,7 @@ func Test_ListNotifications(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetNotifications: mockResponse(t, http.StatusOK, []*github.Notification{mockNotification}), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "filter": "only_participating", }, expectError: false, @@ -83,7 +83,7 @@ func Test_ListNotifications(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposNotificationsByOwnerByRepo: mockResponse(t, http.StatusOK, []*github.Notification{mockNotification}), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "filter": "default", "since": "2024-01-01T00:00:00Z", "before": "2024-01-02T00:00:00Z", @@ -100,7 +100,7 @@ func Test_ListNotifications(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetNotifications: mockResponse(t, http.StatusInternalServerError, `{"message": "error"}`), }), - requestArgs: map[string]interface{}{}, + requestArgs: map[string]any{}, 
expectError: true, expectedErrMsg: "error", }, @@ -159,7 +159,7 @@ func Test_ManageNotificationSubscription(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectIgnored *bool expectDeleted bool @@ -171,7 +171,7 @@ func Test_ManageNotificationSubscription(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PutNotificationsThreadsSubscriptionByThreadID: mockResponse(t, http.StatusOK, mockSub), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "notificationID": "123", "action": "ignore", }, @@ -183,7 +183,7 @@ func Test_ManageNotificationSubscription(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PutNotificationsThreadsSubscriptionByThreadID: mockResponse(t, http.StatusOK, mockSubWatch), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "notificationID": "123", "action": "watch", }, @@ -195,7 +195,7 @@ func Test_ManageNotificationSubscription(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ DeleteNotificationsThreadsSubscriptionByThreadID: mockResponse(t, http.StatusOK, nil), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "notificationID": "123", "action": "delete", }, @@ -205,7 +205,7 @@ func Test_ManageNotificationSubscription(t *testing.T) { { name: "invalid action", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "notificationID": "123", "action": "invalid", }, @@ -215,7 +215,7 @@ func Test_ManageNotificationSubscription(t *testing.T) { { name: "missing required notificationID", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "action": "ignore", }, expectError: true, @@ -223,7 +223,7 @@ 
func Test_ManageNotificationSubscription(t *testing.T) { { name: "missing required action", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "notificationID": "123", }, expectError: true, @@ -296,7 +296,7 @@ func Test_ManageRepositoryNotificationSubscription(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectIgnored *bool expectSubscribed *bool @@ -309,7 +309,7 @@ func Test_ManageRepositoryNotificationSubscription(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PutReposSubscriptionByOwnerByRepo: mockResponse(t, http.StatusOK, mockSub), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "action": "ignore", @@ -322,7 +322,7 @@ func Test_ManageRepositoryNotificationSubscription(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PutReposSubscriptionByOwnerByRepo: mockResponse(t, http.StatusOK, mockWatchSub), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "action": "watch", @@ -336,7 +336,7 @@ func Test_ManageRepositoryNotificationSubscription(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ DeleteReposSubscriptionByOwnerByRepo: mockResponse(t, http.StatusOK, nil), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "action": "delete", @@ -347,7 +347,7 @@ func Test_ManageRepositoryNotificationSubscription(t *testing.T) { { name: "invalid action", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "action": "invalid", @@ -358,7 +358,7 @@ func 
Test_ManageRepositoryNotificationSubscription(t *testing.T) { { name: "missing required owner", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "repo": "repo", "action": "ignore", }, @@ -367,7 +367,7 @@ func Test_ManageRepositoryNotificationSubscription(t *testing.T) { { name: "missing required repo", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "action": "ignore", }, @@ -376,7 +376,7 @@ func Test_ManageRepositoryNotificationSubscription(t *testing.T) { { name: "missing required action", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -452,7 +452,7 @@ func Test_DismissNotification(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectRead bool expectDone bool @@ -464,7 +464,7 @@ func Test_DismissNotification(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PatchNotificationsThreadsByThreadID: mockResponse(t, http.StatusOK, nil), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "threadID": "123", "state": "read", }, @@ -476,7 +476,7 @@ func Test_DismissNotification(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ DeleteNotificationsThreadsByThreadID: mockResponse(t, http.StatusNoContent, nil), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "threadID": "123", "state": "done", }, @@ -488,7 +488,7 @@ func Test_DismissNotification(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ DeleteNotificationsThreadsByThreadID: mockResponse(t, http.StatusOK, nil), }), - 
requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "threadID": "123", "state": "done", }, @@ -498,7 +498,7 @@ func Test_DismissNotification(t *testing.T) { { name: "invalid threadID format", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "threadID": "notanumber", "state": "done", }, @@ -508,7 +508,7 @@ func Test_DismissNotification(t *testing.T) { { name: "missing required threadID", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "state": "read", }, expectError: true, @@ -516,7 +516,7 @@ func Test_DismissNotification(t *testing.T) { { name: "missing required state", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "threadID": "123", }, expectError: true, @@ -524,7 +524,7 @@ func Test_DismissNotification(t *testing.T) { { name: "invalid state value", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "threadID": "123", "state": "invalid", }, @@ -597,7 +597,7 @@ func Test_MarkAllNotificationsRead(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectMarked bool expectedErrMsg string @@ -607,7 +607,7 @@ func Test_MarkAllNotificationsRead(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PutNotifications: mockResponse(t, http.StatusOK, nil), }), - requestArgs: map[string]interface{}{}, + requestArgs: map[string]any{}, expectError: false, expectMarked: true, }, @@ -616,7 +616,7 @@ func Test_MarkAllNotificationsRead(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PutNotifications: mockResponse(t, 
http.StatusOK, nil), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "lastReadAt": "2024-01-01T00:00:00Z", }, expectError: false, @@ -627,7 +627,7 @@ func Test_MarkAllNotificationsRead(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PutReposNotificationsByOwnerByRepo: mockResponse(t, http.StatusOK, nil), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "octocat", "repo": "hello-world", }, @@ -639,7 +639,7 @@ func Test_MarkAllNotificationsRead(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PutNotifications: mockResponse(t, http.StatusInternalServerError, `{"message": "error"}`), }), - requestArgs: map[string]interface{}{}, + requestArgs: map[string]any{}, expectError: true, expectedErrMsg: "error", }, @@ -694,7 +694,7 @@ func Test_GetNotificationDetails(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectResult *github.Notification expectedErrMsg string @@ -704,7 +704,7 @@ func Test_GetNotificationDetails(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetNotificationsThreadsByThreadID: mockResponse(t, http.StatusOK, mockThread), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "notificationID": "123", }, expectError: false, @@ -715,7 +715,7 @@ func Test_GetNotificationDetails(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetNotificationsThreadsByThreadID: mockResponse(t, http.StatusNotFound, `{"message": "not found"}`), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "notificationID": "123", }, expectError: true, diff --git a/pkg/github/params.go b/pkg/github/params.go new file mode 100644 index 0000000000..0dac1773fe --- /dev/null +++ b/pkg/github/params.go @@ -0,0 +1,393 @@ +package github + 
import (
	"errors"
	"fmt"
	"strconv"

	"github.com/google/go-github/v82/github"
	"github.com/google/jsonschema-go/jsonschema"
)

// OptionalParamOK is a helper function that can be used to fetch a requested parameter from the request.
// It returns the value, a boolean indicating if the parameter was present, and an error if the type is wrong.
// Callers must check both return values: ok == true together with err != nil
// means the parameter was present but had the wrong type.
func OptionalParamOK[T any, A map[string]any](args A, p string) (value T, ok bool, err error) {
	// Check if the parameter is present in the request
	val, exists := args[p]
	if !exists {
		// Not present, return zero value, false, no error
		return
	}

	// Check if the parameter is of the expected type
	value, ok = val.(T)
	if !ok {
		// Present but wrong type
		err = fmt.Errorf("parameter %s is not of type %T, is %T", p, value, val)
		ok = true // Set ok to true because the parameter *was* present, even if wrong type
		return
	}

	// Present and correct type
	ok = true
	return
}

// isAcceptedError checks if the error is an accepted error.
// errors.As unwraps, so wrapped AcceptedErrors are also detected
// (go-github documents AcceptedError as its 202 Accepted response — verify against library docs).
func isAcceptedError(err error) bool {
	var acceptedError *github.AcceptedError
	return errors.As(err, &acceptedError)
}

// RequiredParam is a helper function that can be used to fetch a requested parameter from the request.
// It does the following checks:
// 1. Checks if the parameter is present in the request.
// 2. Checks if the parameter is of the expected type.
// 3.
Checks if the parameter is not empty, i.e: non-zero value +func RequiredParam[T comparable](args map[string]any, p string) (T, error) { + var zero T + + // Check if the parameter is present in the request + if _, ok := args[p]; !ok { + return zero, fmt.Errorf("missing required parameter: %s", p) + } + + // Check if the parameter is of the expected type + val, ok := args[p].(T) + if !ok { + return zero, fmt.Errorf("parameter %s is not of type %T", p, zero) + } + + if val == zero { + return zero, fmt.Errorf("missing required parameter: %s", p) + } + + return val, nil +} + +// RequiredInt is a helper function that can be used to fetch a requested parameter from the request. +// It does the following checks: +// 1. Checks if the parameter is present in the request. +// 2. Checks if the parameter is of the expected type. +// 3. Checks if the parameter is not empty, i.e: non-zero value +func RequiredInt(args map[string]any, p string) (int, error) { + v, err := RequiredParam[float64](args, p) + if err != nil { + return 0, err + } + return int(v), nil +} + +// RequiredBigInt is a helper function that can be used to fetch a requested parameter from the request. +// It does the following checks: +// 1. Checks if the parameter is present in the request. +// 2. Checks if the parameter is of the expected type (float64). +// 3. Checks if the parameter is not empty, i.e: non-zero value. +// 4. Validates that the float64 value can be safely converted to int64 without truncation. +func RequiredBigInt(args map[string]any, p string) (int64, error) { + v, err := RequiredParam[float64](args, p) + if err != nil { + return 0, err + } + + result := int64(v) + // Check if converting back produces the same value to avoid silent truncation + if float64(result) != v { + return 0, fmt.Errorf("parameter %s value %f is too large to fit in int64", p, v) + } + return result, nil +} + +// OptionalParam is a helper function that can be used to fetch a requested parameter from the request. 
// It does the following checks:
// 1. Checks if the parameter is present in the request, if not, it returns its zero-value
// 2. If it is present, it checks if the parameter is of the expected type and returns it
func OptionalParam[T any](args map[string]any, p string) (T, error) {
	var zero T

	// Single lookup and single type assertion instead of repeated args[p] accesses.
	val, present := args[p]
	if !present {
		// Absent optional parameters are not an error; the zero value means "unset".
		return zero, nil
	}

	typed, ok := val.(T)
	if !ok {
		return zero, fmt.Errorf("parameter %s is not of type %T, is %T", p, zero, val)
	}

	return typed, nil
}

// OptionalIntParam is a helper function that can be used to fetch a requested parameter from the request.
// It does the following checks:
// 1. Checks if the parameter is present in the request, if not, it returns its zero-value
// 2. If it is present, it checks if the parameter is of the expected type and returns it
// NOTE(review): fractional float64 values are truncated toward zero here,
// unlike RequiredBigInt which rejects them — confirm intended.
func OptionalIntParam(args map[string]any, p string) (int, error) {
	v, err := OptionalParam[float64](args, p)
	if err != nil {
		return 0, err
	}
	return int(v), nil
}

// OptionalIntParamWithDefault is a helper function that can be used to fetch a requested parameter from the request
// similar to optionalIntParam, but it also takes a default value.
// Note that an explicit 0 is indistinguishable from "absent" and is replaced by d.
func OptionalIntParamWithDefault(args map[string]any, p string, d int) (int, error) {
	v, err := OptionalIntParam(args, p)
	if err != nil {
		return 0, err
	}
	if v == 0 {
		return d, nil
	}
	return v, nil
}

// OptionalBoolParamWithDefault is a helper function that can be used to fetch a requested parameter from the request
// similar to optionalBoolParam, but it also takes a default value.
func OptionalBoolParamWithDefault(args map[string]any, p string, d bool) (bool, error) {
	raw, present := args[p]
	if !present {
		// Absent: fall back to the caller-supplied default.
		return d, nil
	}
	b, ok := raw.(bool)
	if !ok {
		return false, fmt.Errorf("parameter %s is not of type %T, is %T", p, false, raw)
	}
	return b, nil
}

// OptionalStringArrayParam is a helper function that can be used to fetch a requested parameter from the request.
// It does the following checks:
// 1. Checks if the parameter is present in the request, if not, it returns its zero-value
// 2. If it is present, iterates the elements and checks each is a string
func OptionalStringArrayParam(args map[string]any, p string) ([]string, error) {
	raw, present := args[p]
	if !present {
		return []string{}, nil
	}

	switch typed := raw.(type) {
	case nil:
		return []string{}, nil
	case []string:
		return typed, nil
	case []any:
		out := make([]string, len(typed))
		for i, elem := range typed {
			s, ok := elem.(string)
			if !ok {
				return []string{}, fmt.Errorf("parameter %s is not of type string, is %T", p, elem)
			}
			out[i] = s
		}
		return out, nil
	default:
		return []string{}, fmt.Errorf("parameter %s could not be coerced to []string, is %T", p, raw)
	}
}

// convertStringSliceToBigIntSlice converts each decimal string in s to int64,
// failing on the first element that does not parse.
func convertStringSliceToBigIntSlice(s []string) ([]int64, error) {
	out := make([]int64, len(s))
	for i, str := range s {
		n, err := convertStringToBigInt(str, 0)
		if err != nil {
			return nil, fmt.Errorf("failed to convert element %d (%s) to int64: %w", i, str, err)
		}
		out[i] = n
	}
	return out, nil
}

// convertStringToBigInt parses s as a base-10 int64, returning def alongside
// a wrapped error when parsing fails.
func convertStringToBigInt(s string, def int64) (int64, error) {
	parsed, err := strconv.ParseInt(s, 10, 64)
	if err != nil {
		return def, fmt.Errorf("failed to convert string %s to int64: %w", s, err)
	}
	return parsed, nil
}

// OptionalBigIntArrayParam is a helper function that can be used to fetch a requested parameter from the request.
// It does the following checks:
// 1.
Checks if the parameter is present in the request, if not, it returns an empty slice
// 2. If it is present, iterates the elements, checks each is a string, and converts them to int64 values
func OptionalBigIntArrayParam(args map[string]any, p string) ([]int64, error) {
	// Check if the parameter is present in the request
	if _, ok := args[p]; !ok {
		return []int64{}, nil
	}

	switch v := args[p].(type) {
	case nil:
		return []int64{}, nil
	case []string:
		return convertStringSliceToBigIntSlice(v)
	case []any:
		int64Slice := make([]int64, len(v))
		// NOTE: the loop variable v shadows the switch-scoped v above.
		for i, v := range v {
			s, ok := v.(string)
			if !ok {
				return []int64{}, fmt.Errorf("parameter %s is not of type string, is %T", p, v)
			}
			val, err := convertStringToBigInt(s, 0)
			if err != nil {
				return []int64{}, fmt.Errorf("parameter %s: failed to convert element %d (%s) to int64: %w", p, i, s, err)
			}
			int64Slice[i] = val
		}
		return int64Slice, nil
	default:
		return []int64{}, fmt.Errorf("parameter %s could not be coerced to []int64, is %T", p, args[p])
	}
}

// WithPagination adds REST API pagination parameters to a tool.
// https://docs.github.com/en/rest/using-the-rest-api/using-pagination-in-the-rest-api
// NOTE(review): assumes schema.Properties is already initialized (non-nil map) — confirm callers guarantee this.
func WithPagination(schema *jsonschema.Schema) *jsonschema.Schema {
	schema.Properties["page"] = &jsonschema.Schema{
		Type:        "number",
		Description: "Page number for pagination (min 1)",
		Minimum:     jsonschema.Ptr(1.0),
	}

	schema.Properties["perPage"] = &jsonschema.Schema{
		Type:        "number",
		Description: "Results per page for pagination (min 1, max 100)",
		Minimum:     jsonschema.Ptr(1.0),
		Maximum:     jsonschema.Ptr(100.0),
	}

	return schema
}

// WithUnifiedPagination adds REST API pagination parameters to a tool.
// GraphQL tools will use this and convert page/perPage to GraphQL cursor parameters internally.
+func WithUnifiedPagination(schema *jsonschema.Schema) *jsonschema.Schema { + schema.Properties["page"] = &jsonschema.Schema{ + Type: "number", + Description: "Page number for pagination (min 1)", + Minimum: jsonschema.Ptr(1.0), + } + + schema.Properties["perPage"] = &jsonschema.Schema{ + Type: "number", + Description: "Results per page for pagination (min 1, max 100)", + Minimum: jsonschema.Ptr(1.0), + Maximum: jsonschema.Ptr(100.0), + } + + schema.Properties["after"] = &jsonschema.Schema{ + Type: "string", + Description: "Cursor for pagination. Use the endCursor from the previous page's PageInfo for GraphQL APIs.", + } + + return schema +} + +// WithCursorPagination adds only cursor-based pagination parameters to a tool (no page parameter). +func WithCursorPagination(schema *jsonschema.Schema) *jsonschema.Schema { + schema.Properties["perPage"] = &jsonschema.Schema{ + Type: "number", + Description: "Results per page for pagination (min 1, max 100)", + Minimum: jsonschema.Ptr(1.0), + Maximum: jsonschema.Ptr(100.0), + } + + schema.Properties["after"] = &jsonschema.Schema{ + Type: "string", + Description: "Cursor for pagination. Use the endCursor from the previous page's PageInfo for GraphQL APIs.", + } + + return schema +} + +type PaginationParams struct { + Page int + PerPage int + After string +} + +// OptionalPaginationParams returns the "page", "perPage", and "after" parameters from the request, +// or their default values if not present, "page" default is 1, "perPage" default is 30. +// In future, we may want to make the default values configurable, or even have this +// function returned from `withPagination`, where the defaults are provided alongside +// the min/max values. 
+func OptionalPaginationParams(args map[string]any) (PaginationParams, error) { + page, err := OptionalIntParamWithDefault(args, "page", 1) + if err != nil { + return PaginationParams{}, err + } + perPage, err := OptionalIntParamWithDefault(args, "perPage", 30) + if err != nil { + return PaginationParams{}, err + } + after, err := OptionalParam[string](args, "after") + if err != nil { + return PaginationParams{}, err + } + return PaginationParams{ + Page: page, + PerPage: perPage, + After: after, + }, nil +} + +// OptionalCursorPaginationParams returns the "perPage" and "after" parameters from the request, +// without the "page" parameter, suitable for cursor-based pagination only. +func OptionalCursorPaginationParams(args map[string]any) (CursorPaginationParams, error) { + perPage, err := OptionalIntParamWithDefault(args, "perPage", 30) + if err != nil { + return CursorPaginationParams{}, err + } + after, err := OptionalParam[string](args, "after") + if err != nil { + return CursorPaginationParams{}, err + } + return CursorPaginationParams{ + PerPage: perPage, + After: after, + }, nil +} + +type CursorPaginationParams struct { + PerPage int + After string +} + +// ToGraphQLParams converts cursor pagination parameters to GraphQL-specific parameters. +func (p CursorPaginationParams) ToGraphQLParams() (*GraphQLPaginationParams, error) { + if p.PerPage > 100 { + return nil, fmt.Errorf("perPage value %d exceeds maximum of 100", p.PerPage) + } + if p.PerPage < 0 { + return nil, fmt.Errorf("perPage value %d cannot be negative", p.PerPage) + } + first := int32(p.PerPage) + + var after *string + if p.After != "" { + after = &p.After + } + + return &GraphQLPaginationParams{ + First: &first, + After: after, + }, nil +} + +type GraphQLPaginationParams struct { + First *int32 + After *string +} + +// ToGraphQLParams converts REST API pagination parameters to GraphQL-specific parameters. +// This converts page/perPage to first parameter for GraphQL queries. 
+// If After is provided, it takes precedence over page-based pagination. +func (p PaginationParams) ToGraphQLParams() (*GraphQLPaginationParams, error) { + // Convert to CursorPaginationParams and delegate to avoid duplication + cursor := CursorPaginationParams{ + PerPage: p.PerPage, + After: p.After, + } + return cursor.ToGraphQLParams() +} diff --git a/pkg/github/params_test.go b/pkg/github/params_test.go new file mode 100644 index 0000000000..5c989d55ab --- /dev/null +++ b/pkg/github/params_test.go @@ -0,0 +1,503 @@ +package github + +import ( + "fmt" + "testing" + + "github.com/google/go-github/v82/github" + "github.com/stretchr/testify/assert" +) + +func Test_IsAcceptedError(t *testing.T) { + tests := []struct { + name string + err error + expectAccepted bool + }{ + { + name: "github AcceptedError", + err: &github.AcceptedError{}, + expectAccepted: true, + }, + { + name: "regular error", + err: fmt.Errorf("some other error"), + expectAccepted: false, + }, + { + name: "nil error", + err: nil, + expectAccepted: false, + }, + { + name: "wrapped AcceptedError", + err: fmt.Errorf("wrapped: %w", &github.AcceptedError{}), + expectAccepted: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result := isAcceptedError(tc.err) + assert.Equal(t, tc.expectAccepted, result) + }) + } +} + +func Test_RequiredStringParam(t *testing.T) { + tests := []struct { + name string + params map[string]any + paramName string + expected string + expectError bool + }{ + { + name: "valid string parameter", + params: map[string]any{"name": "test-value"}, + paramName: "name", + expected: "test-value", + expectError: false, + }, + { + name: "missing parameter", + params: map[string]any{}, + paramName: "name", + expected: "", + expectError: true, + }, + { + name: "empty string parameter", + params: map[string]any{"name": ""}, + paramName: "name", + expected: "", + expectError: true, + }, + { + name: "wrong type parameter", + params: map[string]any{"name": 
123}, + paramName: "name", + expected: "", + expectError: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result, err := RequiredParam[string](tc.params, tc.paramName) + + if tc.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.expected, result) + } + }) + } +} + +func Test_OptionalStringParam(t *testing.T) { + tests := []struct { + name string + params map[string]any + paramName string + expected string + expectError bool + }{ + { + name: "valid string parameter", + params: map[string]any{"name": "test-value"}, + paramName: "name", + expected: "test-value", + expectError: false, + }, + { + name: "missing parameter", + params: map[string]any{}, + paramName: "name", + expected: "", + expectError: false, + }, + { + name: "empty string parameter", + params: map[string]any{"name": ""}, + paramName: "name", + expected: "", + expectError: false, + }, + { + name: "wrong type parameter", + params: map[string]any{"name": 123}, + paramName: "name", + expected: "", + expectError: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result, err := OptionalParam[string](tc.params, tc.paramName) + + if tc.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.expected, result) + } + }) + } +} + +func Test_RequiredInt(t *testing.T) { + tests := []struct { + name string + params map[string]any + paramName string + expected int + expectError bool + }{ + { + name: "valid number parameter", + params: map[string]any{"count": float64(42)}, + paramName: "count", + expected: 42, + expectError: false, + }, + { + name: "missing parameter", + params: map[string]any{}, + paramName: "count", + expected: 0, + expectError: true, + }, + { + name: "wrong type parameter", + params: map[string]any{"count": "not-a-number"}, + paramName: "count", + expected: 0, + expectError: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t 
*testing.T) { + result, err := RequiredInt(tc.params, tc.paramName) + + if tc.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.expected, result) + } + }) + } +} +func Test_OptionalIntParam(t *testing.T) { + tests := []struct { + name string + params map[string]any + paramName string + expected int + expectError bool + }{ + { + name: "valid number parameter", + params: map[string]any{"count": float64(42)}, + paramName: "count", + expected: 42, + expectError: false, + }, + { + name: "missing parameter", + params: map[string]any{}, + paramName: "count", + expected: 0, + expectError: false, + }, + { + name: "zero value", + params: map[string]any{"count": float64(0)}, + paramName: "count", + expected: 0, + expectError: false, + }, + { + name: "wrong type parameter", + params: map[string]any{"count": "not-a-number"}, + paramName: "count", + expected: 0, + expectError: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result, err := OptionalIntParam(tc.params, tc.paramName) + + if tc.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.expected, result) + } + }) + } +} + +func Test_OptionalNumberParamWithDefault(t *testing.T) { + tests := []struct { + name string + params map[string]any + paramName string + defaultVal int + expected int + expectError bool + }{ + { + name: "valid number parameter", + params: map[string]any{"count": float64(42)}, + paramName: "count", + defaultVal: 10, + expected: 42, + expectError: false, + }, + { + name: "missing parameter", + params: map[string]any{}, + paramName: "count", + defaultVal: 10, + expected: 10, + expectError: false, + }, + { + name: "zero value", + params: map[string]any{"count": float64(0)}, + paramName: "count", + defaultVal: 10, + expected: 10, + expectError: false, + }, + { + name: "wrong type parameter", + params: map[string]any{"count": "not-a-number"}, + paramName: "count", + defaultVal: 10, + 
expected: 0, + expectError: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result, err := OptionalIntParamWithDefault(tc.params, tc.paramName, tc.defaultVal) + + if tc.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.expected, result) + } + }) + } +} + +func Test_OptionalBooleanParam(t *testing.T) { + tests := []struct { + name string + params map[string]any + paramName string + expected bool + expectError bool + }{ + { + name: "true value", + params: map[string]any{"flag": true}, + paramName: "flag", + expected: true, + expectError: false, + }, + { + name: "false value", + params: map[string]any{"flag": false}, + paramName: "flag", + expected: false, + expectError: false, + }, + { + name: "missing parameter", + params: map[string]any{}, + paramName: "flag", + expected: false, + expectError: false, + }, + { + name: "wrong type parameter", + params: map[string]any{"flag": "not-a-boolean"}, + paramName: "flag", + expected: false, + expectError: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result, err := OptionalParam[bool](tc.params, tc.paramName) + + if tc.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.expected, result) + } + }) + } +} + +func TestOptionalStringArrayParam(t *testing.T) { + tests := []struct { + name string + params map[string]any + paramName string + expected []string + expectError bool + }{ + { + name: "parameter not in request", + params: map[string]any{}, + paramName: "flag", + expected: []string{}, + expectError: false, + }, + { + name: "valid any array parameter", + params: map[string]any{ + "flag": []any{"v1", "v2"}, + }, + paramName: "flag", + expected: []string{"v1", "v2"}, + expectError: false, + }, + { + name: "valid string array parameter", + params: map[string]any{ + "flag": []string{"v1", "v2"}, + }, + paramName: "flag", + expected: []string{"v1", "v2"}, + 
expectError: false, + }, + { + name: "wrong type parameter", + params: map[string]any{ + "flag": 1, + }, + paramName: "flag", + expected: []string{}, + expectError: true, + }, + { + name: "wrong slice type parameter", + params: map[string]any{ + "flag": []any{"foo", 2}, + }, + paramName: "flag", + expected: []string{}, + expectError: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result, err := OptionalStringArrayParam(tc.params, tc.paramName) + + if tc.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.expected, result) + } + }) + } +} + +func TestOptionalPaginationParams(t *testing.T) { + tests := []struct { + name string + params map[string]any + expected PaginationParams + expectError bool + }{ + { + name: "no pagination parameters, default values", + params: map[string]any{}, + expected: PaginationParams{ + Page: 1, + PerPage: 30, + }, + expectError: false, + }, + { + name: "page parameter, default perPage", + params: map[string]any{ + "page": float64(2), + }, + expected: PaginationParams{ + Page: 2, + PerPage: 30, + }, + expectError: false, + }, + { + name: "perPage parameter, default page", + params: map[string]any{ + "perPage": float64(50), + }, + expected: PaginationParams{ + Page: 1, + PerPage: 50, + }, + expectError: false, + }, + { + name: "page and perPage parameters", + params: map[string]any{ + "page": float64(2), + "perPage": float64(50), + }, + expected: PaginationParams{ + Page: 2, + PerPage: 50, + }, + expectError: false, + }, + { + name: "invalid page parameter", + params: map[string]any{ + "page": "not-a-number", + }, + expected: PaginationParams{}, + expectError: true, + }, + { + name: "invalid perPage parameter", + params: map[string]any{ + "perPage": "not-a-number", + }, + expected: PaginationParams{}, + expectError: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result, err := OptionalPaginationParams(tc.params) + + if 
tc.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.expected, result) + } + }) + } +} diff --git a/pkg/github/projects.go b/pkg/github/projects.go index 4fed6364f5..dcb9193eca 100644 --- a/pkg/github/projects.go +++ b/pkg/github/projects.go @@ -6,1023 +6,133 @@ import ( "fmt" "io" "net/http" - "strings" + "time" ghErrors "github.com/github/github-mcp-server/pkg/errors" "github.com/github/github-mcp-server/pkg/inventory" "github.com/github/github-mcp-server/pkg/scopes" "github.com/github/github-mcp-server/pkg/translations" "github.com/github/github-mcp-server/pkg/utils" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/modelcontextprotocol/go-sdk/mcp" "github.com/shurcooL/githubv4" ) const ( - ProjectUpdateFailedError = "failed to update a project item" - ProjectAddFailedError = "failed to add a project item" - ProjectDeleteFailedError = "failed to delete a project item" - ProjectListFailedError = "failed to list project items" - MaxProjectsPerPage = 50 + ProjectUpdateFailedError = "failed to update a project item" + ProjectAddFailedError = "failed to add a project item" + ProjectDeleteFailedError = "failed to delete a project item" + ProjectListFailedError = "failed to list project items" + ProjectStatusUpdateListFailedError = "failed to list project status updates" + ProjectStatusUpdateGetFailedError = "failed to get project status update" + ProjectStatusUpdateCreateFailedError = "failed to create project status update" + ProjectResolveIDFailedError = "failed to resolve project ID" + MaxProjectsPerPage = 50 ) -// FeatureFlagHoldbackConsolidatedProjects is the feature flag that, when enabled, reverts to -// individual project tools instead of the consolidated project tools. 
-const FeatureFlagHoldbackConsolidatedProjects = "mcp_holdback_consolidated_projects" - // Method constants for consolidated project tools const ( - projectsMethodListProjects = "list_projects" - projectsMethodListProjectFields = "list_project_fields" - projectsMethodListProjectItems = "list_project_items" - projectsMethodGetProject = "get_project" - projectsMethodGetProjectField = "get_project_field" - projectsMethodGetProjectItem = "get_project_item" - projectsMethodAddProjectItem = "add_project_item" - projectsMethodUpdateProjectItem = "update_project_item" - projectsMethodDeleteProjectItem = "delete_project_item" + projectsMethodListProjects = "list_projects" + projectsMethodListProjectFields = "list_project_fields" + projectsMethodListProjectItems = "list_project_items" + projectsMethodGetProject = "get_project" + projectsMethodGetProjectField = "get_project_field" + projectsMethodGetProjectItem = "get_project_item" + projectsMethodAddProjectItem = "add_project_item" + projectsMethodUpdateProjectItem = "update_project_item" + projectsMethodDeleteProjectItem = "delete_project_item" + projectsMethodListProjectStatusUpdates = "list_project_status_updates" + projectsMethodGetProjectStatusUpdate = "get_project_status_update" + projectsMethodCreateProjectStatusUpdate = "create_project_status_update" ) -func ListProjects(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataProjects, - mcp.Tool{ - Name: "list_projects", - Description: t("TOOL_LIST_PROJECTS_DESCRIPTION", `List Projects for a user or organization`), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_LIST_PROJECTS_USER_TITLE", "List projects"), - ReadOnlyHint: true, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner_type": { - Type: "string", - Description: "Owner type", - Enum: []any{"user", "org"}, - }, - "owner": { - Type: "string", - Description: "If owner_type == user it is the handle for the 
GitHub user account. If owner_type == org it is the name of the organization. The name is not case sensitive.", - }, - "query": { - Type: "string", - Description: `Filter projects by title text and open/closed state; permitted qualifiers: is:open, is:closed; examples: "roadmap is:open", "is:open feature planning".`, - }, - "per_page": { - Type: "number", - Description: fmt.Sprintf("Results per page (max %d)", MaxProjectsPerPage), - }, - "after": { - Type: "string", - Description: "Forward pagination cursor from previous pageInfo.nextCursor.", - }, - "before": { - Type: "string", - Description: "Backward pagination cursor from previous pageInfo.prevCursor (rare).", - }, - }, - Required: []string{"owner_type", "owner"}, - }, - }, - []scopes.Scope{scopes.ReadProject}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - ownerType, err := RequiredParam[string](args, "owner_type") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - queryStr, err := OptionalParam[string](args, "query") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - pagination, err := extractPaginationOptionsFromArgs(args) - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - var resp *github.Response - var projects []*github.ProjectV2 - var queryPtr *string - - if queryStr != "" { - queryPtr = &queryStr - } - - minimalProjects := []MinimalProject{} - opts := &github.ListProjectsOptions{ - ListProjectsPaginationOptions: pagination, - Query: queryPtr, - } - - if ownerType == "org" { - projects, resp, err = client.Projects.ListOrganizationProjects(ctx, owner, opts) - } else { - 
projects, resp, err = client.Projects.ListUserProjects(ctx, owner, opts) - } - - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, - "failed to list projects", - resp, - err, - ), nil, nil - } - defer func() { _ = resp.Body.Close() }() - - for _, project := range projects { - minimalProjects = append(minimalProjects, *convertToMinimalProject(project)) - } - - response := map[string]any{ - "projects": minimalProjects, - "pageInfo": buildPageInfo(resp), - } - - r, err := json.Marshal(response) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } +// GraphQL types for ProjectV2 status updates - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedProjects - return tool +type statusUpdateNode struct { + ID githubv4.ID + Body *githubv4.String + Status *githubv4.String + CreatedAt githubv4.DateTime + StartDate *githubv4.String + TargetDate *githubv4.String + Creator struct { + Login githubv4.String + } } -func GetProject(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataProjects, - mcp.Tool{ - Name: "get_project", - Description: t("TOOL_GET_PROJECT_DESCRIPTION", "Get Project for a user or org"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_GET_PROJECT_USER_TITLE", "Get project"), - ReadOnlyHint: true, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "project_number": { - Type: "number", - Description: "The project's number", - }, - "owner_type": { - Type: "string", - Description: "Owner type", - Enum: []any{"user", "org"}, - }, - "owner": { - Type: "string", - Description: "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. 
The name is not case sensitive.", - }, - }, - Required: []string{"project_number", "owner_type", "owner"}, - }, - }, - []scopes.Scope{scopes.ReadProject}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - - projectNumber, err := RequiredInt(args, "project_number") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - ownerType, err := RequiredParam[string](args, "owner_type") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - var resp *github.Response - var project *github.ProjectV2 - - if ownerType == "org" { - project, resp, err = client.Projects.GetOrganizationProject(ctx, owner, projectNumber) - } else { - project, resp, err = client.Projects.GetUserProject(ctx, owner, projectNumber) - } - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, - "failed to get project", - resp, - err, - ), nil, nil - } - defer func() { _ = resp.Body.Close() }() - - if resp.StatusCode != http.StatusOK { - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, nil, fmt.Errorf("failed to read response body: %w", err) - } - return ghErrors.NewGitHubAPIStatusErrorResponse(ctx, "failed to get project", resp, body), nil, nil - } - - minimalProject := convertToMinimalProject(project) - r, err := json.Marshal(minimalProject) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedProjects - return tool +type statusUpdateConnection struct { + Nodes []statusUpdateNode + PageInfo PageInfoFragment } -func ListProjectFields(t 
translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataProjects, - mcp.Tool{ - Name: "list_project_fields", - Description: t("TOOL_LIST_PROJECT_FIELDS_DESCRIPTION", "List Project fields for a user or org"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_LIST_PROJECT_FIELDS_USER_TITLE", "List project fields"), - ReadOnlyHint: true, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner_type": { - Type: "string", - Description: "Owner type", - Enum: []any{"user", "org"}, - }, - "owner": { - Type: "string", - Description: "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. The name is not case sensitive.", - }, - "project_number": { - Type: "number", - Description: "The project's number.", - }, - "per_page": { - Type: "number", - Description: fmt.Sprintf("Results per page (max %d)", MaxProjectsPerPage), - }, - "after": { - Type: "string", - Description: "Forward pagination cursor from previous pageInfo.nextCursor.", - }, - "before": { - Type: "string", - Description: "Backward pagination cursor from previous pageInfo.prevCursor (rare).", - }, - }, - Required: []string{"owner_type", "owner", "project_number"}, - }, - }, - []scopes.Scope{scopes.ReadProject}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - ownerType, err := RequiredParam[string](args, "owner_type") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - projectNumber, err := RequiredInt(args, "project_number") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - pagination, err := extractPaginationOptionsFromArgs(args) - if err != nil { - return 
utils.NewToolResultError(err.Error()), nil, nil - } - - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - var resp *github.Response - var projectFields []*github.ProjectV2Field - - opts := &github.ListProjectsOptions{ - ListProjectsPaginationOptions: pagination, - } - - if ownerType == "org" { - projectFields, resp, err = client.Projects.ListOrganizationProjectFields(ctx, owner, projectNumber, opts) - } else { - projectFields, resp, err = client.Projects.ListUserProjectFields(ctx, owner, projectNumber, opts) - } - - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, - "failed to list project fields", - resp, - err, - ), nil, nil - } - defer func() { _ = resp.Body.Close() }() - - response := map[string]any{ - "fields": projectFields, - "pageInfo": buildPageInfo(resp), - } - - r, err := json.Marshal(response) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedProjects - return tool +// statusUpdatesUserQuery is the GraphQL query for listing status updates on a user-owned project. 
+type statusUpdatesUserQuery struct { + User struct { + ProjectV2 struct { + StatusUpdates statusUpdateConnection `graphql:"statusUpdates(first: $first, after: $after, orderBy: {field: CREATED_AT, direction: DESC})"` + } `graphql:"projectV2(number: $projectNumber)"` + } `graphql:"user(login: $owner)"` } -func GetProjectField(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataProjects, - mcp.Tool{ - Name: "get_project_field", - Description: t("TOOL_GET_PROJECT_FIELD_DESCRIPTION", "Get Project field for a user or org"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_GET_PROJECT_FIELD_USER_TITLE", "Get project field"), - ReadOnlyHint: true, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner_type": { - Type: "string", - Description: "Owner type", - Enum: []any{"user", "org"}, - }, - "owner": { - Type: "string", - Description: "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. 
The name is not case sensitive.", - }, - "project_number": { - Type: "number", - Description: "The project's number.", - }, - "field_id": { - Type: "number", - Description: "The field's id.", - }, - }, - Required: []string{"owner_type", "owner", "project_number", "field_id"}, - }, - }, - []scopes.Scope{scopes.ReadProject}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - ownerType, err := RequiredParam[string](args, "owner_type") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - projectNumber, err := RequiredInt(args, "project_number") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - fieldID, err := RequiredBigInt(args, "field_id") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - var resp *github.Response - var projectField *github.ProjectV2Field - - if ownerType == "org" { - projectField, resp, err = client.Projects.GetOrganizationProjectField(ctx, owner, projectNumber, fieldID) - } else { - projectField, resp, err = client.Projects.GetUserProjectField(ctx, owner, projectNumber, fieldID) - } - - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, - "failed to get project field", - resp, - err, - ), nil, nil - } - defer func() { _ = resp.Body.Close() }() - - if resp.StatusCode != http.StatusOK { - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, nil, fmt.Errorf("failed to read response body: %w", err) - } - return ghErrors.NewGitHubAPIStatusErrorResponse(ctx, "failed to get project field", resp, body), nil, nil - } - r, err := json.Marshal(projectField) - if err != nil { - return nil, nil, fmt.Errorf("failed to 
marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedProjects - return tool -} - -func ListProjectItems(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataProjects, - mcp.Tool{ - Name: "list_project_items", - Description: t("TOOL_LIST_PROJECT_ITEMS_DESCRIPTION", `Search project items with advanced filtering`), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_LIST_PROJECT_ITEMS_USER_TITLE", "List project items"), - ReadOnlyHint: true, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner_type": { - Type: "string", - Description: "Owner type", - Enum: []any{"user", "org"}, - }, - "owner": { - Type: "string", - Description: "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. The name is not case sensitive.", - }, - "project_number": { - Type: "number", - Description: "The project's number.", - }, - "query": { - Type: "string", - Description: `Query string for advanced filtering of project items using GitHub's project filtering syntax.`, - }, - "per_page": { - Type: "number", - Description: fmt.Sprintf("Results per page (max %d)", MaxProjectsPerPage), - }, - "after": { - Type: "string", - Description: "Forward pagination cursor from previous pageInfo.nextCursor.", - }, - "before": { - Type: "string", - Description: "Backward pagination cursor from previous pageInfo.prevCursor (rare).", - }, - "fields": { - Type: "array", - Description: "Field IDs to include (e.g. [\"102589\", \"985201\"]). CRITICAL: Always provide to get field values. 
Without this, only titles returned.", - Items: &jsonschema.Schema{ - Type: "string", - }, - }, - }, - Required: []string{"owner_type", "owner", "project_number"}, - }, - }, - []scopes.Scope{scopes.ReadProject}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - ownerType, err := RequiredParam[string](args, "owner_type") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - projectNumber, err := RequiredInt(args, "project_number") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - queryStr, err := OptionalParam[string](args, "query") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - fields, err := OptionalBigIntArrayParam(args, "fields") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - pagination, err := extractPaginationOptionsFromArgs(args) - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - var resp *github.Response - var projectItems []*github.ProjectV2Item - var queryPtr *string - - if queryStr != "" { - queryPtr = &queryStr - } - - opts := &github.ListProjectItemsOptions{ - Fields: fields, - ListProjectsOptions: github.ListProjectsOptions{ - ListProjectsPaginationOptions: pagination, - Query: queryPtr, - }, - } - - if ownerType == "org" { - projectItems, resp, err = client.Projects.ListOrganizationProjectItems(ctx, owner, projectNumber, opts) - } else { - projectItems, resp, err = client.Projects.ListUserProjectItems(ctx, owner, projectNumber, opts) - } - - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, - ProjectListFailedError, - resp, - err, - ), nil, 
nil - } - defer func() { _ = resp.Body.Close() }() - - response := map[string]any{ - "items": projectItems, - "pageInfo": buildPageInfo(resp), - } - - r, err := json.Marshal(response) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedProjects - return tool -} - -func GetProjectItem(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataProjects, - mcp.Tool{ - Name: "get_project_item", - Description: t("TOOL_GET_PROJECT_ITEM_DESCRIPTION", "Get a specific Project item for a user or org"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_GET_PROJECT_ITEM_USER_TITLE", "Get project item"), - ReadOnlyHint: true, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner_type": { - Type: "string", - Description: "Owner type", - Enum: []any{"user", "org"}, - }, - "owner": { - Type: "string", - Description: "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. The name is not case sensitive.", - }, - "project_number": { - Type: "number", - Description: "The project's number.", - }, - "item_id": { - Type: "number", - Description: "The item's ID.", - }, - "fields": { - Type: "array", - Description: "Specific list of field IDs to include in the response (e.g. [\"102589\", \"985201\", \"169875\"]). 
If not provided, only the title field is included.", - Items: &jsonschema.Schema{ - Type: "string", - }, - }, - }, - Required: []string{"owner_type", "owner", "project_number", "item_id"}, - }, - }, - []scopes.Scope{scopes.ReadProject}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - ownerType, err := RequiredParam[string](args, "owner_type") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - projectNumber, err := RequiredInt(args, "project_number") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - itemID, err := RequiredBigInt(args, "item_id") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - fields, err := OptionalBigIntArrayParam(args, "fields") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - var resp *github.Response - var projectItem *github.ProjectV2Item - var opts *github.GetProjectItemOptions - - if len(fields) > 0 { - opts = &github.GetProjectItemOptions{ - Fields: fields, - } - } - - if ownerType == "org" { - projectItem, resp, err = client.Projects.GetOrganizationProjectItem(ctx, owner, projectNumber, itemID, opts) - } else { - projectItem, resp, err = client.Projects.GetUserProjectItem(ctx, owner, projectNumber, itemID, opts) - } - - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, - "failed to get project item", - resp, - err, - ), nil, nil - } - defer func() { _ = resp.Body.Close() }() - - r, err := json.Marshal(projectItem) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil - }, - 
) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedProjects - return tool +// statusUpdatesOrgQuery is the GraphQL query for listing status updates on an org-owned project. +type statusUpdatesOrgQuery struct { + Organization struct { + ProjectV2 struct { + StatusUpdates statusUpdateConnection `graphql:"statusUpdates(first: $first, after: $after, orderBy: {field: CREATED_AT, direction: DESC})"` + } `graphql:"projectV2(number: $projectNumber)"` + } `graphql:"organization(login: $owner)"` } -func AddProjectItem(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataProjects, - mcp.Tool{ - Name: "add_project_item", - Description: t("TOOL_ADD_PROJECT_ITEM_DESCRIPTION", "Add a specific Project item for a user or org"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_ADD_PROJECT_ITEM_USER_TITLE", "Add project item"), - ReadOnlyHint: false, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner_type": { - Type: "string", - Description: "Owner type", - Enum: []any{"user", "org"}, - }, - "owner": { - Type: "string", - Description: "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. 
The name is not case sensitive.", - }, - "project_number": { - Type: "number", - Description: "The project's number.", - }, - "item_type": { - Type: "string", - Description: "The item's type, either issue or pull_request.", - Enum: []any{"issue", "pull_request"}, - }, - "item_id": { - Type: "number", - Description: "The numeric ID of the issue or pull request to add to the project.", - }, - }, - Required: []string{"owner_type", "owner", "project_number", "item_type", "item_id"}, - }, - }, - []scopes.Scope{scopes.Project}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - ownerType, err := RequiredParam[string](args, "owner_type") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - projectNumber, err := RequiredInt(args, "project_number") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - itemID, err := RequiredBigInt(args, "item_id") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - itemType, err := RequiredParam[string](args, "item_type") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - if itemType != "issue" && itemType != "pull_request" { - return utils.NewToolResultError("item_type must be either 'issue' or 'pull_request'"), nil, nil - } - - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - newItem := &github.AddProjectItemOptions{ - ID: itemID, - Type: toNewProjectType(itemType), - } - - var resp *github.Response - var addedItem *github.ProjectV2Item - - if ownerType == "org" { - addedItem, resp, err = client.Projects.AddOrganizationProjectItem(ctx, owner, projectNumber, newItem) - } else { - addedItem, resp, err = client.Projects.AddUserProjectItem(ctx, 
owner, projectNumber, newItem) - } - - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, - ProjectAddFailedError, - resp, - err, - ), nil, nil - } - defer func() { _ = resp.Body.Close() }() - - if resp.StatusCode != http.StatusCreated { - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, nil, fmt.Errorf("failed to read response body: %w", err) - } - return ghErrors.NewGitHubAPIStatusErrorResponse(ctx, ProjectAddFailedError, resp, body), nil, nil - } - r, err := json.Marshal(addedItem) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedProjects - return tool -} - -func UpdateProjectItem(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataProjects, - mcp.Tool{ - Name: "update_project_item", - Description: t("TOOL_UPDATE_PROJECT_ITEM_DESCRIPTION", "Update a specific Project item for a user or org"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_UPDATE_PROJECT_ITEM_USER_TITLE", "Update project item"), - ReadOnlyHint: false, - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner_type": { - Type: "string", - Description: "Owner type", - Enum: []any{"user", "org"}, - }, - "owner": { - Type: "string", - Description: "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. The name is not case sensitive.", - }, - "project_number": { - Type: "number", - Description: "The project's number.", - }, - "item_id": { - Type: "number", - Description: "The unique identifier of the project item. This is not the issue or pull request ID.", - }, - "updated_field": { - Type: "object", - Description: "Object consisting of the ID of the project field to update and the new value for the field. 
To clear the field, set value to null. Example: {\"id\": 123456, \"value\": \"New Value\"}", - }, - }, - Required: []string{"owner_type", "owner", "project_number", "item_id", "updated_field"}, - }, - }, - []scopes.Scope{scopes.Project}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { - - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - ownerType, err := RequiredParam[string](args, "owner_type") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - projectNumber, err := RequiredInt(args, "project_number") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - itemID, err := RequiredBigInt(args, "item_id") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - rawUpdatedField, exists := args["updated_field"] - if !exists { - return utils.NewToolResultError("missing required parameter: updated_field"), nil, nil - } - - fieldValue, ok := rawUpdatedField.(map[string]any) - if !ok || fieldValue == nil { - return utils.NewToolResultError("field_value must be an object"), nil, nil - } - - updatePayload, err := buildUpdateProjectItem(fieldValue) - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - - var resp *github.Response - var updatedItem *github.ProjectV2Item - - if ownerType == "org" { - updatedItem, resp, err = client.Projects.UpdateOrganizationProjectItem(ctx, owner, projectNumber, itemID, updatePayload) - } else { - updatedItem, resp, err = client.Projects.UpdateUserProjectItem(ctx, owner, projectNumber, itemID, updatePayload) - } - - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, - ProjectUpdateFailedError, - resp, - err, - ), nil, nil - } - defer func() { _ 
= resp.Body.Close() }() - - if resp.StatusCode != http.StatusOK { - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, nil, fmt.Errorf("failed to read response body: %w", err) - } - return ghErrors.NewGitHubAPIStatusErrorResponse(ctx, ProjectUpdateFailedError, resp, body), nil, nil - } - r, err := json.Marshal(updatedItem) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } - - return utils.NewToolResultText(string(r)), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedProjects - return tool +// statusUpdateNodeQuery is the GraphQL query for fetching a single status update by node ID. +type statusUpdateNodeQuery struct { + Node struct { + StatusUpdate statusUpdateNode `graphql:"... on ProjectV2StatusUpdate"` + } `graphql:"node(id: $id)"` } -func DeleteProjectItem(t translations.TranslationHelperFunc) inventory.ServerTool { - tool := NewTool( - ToolsetMetadataProjects, - mcp.Tool{ - Name: "delete_project_item", - Description: t("TOOL_DELETE_PROJECT_ITEM_DESCRIPTION", "Delete a specific Project item for a user or org"), - Annotations: &mcp.ToolAnnotations{ - Title: t("TOOL_DELETE_PROJECT_ITEM_USER_TITLE", "Delete project item"), - ReadOnlyHint: false, - DestructiveHint: jsonschema.Ptr(true), - }, - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner_type": { - Type: "string", - Description: "Owner type", - Enum: []any{"user", "org"}, - }, - "owner": { - Type: "string", - Description: "If owner_type == user it is the handle for the GitHub user account. If owner_type == org it is the name of the organization. 
The name is not case sensitive.", - }, - "project_number": { - Type: "number", - Description: "The project's number.", - }, - "item_id": { - Type: "number", - Description: "The internal project item ID to delete from the project (not the issue or pull request ID).", - }, - }, - Required: []string{"owner_type", "owner", "project_number", "item_id"}, - }, - }, - []scopes.Scope{scopes.Project}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { +// CreateProjectV2StatusUpdateInput is the input for the createProjectV2StatusUpdate mutation. +// Defined locally because the shurcooL/githubv4 library does not include this type. +type CreateProjectV2StatusUpdateInput struct { + ProjectID githubv4.ID `json:"projectId"` + Body *githubv4.String `json:"body,omitempty"` + Status *githubv4.String `json:"status,omitempty"` + StartDate *githubv4.String `json:"startDate,omitempty"` + TargetDate *githubv4.String `json:"targetDate,omitempty"` + ClientMutationID *githubv4.String `json:"clientMutationId,omitempty"` +} - owner, err := RequiredParam[string](args, "owner") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - ownerType, err := RequiredParam[string](args, "owner_type") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - projectNumber, err := RequiredInt(args, "project_number") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - itemID, err := RequiredBigInt(args, "item_id") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - client, err := deps.GetClient(ctx) - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } +// validProjectV2StatusUpdateStatuses is the set of valid status values for the createProjectV2StatusUpdate mutation. 
+var validProjectV2StatusUpdateStatuses = map[string]bool{ + "INACTIVE": true, + "ON_TRACK": true, + "AT_RISK": true, + "OFF_TRACK": true, + "COMPLETE": true, +} - var resp *github.Response - if ownerType == "org" { - resp, err = client.Projects.DeleteOrganizationProjectItem(ctx, owner, projectNumber, itemID) - } else { - resp, err = client.Projects.DeleteUserProjectItem(ctx, owner, projectNumber, itemID) - } +func convertToMinimalStatusUpdate(node statusUpdateNode) MinimalProjectStatusUpdate { + var creator *MinimalUser + if login := string(node.Creator.Login); login != "" { + creator = &MinimalUser{Login: login} + } - if err != nil { - return ghErrors.NewGitHubAPIErrorResponse(ctx, - ProjectDeleteFailedError, - resp, - err, - ), nil, nil - } - defer func() { _ = resp.Body.Close() }() + return MinimalProjectStatusUpdate{ + ID: fmt.Sprintf("%v", node.ID), + Body: derefString(node.Body), + Status: derefString(node.Status), + CreatedAt: node.CreatedAt.Time.Format(time.RFC3339), + StartDate: derefString(node.StartDate), + TargetDate: derefString(node.TargetDate), + Creator: creator, + } +} - if resp.StatusCode != http.StatusNoContent { - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, nil, fmt.Errorf("failed to read response body: %w", err) - } - return ghErrors.NewGitHubAPIStatusErrorResponse(ctx, ProjectDeleteFailedError, resp, body), nil, nil - } - return utils.NewToolResultText("project item successfully deleted"), nil, nil - }, - ) - tool.FeatureFlagEnable = FeatureFlagHoldbackConsolidatedProjects - return tool +func derefString(s *githubv4.String) string { + if s == nil { + return "" + } + return string(*s) } // ProjectsList returns the tool and handler for listing GitHub Projects resources. 
@@ -1049,6 +159,7 @@ Use this tool to list projects for a user or organization, or list project field projectsMethodListProjects, projectsMethodListProjectFields, projectsMethodListProjectItems, + projectsMethodListProjectStatusUpdates, }, }, "owner_type": { @@ -1062,7 +173,7 @@ Use this tool to list projects for a user or organization, or list project field }, "project_number": { Type: "number", - Description: "The project's number. Required for 'list_project_fields' and 'list_project_items' methods.", + Description: "The project's number. Required for 'list_project_fields', 'list_project_items', and 'list_project_status_updates' methods.", }, "query": { Type: "string", @@ -1116,8 +227,8 @@ Use this tool to list projects for a user or organization, or list project field switch method { case projectsMethodListProjects: return listProjects(ctx, client, args, owner, ownerType) - case projectsMethodListProjectFields: - // Detect owner type if not provided and project_number is available + default: + // All other methods require project_number and ownerType detection if ownerType == "" { projectNumber, err := RequiredInt(args, "project_number") if err != nil { @@ -1128,26 +239,24 @@ Use this tool to list projects for a user or organization, or list project field return utils.NewToolResultError(err.Error()), nil, nil } } - return listProjectFields(ctx, client, args, owner, ownerType) - case projectsMethodListProjectItems: - // Detect owner type if not provided and project_number is available - if ownerType == "" { - projectNumber, err := RequiredInt(args, "project_number") - if err != nil { - return utils.NewToolResultError(err.Error()), nil, nil - } - ownerType, err = detectOwnerType(ctx, client, owner, projectNumber) + + switch method { + case projectsMethodListProjectFields: + return listProjectFields(ctx, client, args, owner, ownerType) + case projectsMethodListProjectItems: + return listProjectItems(ctx, client, args, owner, ownerType) + case 
projectsMethodListProjectStatusUpdates: + gqlClient, err := deps.GetGQLClient(ctx) if err != nil { return utils.NewToolResultError(err.Error()), nil, nil } + return listProjectStatusUpdates(ctx, gqlClient, args, owner, ownerType) + default: + return utils.NewToolResultError(fmt.Sprintf("unknown method: %s", method)), nil, nil } - return listProjectItems(ctx, client, args, owner, ownerType) - default: - return utils.NewToolResultError(fmt.Sprintf("unknown method: %s", method)), nil, nil } }, ) - tool.FeatureFlagDisable = FeatureFlagHoldbackConsolidatedProjects return tool } @@ -1174,6 +283,7 @@ Use this tool to get details about individual projects, project fields, and proj projectsMethodGetProject, projectsMethodGetProjectField, projectsMethodGetProjectItem, + projectsMethodGetProjectStatusUpdate, }, }, "owner_type": { @@ -1204,8 +314,12 @@ Use this tool to get details about individual projects, project fields, and proj Type: "string", }, }, + "status_update_id": { + Type: "string", + Description: "The node ID of the project status update. 
Required for 'get_project_status_update' method.", + }, }, - Required: []string{"method", "owner", "project_number"}, + Required: []string{"method"}, }, }, []scopes.Scope{scopes.ReadProject}, @@ -1215,6 +329,19 @@ Use this tool to get details about individual projects, project fields, and proj return utils.NewToolResultError(err.Error()), nil, nil } + // Handle get_project_status_update early — it only needs status_update_id + if method == projectsMethodGetProjectStatusUpdate { + statusUpdateID, err := RequiredParam[string](args, "status_update_id") + if err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + gqlClient, err := deps.GetGQLClient(ctx) + if err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + return getProjectStatusUpdate(ctx, gqlClient, statusUpdateID) + } + owner, err := RequiredParam[string](args, "owner") if err != nil { return utils.NewToolResultError(err.Error()), nil, nil @@ -1267,7 +394,6 @@ Use this tool to get details about individual projects, project fields, and proj } }, ) - tool.FeatureFlagDisable = FeatureFlagHoldbackConsolidatedProjects return tool } @@ -1277,7 +403,7 @@ func ProjectsWrite(t translations.TranslationHelperFunc) inventory.ServerTool { ToolsetMetadataProjects, mcp.Tool{ Name: "projects_write", - Description: t("TOOL_PROJECTS_WRITE_DESCRIPTION", "Add, update, or delete project items in a GitHub Project."), + Description: t("TOOL_PROJECTS_WRITE_DESCRIPTION", "Add, update, or delete project items, or create status updates in a GitHub Project."), Annotations: &mcp.ToolAnnotations{ Title: t("TOOL_PROJECTS_WRITE_USER_TITLE", "Modify GitHub Project items"), ReadOnlyHint: false, @@ -1293,6 +419,7 @@ func ProjectsWrite(t translations.TranslationHelperFunc) inventory.ServerTool { projectsMethodAddProjectItem, projectsMethodUpdateProjectItem, projectsMethodDeleteProjectItem, + projectsMethodCreateProjectStatusUpdate, }, }, "owner_type": { @@ -1337,6 +464,23 @@ func ProjectsWrite(t 
translations.TranslationHelperFunc) inventory.ServerTool { Type: "object", Description: "Object consisting of the ID of the project field to update and the new value for the field. To clear the field, set value to null. Example: {\"id\": 123456, \"value\": \"New Value\"}. Required for 'update_project_item' method.", }, + "body": { + Type: "string", + Description: "The body of the status update (markdown). Used for 'create_project_status_update' method.", + }, + "status": { + Type: "string", + Description: "The status of the project. Used for 'create_project_status_update' method.", + Enum: []any{"INACTIVE", "ON_TRACK", "AT_RISK", "OFF_TRACK", "COMPLETE"}, + }, + "start_date": { + Type: "string", + Description: "The start date of the status update in YYYY-MM-DD format. Used for 'create_project_status_update' method.", + }, + "target_date": { + Type: "string", + Description: "The target date of the status update in YYYY-MM-DD format. Used for 'create_project_status_update' method.", + }, }, Required: []string{"method", "owner", "project_number"}, }, @@ -1433,12 +577,29 @@ func ProjectsWrite(t translations.TranslationHelperFunc) inventory.ServerTool { return utils.NewToolResultError(err.Error()), nil, nil } return deleteProjectItem(ctx, client, owner, ownerType, projectNumber, itemID) + case projectsMethodCreateProjectStatusUpdate: + body, err := OptionalParam[string](args, "body") + if err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + status, err := OptionalParam[string](args, "status") + if err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + startDate, err := OptionalParam[string](args, "start_date") + if err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + targetDate, err := OptionalParam[string](args, "target_date") + if err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + return createProjectStatusUpdate(ctx, gqlClient, owner, ownerType, projectNumber, body, status, 
startDate, targetDate) default: return utils.NewToolResultError(fmt.Sprintf("unknown method: %s", method)), nil, nil } }, ) - tool.FeatureFlagDisable = FeatureFlagHoldbackConsolidatedProjects return tool } @@ -1864,6 +1025,43 @@ func deleteProjectItem(ctx context.Context, client *github.Client, owner, ownerT return utils.NewToolResultText("project item successfully deleted"), nil, nil } +// resolveProjectNodeID resolves (owner, ownerType, projectNumber) to a project node ID via GraphQL. +func resolveProjectNodeID(ctx context.Context, gqlClient *githubv4.Client, owner, ownerType string, projectNumber int) (githubv4.ID, error) { + var projectIDQueryUser struct { + User struct { + ProjectV2 struct { + ID githubv4.ID + } `graphql:"projectV2(number: $projectNumber)"` + } `graphql:"user(login: $owner)"` + } + var projectIDQueryOrg struct { + Organization struct { + ProjectV2 struct { + ID githubv4.ID + } `graphql:"projectV2(number: $projectNumber)"` + } `graphql:"organization(login: $owner)"` + } + + queryVars := map[string]any{ + "owner": githubv4.String(owner), + "projectNumber": githubv4.Int(int32(projectNumber)), //nolint:gosec // Project numbers are small integers + } + + if ownerType == "org" { + err := gqlClient.Query(ctx, &projectIDQueryOrg, queryVars) + if err != nil { + return "", fmt.Errorf("%s: %w", ProjectResolveIDFailedError, err) + } + return projectIDQueryOrg.Organization.ProjectV2.ID, nil + } + + err := gqlClient.Query(ctx, &projectIDQueryUser, queryVars) + if err != nil { + return "", fmt.Errorf("%s: %w", ProjectResolveIDFailedError, err) + } + return projectIDQueryUser.User.ProjectV2.ID, nil +} + // addProjectItem adds an item to a project by resolving the issue/PR number to a node ID func addProjectItem(ctx context.Context, gqlClient *githubv4.Client, owner, ownerType string, projectNumber int, itemOwner, itemRepo string, itemNumber int, itemType string) (*mcp.CallToolResult, any, error) { if itemType != "issue" && itemType != "pull_request" { @@ 
-1891,41 +1089,10 @@ func addProjectItem(ctx context.Context, gqlClient *githubv4.Client, owner, owne } `graphql:"addProjectV2ItemById(input: $input)"` } - // First, get the project ID - var projectIDQuery struct { - User struct { - ProjectV2 struct { - ID githubv4.ID - } `graphql:"projectV2(number: $projectNumber)"` - } `graphql:"user(login: $owner)"` - } - var projectIDQueryOrg struct { - Organization struct { - ProjectV2 struct { - ID githubv4.ID - } `graphql:"projectV2(number: $projectNumber)"` - } `graphql:"organization(login: $owner)"` - } - - var projectID githubv4.ID - if ownerType == "org" { - err = gqlClient.Query(ctx, &projectIDQueryOrg, map[string]any{ - "owner": githubv4.String(owner), - "projectNumber": githubv4.Int(int32(projectNumber)), //nolint:gosec // Project numbers are small integers - }) - if err != nil { - return utils.NewToolResultError(fmt.Sprintf("failed to get project ID: %v", err)), nil, nil - } - projectID = projectIDQueryOrg.Organization.ProjectV2.ID - } else { - err = gqlClient.Query(ctx, &projectIDQuery, map[string]any{ - "owner": githubv4.String(owner), - "projectNumber": githubv4.Int(int32(projectNumber)), //nolint:gosec // Project numbers are small integers - }) - if err != nil { - return utils.NewToolResultError(fmt.Sprintf("failed to get project ID: %v", err)), nil, nil - } - projectID = projectIDQuery.User.ProjectV2.ID + // Resolve the project number to a node ID + projectID, err := resolveProjectNodeID(ctx, gqlClient, owner, ownerType, projectNumber) + if err != nil { + return utils.NewToolResultError(err.Error()), nil, nil } // Add the item to the project @@ -1952,6 +1119,188 @@ func addProjectItem(ctx context.Context, gqlClient *githubv4.Client, owner, owne return utils.NewToolResultText(string(r)), nil, nil } +// validateDateFormat checks that a date string is in YYYY-MM-DD format. 
+func validateDateFormat(value, fieldName string) error { + if _, err := time.Parse("2006-01-02", value); err != nil { + return fmt.Errorf("invalid %s %q: must be YYYY-MM-DD format", fieldName, value) + } + return nil +} + +// createProjectStatusUpdate creates a new status update for a project via GraphQL. +func createProjectStatusUpdate(ctx context.Context, gqlClient *githubv4.Client, owner, ownerType string, projectNumber int, body, status, startDate, targetDate string) (*mcp.CallToolResult, any, error) { + // Validate inputs + if ownerType != "user" && ownerType != "org" { + return utils.NewToolResultError(fmt.Sprintf("invalid owner_type %q: must be \"user\" or \"org\"", ownerType)), nil, nil + } + if status != "" && !validProjectV2StatusUpdateStatuses[status] { + return utils.NewToolResultError(fmt.Sprintf("invalid status %q: must be one of INACTIVE, ON_TRACK, AT_RISK, OFF_TRACK, COMPLETE", status)), nil, nil + } + if startDate != "" { + if err := validateDateFormat(startDate, "start_date"); err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + } + if targetDate != "" { + if err := validateDateFormat(targetDate, "target_date"); err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + } + + // Resolve project number to project node ID + projectID, err := resolveProjectNodeID(ctx, gqlClient, owner, ownerType, projectNumber) + if err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + + // Build mutation input + input := CreateProjectV2StatusUpdateInput{ + ProjectID: projectID, + } + + if body != "" { + s := githubv4.String(body) + input.Body = &s + } + if status != "" { + s := githubv4.String(status) + input.Status = &s + } + if startDate != "" { + s := githubv4.String(startDate) + input.StartDate = &s + } + if targetDate != "" { + s := githubv4.String(targetDate) + input.TargetDate = &s + } + + // Execute mutation + var mutation struct { + CreateProjectV2StatusUpdate struct { + StatusUpdate 
statusUpdateNode + } `graphql:"createProjectV2StatusUpdate(input: $input)"` + } + + err = gqlClient.Mutate(ctx, &mutation, input, nil) + if err != nil { + return utils.NewToolResultError(fmt.Sprintf("%s: %v", ProjectStatusUpdateCreateFailedError, err)), nil, nil + } + + // Convert and return + result := convertToMinimalStatusUpdate(mutation.CreateProjectV2StatusUpdate.StatusUpdate) + + r, err := json.Marshal(result) + if err != nil { + return nil, nil, fmt.Errorf("failed to marshal response: %w", err) + } + + return utils.NewToolResultText(string(r)), nil, nil +} + +// listProjectStatusUpdates lists status updates for a project via GraphQL. +func listProjectStatusUpdates(ctx context.Context, gqlClient *githubv4.Client, args map[string]any, owner, ownerType string) (*mcp.CallToolResult, any, error) { + if ownerType != "user" && ownerType != "org" { + return utils.NewToolResultError(fmt.Sprintf("invalid owner_type %q: must be \"user\" or \"org\"", ownerType)), nil, nil + } + + projectNumber, err := RequiredInt(args, "project_number") + if err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + + perPage, err := OptionalIntParamWithDefault(args, "per_page", MaxProjectsPerPage) + if err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + if perPage > MaxProjectsPerPage { + perPage = MaxProjectsPerPage + } + if perPage < 1 { + perPage = MaxProjectsPerPage + } + + afterCursor, err := OptionalParam[string](args, "after") + if err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + + vars := map[string]any{ + "owner": githubv4.String(owner), + "projectNumber": githubv4.Int(int32(projectNumber)), //nolint:gosec // Project numbers are small integers + "first": githubv4.Int(int32(perPage)), //nolint:gosec // perPage is bounded by MaxProjectsPerPage + } + if afterCursor != "" { + vars["after"] = githubv4.String(afterCursor) + } else { + vars["after"] = (*githubv4.String)(nil) + } + + var nodes []statusUpdateNode + var 
pi PageInfoFragment + + if ownerType == "org" { + var q statusUpdatesOrgQuery + if err := gqlClient.Query(ctx, &q, vars); err != nil { + return utils.NewToolResultError(fmt.Sprintf("%s: %v", ProjectStatusUpdateListFailedError, err)), nil, nil + } + nodes = q.Organization.ProjectV2.StatusUpdates.Nodes + pi = q.Organization.ProjectV2.StatusUpdates.PageInfo + } else { + var q statusUpdatesUserQuery + if err := gqlClient.Query(ctx, &q, vars); err != nil { + return utils.NewToolResultError(fmt.Sprintf("%s: %v", ProjectStatusUpdateListFailedError, err)), nil, nil + } + nodes = q.User.ProjectV2.StatusUpdates.Nodes + pi = q.User.ProjectV2.StatusUpdates.PageInfo + } + + updates := make([]MinimalProjectStatusUpdate, 0, len(nodes)) + for _, n := range nodes { + updates = append(updates, convertToMinimalStatusUpdate(n)) + } + + response := map[string]any{ + "statusUpdates": updates, + "pageInfo": map[string]any{ + "hasNextPage": pi.HasNextPage, + "hasPreviousPage": pi.HasPreviousPage, + "nextCursor": string(pi.EndCursor), + "prevCursor": string(pi.StartCursor), + }, + } + + r, err := json.Marshal(response) + if err != nil { + return nil, nil, fmt.Errorf("failed to marshal response: %w", err) + } + return utils.NewToolResultText(string(r)), nil, nil +} + +// getProjectStatusUpdate fetches a single status update by its node ID via GraphQL. 
+func getProjectStatusUpdate(ctx context.Context, gqlClient *githubv4.Client, statusUpdateID string) (*mcp.CallToolResult, any, error) { + var q statusUpdateNodeQuery + vars := map[string]any{ + "id": githubv4.ID(statusUpdateID), + } + + if err := gqlClient.Query(ctx, &q, vars); err != nil { + return utils.NewToolResultError(fmt.Sprintf("%s: %v", ProjectStatusUpdateGetFailedError, err)), nil, nil + } + + if q.Node.StatusUpdate.ID == nil || q.Node.StatusUpdate.ID == "" { + return utils.NewToolResultError(fmt.Sprintf("%s: node is not a ProjectV2StatusUpdate or was not found", ProjectStatusUpdateGetFailedError)), nil, nil + } + + update := convertToMinimalStatusUpdate(q.Node.StatusUpdate) + + r, err := json.Marshal(update) + if err != nil { + return nil, nil, fmt.Errorf("failed to marshal response: %w", err) + } + return utils.NewToolResultText(string(r)), nil, nil +} + type pageInfo struct { HasNextPage bool `json:"hasNextPage"` HasPreviousPage bool `json:"hasPreviousPage"` @@ -1959,17 +1308,6 @@ type pageInfo struct { PrevCursor string `json:"prevCursor,omitempty"` } -func toNewProjectType(projType string) string { - switch strings.ToLower(projType) { - case "issue": - return "Issue" - case "pull_request": - return "PullRequest" - default: - return "" - } -} - // validateAndConvertToInt64 ensures the value is a number and converts it to int64. 
func validateAndConvertToInt64(value any) (int64, error) { switch v := value.(type) { diff --git a/pkg/github/projects_test.go b/pkg/github/projects_test.go index 24163ef90e..9b0e07292f 100644 --- a/pkg/github/projects_test.go +++ b/pkg/github/projects_test.go @@ -9,1529 +9,13 @@ import ( "github.com/github/github-mcp-server/internal/githubv4mock" "github.com/github/github-mcp-server/internal/toolsnaps" "github.com/github/github-mcp-server/pkg/translations" - gh "github.com/google/go-github/v79/github" + gh "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/shurcooL/githubv4" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func Test_ListProjects(t *testing.T) { - serverTool := ListProjects(translations.NullTranslationHelper) - tool := serverTool.Tool - require.NoError(t, toolsnaps.Test(tool.Name, tool)) - - assert.Equal(t, "list_projects", tool.Name) - assert.NotEmpty(t, tool.Description) - schema, ok := tool.InputSchema.(*jsonschema.Schema) - require.True(t, ok, "InputSchema should be a *jsonschema.Schema") - assert.Contains(t, schema.Properties, "owner") - assert.Contains(t, schema.Properties, "owner_type") - assert.Contains(t, schema.Properties, "query") - assert.Contains(t, schema.Properties, "per_page") - assert.ElementsMatch(t, schema.Required, []string{"owner", "owner_type"}) - - // API returns full ProjectV2 objects; we only need minimal fields for decoding. 
- orgProjects := []map[string]any{{"id": 1, "node_id": "NODE1", "title": "Org Project"}} - userProjects := []map[string]any{{"id": 2, "node_id": "NODE2", "title": "User Project"}} - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]interface{} - expectError bool - expectedLength int - expectedErrMsg string - }{ - { - name: "success organization", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetOrgsProjectsV2: mockResponse(t, http.StatusOK, orgProjects), - }), - requestArgs: map[string]interface{}{ - "owner": "octo-org", - "owner_type": "org", - }, - expectError: false, - expectedLength: 1, - }, - { - name: "success user", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetUsersProjectsV2ByUsername: mockResponse(t, http.StatusOK, userProjects), - }), - requestArgs: map[string]interface{}{ - "owner": "octocat", - "owner_type": "user", - }, - expectError: false, - expectedLength: 1, - }, - { - name: "success organization with pagination & query", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetOrgsProjectsV2: expectQueryParams(t, map[string]string{ - "per_page": "50", - "q": "roadmap", - }).andThen(mockResponse(t, http.StatusOK, orgProjects)), - }), - requestArgs: map[string]interface{}{ - "owner": "octo-org", - "owner_type": "org", - "per_page": float64(50), - "query": "roadmap", - }, - expectError: false, - expectedLength: 1, - }, - { - name: "api error", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetOrgsProjectsV2: mockResponse(t, http.StatusInternalServerError, map[string]string{"message": "boom"}), - }), - requestArgs: map[string]interface{}{ - "owner": "octo-org", - "owner_type": "org", - }, - expectError: true, - expectedErrMsg: "failed to list projects", - }, - { - name: "missing owner", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ - 
"owner_type": "org", - }, - expectError: true, - }, - { - name: "missing owner_type", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ - "owner": "octo-org", - }, - expectError: true, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - client := gh.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := serverTool.Handler(deps) - request := createMCPRequest(tc.requestArgs) - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - if tc.expectError { - require.True(t, result.IsError) - text := getTextResult(t, result).Text - if tc.expectedErrMsg != "" { - assert.Contains(t, text, tc.expectedErrMsg) - } - if tc.name == "missing owner" { - assert.Contains(t, text, "missing required parameter: owner") - } - if tc.name == "missing owner_type" { - assert.Contains(t, text, "missing required parameter: owner_type") - } - return - } - - require.False(t, result.IsError) - textContent := getTextResult(t, result) - var response map[string]any - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - projects, ok := response["projects"].([]interface{}) - require.True(t, ok) - assert.Equal(t, tc.expectedLength, len(projects)) - // pageInfo should exist - _, hasPageInfo := response["pageInfo"].(map[string]interface{}) - assert.True(t, hasPageInfo) - }) - } -} - -func Test_GetProject(t *testing.T) { - serverTool := GetProject(translations.NullTranslationHelper) - tool := serverTool.Tool - require.NoError(t, toolsnaps.Test(tool.Name, tool)) - - assert.Equal(t, "get_project", tool.Name) - assert.NotEmpty(t, tool.Description) - schema, ok := tool.InputSchema.(*jsonschema.Schema) - require.True(t, ok, "InputSchema should be a *jsonschema.Schema") - assert.Contains(t, schema.Properties, "project_number") - assert.Contains(t, schema.Properties, "owner") - assert.Contains(t, 
schema.Properties, "owner_type") - assert.ElementsMatch(t, schema.Required, []string{"project_number", "owner", "owner_type"}) - - project := map[string]any{"id": 123, "title": "Project Title"} - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]interface{} - expectError bool - expectedErrMsg string - }{ - { - name: "success organization project fetch", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetOrgsProjectsV2ByProject: mockResponse(t, http.StatusOK, project), - }), - requestArgs: map[string]interface{}{ - "project_number": float64(123), - "owner": "octo-org", - "owner_type": "org", - }, - expectError: false, - }, - { - name: "success user project fetch", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetUsersProjectsV2ByUsernameByProject: mockResponse(t, http.StatusOK, project), - }), - requestArgs: map[string]interface{}{ - "project_number": float64(456), - "owner": "octocat", - "owner_type": "user", - }, - expectError: false, - }, - { - name: "api error", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetOrgsProjectsV2ByProject: mockResponse(t, http.StatusInternalServerError, map[string]string{"message": "boom"}), - }), - requestArgs: map[string]interface{}{ - "project_number": float64(999), - "owner": "octo-org", - "owner_type": "org", - }, - expectError: true, - expectedErrMsg: "failed to get project", - }, - { - name: "missing project_number", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ - "owner": "octo-org", - "owner_type": "org", - }, - expectError: true, - }, - { - name: "missing owner", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ - "project_number": float64(123), - "owner_type": "org", - }, - expectError: true, - }, - { - name: "missing owner_type", - mockedClient: 
MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ - "project_number": float64(123), - "owner": "octo-org", - }, - expectError: true, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - client := gh.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := serverTool.Handler(deps) - request := createMCPRequest(tc.requestArgs) - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - if tc.expectError { - require.True(t, result.IsError) - text := getTextResult(t, result).Text - if tc.expectedErrMsg != "" { - assert.Contains(t, text, tc.expectedErrMsg) - } - if tc.name == "missing project_number" { - assert.Contains(t, text, "missing required parameter: project_number") - } - if tc.name == "missing owner" { - assert.Contains(t, text, "missing required parameter: owner") - } - if tc.name == "missing owner_type" { - assert.Contains(t, text, "missing required parameter: owner_type") - } - return - } - - require.False(t, result.IsError) - textContent := getTextResult(t, result) - var arr map[string]any - err = json.Unmarshal([]byte(textContent.Text), &arr) - require.NoError(t, err) - }) - } -} - -func Test_ListProjectFields(t *testing.T) { - serverTool := ListProjectFields(translations.NullTranslationHelper) - tool := serverTool.Tool - require.NoError(t, toolsnaps.Test(tool.Name, tool)) - - assert.Equal(t, "list_project_fields", tool.Name) - assert.NotEmpty(t, tool.Description) - schema, ok := tool.InputSchema.(*jsonschema.Schema) - require.True(t, ok, "InputSchema should be a *jsonschema.Schema") - assert.Contains(t, schema.Properties, "owner_type") - assert.Contains(t, schema.Properties, "owner") - assert.Contains(t, schema.Properties, "project_number") - assert.Contains(t, schema.Properties, "per_page") - assert.ElementsMatch(t, schema.Required, []string{"owner_type", "owner", "project_number"}) - - orgFields := 
[]map[string]any{{"id": 101, "name": "Status", "data_type": "single_select"}} - userFields := []map[string]any{{"id": 201, "name": "Priority", "data_type": "single_select"}} - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]interface{} - expectError bool - expectedLength int - expectedErrMsg string - }{ - { - name: "success organization fields", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetOrgsProjectsV2FieldsByProject: mockResponse(t, http.StatusOK, orgFields), - }), - requestArgs: map[string]interface{}{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(123), - }, - expectedLength: 1, - }, - { - name: "success user fields with per_page override", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetUsersProjectsV2FieldsByUsernameByProject: expectQueryParams(t, map[string]string{ - "per_page": "50", - }).andThen(mockResponse(t, http.StatusOK, userFields)), - }), - requestArgs: map[string]interface{}{ - "owner": "octocat", - "owner_type": "user", - "project_number": float64(456), - "per_page": float64(50), - }, - expectedLength: 1, - }, - { - name: "api error", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetOrgsProjectsV2FieldsByProject: mockResponse(t, http.StatusInternalServerError, map[string]string{"message": "boom"}), - }), - requestArgs: map[string]interface{}{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(789), - }, - expectError: true, - expectedErrMsg: "failed to list project fields", - }, - { - name: "missing owner", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ - "owner_type": "org", - "project_number": 10, - }, - expectError: true, - }, - { - name: "missing owner_type", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ - "owner": "octo-org", - 
"project_number": 10, - }, - expectError: true, - }, - { - name: "missing project_number", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ - "owner": "octo-org", - "owner_type": "org", - }, - expectError: true, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - client := gh.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := serverTool.Handler(deps) - request := createMCPRequest(tc.requestArgs) - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - if tc.expectError { - require.True(t, result.IsError) - text := getTextResult(t, result).Text - if tc.expectedErrMsg != "" { - assert.Contains(t, text, tc.expectedErrMsg) - } - if tc.name == "missing owner" { - assert.Contains(t, text, "missing required parameter: owner") - } - if tc.name == "missing owner_type" { - assert.Contains(t, text, "missing required parameter: owner_type") - } - if tc.name == "missing project_number" { - assert.Contains(t, text, "missing required parameter: project_number") - } - return - } - - require.False(t, result.IsError) - textContent := getTextResult(t, result) - var response map[string]any - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - fields, ok := response["fields"].([]interface{}) - require.True(t, ok) - assert.Equal(t, tc.expectedLength, len(fields)) - _, hasPageInfo := response["pageInfo"].(map[string]interface{}) - assert.True(t, hasPageInfo) - }) - } -} - -func Test_GetProjectField(t *testing.T) { - serverTool := GetProjectField(translations.NullTranslationHelper) - tool := serverTool.Tool - require.NoError(t, toolsnaps.Test(tool.Name, tool)) - - assert.Equal(t, "get_project_field", tool.Name) - assert.NotEmpty(t, tool.Description) - schema, ok := tool.InputSchema.(*jsonschema.Schema) - require.True(t, ok, "InputSchema should be a *jsonschema.Schema") - 
assert.Contains(t, schema.Properties, "owner_type") - assert.Contains(t, schema.Properties, "owner") - assert.Contains(t, schema.Properties, "project_number") - assert.Contains(t, schema.Properties, "field_id") - assert.ElementsMatch(t, schema.Required, []string{"owner_type", "owner", "project_number", "field_id"}) - - orgField := map[string]any{"id": 101, "name": "Status", "dataType": "single_select"} - userField := map[string]any{"id": 202, "name": "Priority", "dataType": "single_select"} - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - expectedID int - }{ - { - name: "success organization field", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetOrgsProjectsV2FieldsByProjectByFieldID: mockResponse(t, http.StatusOK, orgField), - }), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(123), - "field_id": float64(101), - }, - expectedID: 101, - }, - { - name: "success user field", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetUsersProjectsV2FieldsByUsernameByProjectByFieldID: mockResponse(t, http.StatusOK, userField), - }), - requestArgs: map[string]any{ - "owner": "octocat", - "owner_type": "user", - "project_number": float64(456), - "field_id": float64(202), - }, - expectedID: 202, - }, - { - name: "api error", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetOrgsProjectsV2FieldsByProjectByFieldID: mockResponse(t, http.StatusInternalServerError, map[string]string{"message": "boom"}), - }), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(789), - "field_id": float64(303), - }, - expectError: true, - expectedErrMsg: "failed to get project field", - }, - { - name: "missing owner", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ 
- "owner_type": "org", - "project_number": float64(10), - "field_id": float64(1), - }, - expectError: true, - }, - { - name: "missing owner_type", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "project_number": float64(10), - "field_id": float64(1), - }, - expectError: true, - }, - { - name: "missing project_number", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "field_id": float64(1), - }, - expectError: true, - }, - { - name: "missing field_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(10), - }, - expectError: true, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - client := gh.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := serverTool.Handler(deps) - request := createMCPRequest(tc.requestArgs) - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - if tc.expectError { - require.True(t, result.IsError) - text := getTextResult(t, result).Text - if tc.expectedErrMsg != "" { - assert.Contains(t, text, tc.expectedErrMsg) - } - if tc.name == "missing owner" { - assert.Contains(t, text, "missing required parameter: owner") - } - if tc.name == "missing owner_type" { - assert.Contains(t, text, "missing required parameter: owner_type") - } - if tc.name == "missing project_number" { - assert.Contains(t, text, "missing required parameter: project_number") - } - if tc.name == "missing field_id" { - assert.Contains(t, text, "missing required parameter: field_id") - } - return - } - - require.False(t, result.IsError) - textContent := getTextResult(t, result) - var field map[string]any - err = json.Unmarshal([]byte(textContent.Text), 
&field) - require.NoError(t, err) - if tc.expectedID != 0 { - assert.Equal(t, float64(tc.expectedID), field["id"]) - } - }) - } -} - -func Test_ListProjectItems(t *testing.T) { - serverTool := ListProjectItems(translations.NullTranslationHelper) - tool := serverTool.Tool - require.NoError(t, toolsnaps.Test(tool.Name, tool)) - - assert.Equal(t, "list_project_items", tool.Name) - assert.NotEmpty(t, tool.Description) - schema, ok := tool.InputSchema.(*jsonschema.Schema) - require.True(t, ok, "InputSchema should be a *jsonschema.Schema") - assert.Contains(t, schema.Properties, "owner_type") - assert.Contains(t, schema.Properties, "owner") - assert.Contains(t, schema.Properties, "project_number") - assert.Contains(t, schema.Properties, "query") - assert.Contains(t, schema.Properties, "per_page") - assert.Contains(t, schema.Properties, "fields") - assert.ElementsMatch(t, schema.Required, []string{"owner_type", "owner", "project_number"}) - - orgItems := []map[string]any{ - {"id": 301, "content_type": "Issue", "project_node_id": "PR_1", "fields": []map[string]any{ - {"id": 123, "name": "Status", "data_type": "single_select", "value": "value1"}, - {"id": 456, "name": "Priority", "data_type": "single_select", "value": "value2"}, - }}, - } - userItems := []map[string]any{ - {"id": 401, "content_type": "PullRequest", "project_node_id": "PR_2"}, - {"id": 402, "content_type": "DraftIssue", "project_node_id": "PR_3"}, - } - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]interface{} - expectError bool - expectedLength int - expectedErrMsg string - }{ - { - name: "success organization items", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetOrgsProjectsV2ItemsByProject: mockResponse(t, http.StatusOK, orgItems), - }), - requestArgs: map[string]interface{}{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(123), - }, - expectedLength: 1, - }, - { - name: "success organization items with 
fields", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetOrgsProjectsV2ItemsByProject: expectQueryParams(t, map[string]string{ - "fields": "123,456,789", - "per_page": "50", - }).andThen(mockResponse(t, http.StatusOK, orgItems)), - }), - requestArgs: map[string]interface{}{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(123), - "fields": []interface{}{"123", "456", "789"}, - }, - expectedLength: 1, - }, - { - name: "success user items", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetUsersProjectsV2ItemsByUsernameByProject: mockResponse(t, http.StatusOK, userItems), - }), - requestArgs: map[string]interface{}{ - "owner": "octocat", - "owner_type": "user", - "project_number": float64(456), - }, - expectedLength: 2, - }, - { - name: "success with pagination and query", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetOrgsProjectsV2ItemsByProject: expectQueryParams(t, map[string]string{ - "per_page": "50", - "q": "bug", - }).andThen(mockResponse(t, http.StatusOK, orgItems)), - }), - requestArgs: map[string]interface{}{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(123), - "per_page": float64(50), - "query": "bug", - }, - expectedLength: 1, - }, - { - name: "api error", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetOrgsProjectsV2ItemsByProject: mockResponse(t, http.StatusInternalServerError, map[string]string{"message": "boom"}), - }), - requestArgs: map[string]interface{}{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(789), - }, - expectError: true, - expectedErrMsg: ProjectListFailedError, - }, - { - name: "missing owner", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ - "owner_type": "org", - "project_number": float64(10), - }, - expectError: true, - }, - { - name: "missing owner_type", - mockedClient: 
MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ - "owner": "octo-org", - "project_number": float64(10), - }, - expectError: true, - }, - { - name: "missing project_number", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ - "owner": "octo-org", - "owner_type": "org", - }, - expectError: true, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - client := gh.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := serverTool.Handler(deps) - request := createMCPRequest(tc.requestArgs) - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - if tc.expectError { - require.True(t, result.IsError) - text := getTextResult(t, result).Text - if tc.expectedErrMsg != "" { - assert.Contains(t, text, tc.expectedErrMsg) - } - if tc.name == "missing owner" { - assert.Contains(t, text, "missing required parameter: owner") - } - if tc.name == "missing owner_type" { - assert.Contains(t, text, "missing required parameter: owner_type") - } - if tc.name == "missing project_number" { - assert.Contains(t, text, "missing required parameter: project_number") - } - return - } - - require.False(t, result.IsError) - textContent := getTextResult(t, result) - var response map[string]any - err = json.Unmarshal([]byte(textContent.Text), &response) - require.NoError(t, err) - items, ok := response["items"].([]interface{}) - require.True(t, ok) - assert.Equal(t, tc.expectedLength, len(items)) - _, hasPageInfo := response["pageInfo"].(map[string]interface{}) - assert.True(t, hasPageInfo) - }) - } -} - -func Test_GetProjectItem(t *testing.T) { - serverTool := GetProjectItem(translations.NullTranslationHelper) - tool := serverTool.Tool - require.NoError(t, toolsnaps.Test(tool.Name, tool)) - - assert.Equal(t, "get_project_item", tool.Name) - assert.NotEmpty(t, tool.Description) - 
schema, ok := tool.InputSchema.(*jsonschema.Schema) - require.True(t, ok, "InputSchema should be a *jsonschema.Schema") - assert.Contains(t, schema.Properties, "owner_type") - assert.Contains(t, schema.Properties, "owner") - assert.Contains(t, schema.Properties, "project_number") - assert.Contains(t, schema.Properties, "item_id") - assert.Contains(t, schema.Properties, "fields") - assert.ElementsMatch(t, schema.Required, []string{"owner_type", "owner", "project_number", "item_id"}) - - orgItem := map[string]any{ - "id": 301, - "content_type": "Issue", - "project_node_id": "PR_1", - "creator": map[string]any{"login": "octocat"}, - } - userItem := map[string]any{ - "id": 501, - "content_type": "PullRequest", - "project_node_id": "PR_2", - "creator": map[string]any{"login": "jane"}, - } - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - expectedID int - }{ - { - name: "success organization item", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetOrgsProjectsV2ItemsByProjectByItemID: mockResponse(t, http.StatusOK, orgItem), - }), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(123), - "item_id": float64(301), - }, - expectedID: 301, - }, - { - name: "success organization item with fields", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetOrgsProjectsV2ItemsByProjectByItemID: expectQueryParams(t, map[string]string{ - "fields": "123,456", - }).andThen(mockResponse(t, http.StatusOK, orgItem)), - }), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(123), - "item_id": float64(301), - "fields": []interface{}{"123", "456"}, - }, - expectedID: 301, - }, - { - name: "success user item", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetUsersProjectsV2ItemsByUsernameByProjectByItemID: mockResponse(t, 
http.StatusOK, userItem), - }), - requestArgs: map[string]any{ - "owner": "octocat", - "owner_type": "user", - "project_number": float64(456), - "item_id": float64(501), - }, - expectedID: 501, - }, - { - name: "api error", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - GetOrgsProjectsV2ItemsByProjectByItemID: mockResponse(t, http.StatusInternalServerError, map[string]string{"message": "boom"}), - }), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(789), - "item_id": float64(999), - }, - expectError: true, - expectedErrMsg: "failed to get project item", - }, - { - name: "missing owner", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner_type": "org", - "project_number": float64(10), - "item_id": float64(1), - }, - expectError: true, - }, - { - name: "missing owner_type", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "project_number": float64(10), - "item_id": float64(1), - }, - expectError: true, - }, - { - name: "missing project_number", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "item_id": float64(1), - }, - expectError: true, - }, - { - name: "missing item_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(10), - }, - expectError: true, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - client := gh.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := serverTool.Handler(deps) - request := createMCPRequest(tc.requestArgs) - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - if 
tc.expectError { - require.True(t, result.IsError) - text := getTextResult(t, result).Text - if tc.expectedErrMsg != "" { - assert.Contains(t, text, tc.expectedErrMsg) - } - if tc.name == "missing owner" { - assert.Contains(t, text, "missing required parameter: owner") - } - if tc.name == "missing owner_type" { - assert.Contains(t, text, "missing required parameter: owner_type") - } - if tc.name == "missing project_number" { - assert.Contains(t, text, "missing required parameter: project_number") - } - if tc.name == "missing item_id" { - assert.Contains(t, text, "missing required parameter: item_id") - } - return - } - - require.False(t, result.IsError) - textContent := getTextResult(t, result) - var item map[string]any - err = json.Unmarshal([]byte(textContent.Text), &item) - require.NoError(t, err) - if tc.expectedID != 0 { - assert.Equal(t, float64(tc.expectedID), item["id"]) - } - }) - } -} - -func Test_AddProjectItem(t *testing.T) { - serverTool := AddProjectItem(translations.NullTranslationHelper) - tool := serverTool.Tool - require.NoError(t, toolsnaps.Test(tool.Name, tool)) - - assert.Equal(t, "add_project_item", tool.Name) - assert.NotEmpty(t, tool.Description) - schema, ok := tool.InputSchema.(*jsonschema.Schema) - require.True(t, ok, "InputSchema should be a *jsonschema.Schema") - assert.Contains(t, schema.Properties, "owner_type") - assert.Contains(t, schema.Properties, "owner") - assert.Contains(t, schema.Properties, "project_number") - assert.Contains(t, schema.Properties, "item_type") - assert.Contains(t, schema.Properties, "item_id") - assert.ElementsMatch(t, schema.Required, []string{"owner_type", "owner", "project_number", "item_type", "item_id"}) - - orgItem := map[string]any{ - "id": 601, - "content_type": "Issue", - "creator": map[string]any{ - "login": "octocat", - "id": 1, - "html_url": "https://github.com/octocat", - "avatar_url": "https://avatars.githubusercontent.com/u/1?v=4", - }, - } - - userItem := map[string]any{ - "id": 701, - 
"content_type": "PullRequest", - "creator": map[string]any{ - "login": "hubot", - "id": 2, - "html_url": "https://github.com/hubot", - "avatar_url": "https://avatars.githubusercontent.com/u/2?v=4", - }, - } - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - expectedID int - expectedContentType string - expectedCreatorLogin string - }{ - { - name: "success organization issue", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PostOrgsProjectsV2ItemsByProject: expectRequestBody(t, map[string]any{ - "type": "Issue", - "id": float64(9876), - }).andThen(mockResponse(t, http.StatusCreated, orgItem)), - }), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(321), - "item_type": "issue", - "item_id": float64(9876), - }, - expectedID: 601, - expectedContentType: "Issue", - expectedCreatorLogin: "octocat", - }, - { - name: "success user pull request", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PostUsersProjectsV2ItemsByUsernameByProject: expectRequestBody(t, map[string]any{ - "type": "PullRequest", - "id": float64(7654), - }).andThen(mockResponse(t, http.StatusCreated, userItem)), - }), - requestArgs: map[string]any{ - "owner": "octocat", - "owner_type": "user", - "project_number": float64(222), - "item_type": "pull_request", - "item_id": float64(7654), - }, - expectedID: 701, - expectedContentType: "PullRequest", - expectedCreatorLogin: "hubot", - }, - { - name: "api error", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PostOrgsProjectsV2ItemsByProject: mockResponse(t, http.StatusInternalServerError, map[string]string{"message": "boom"}), - }), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(999), - "item_type": "issue", - "item_id": float64(8888), - }, - expectError: true, - expectedErrMsg: 
ProjectAddFailedError, - }, - { - name: "missing owner", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner_type": "org", - "project_number": float64(1), - "item_type": "Issue", - "item_id": float64(10), - }, - expectError: true, - }, - { - name: "missing owner_type", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "project_number": float64(1), - "item_type": "Issue", - "item_id": float64(10), - }, - expectError: true, - }, - { - name: "missing project_number", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "item_type": "Issue", - "item_id": float64(10), - }, - expectError: true, - }, - { - name: "missing item_type", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(1), - "item_id": float64(10), - }, - expectError: true, - }, - { - name: "missing item_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(1), - "item_type": "Issue", - }, - expectError: true, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - client := gh.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := serverTool.Handler(deps) - request := createMCPRequest(tc.requestArgs) - - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - require.NoError(t, err) - - if tc.expectError { - require.True(t, result.IsError) - text := getTextResult(t, result).Text - if tc.expectedErrMsg != "" { - assert.Contains(t, text, tc.expectedErrMsg) - } - switch tc.name { - case "missing owner": - assert.Contains(t, text, "missing 
required parameter: owner") - case "missing owner_type": - assert.Contains(t, text, "missing required parameter: owner_type") - case "missing project_number": - assert.Contains(t, text, "missing required parameter: project_number") - case "missing item_type": - assert.Contains(t, text, "missing required parameter: item_type") - case "missing item_id": - assert.Contains(t, text, "missing required parameter: item_id") - // case "api error": - // assert.Contains(t, text, ProjectAddFailedError) - } - return - } - - require.False(t, result.IsError) - textContent := getTextResult(t, result) - var item map[string]any - require.NoError(t, json.Unmarshal([]byte(textContent.Text), &item)) - if tc.expectedID != 0 { - assert.Equal(t, float64(tc.expectedID), item["id"]) - } - if tc.expectedContentType != "" { - assert.Equal(t, tc.expectedContentType, item["content_type"]) - } - if tc.expectedCreatorLogin != "" { - creator, ok := item["creator"].(map[string]any) - require.True(t, ok) - assert.Equal(t, tc.expectedCreatorLogin, creator["login"]) - } - }) - } -} - -func Test_UpdateProjectItem(t *testing.T) { - serverTool := UpdateProjectItem(translations.NullTranslationHelper) - tool := serverTool.Tool - require.NoError(t, toolsnaps.Test(tool.Name, tool)) - - assert.Equal(t, "update_project_item", tool.Name) - assert.NotEmpty(t, tool.Description) - schema, ok := tool.InputSchema.(*jsonschema.Schema) - require.True(t, ok, "InputSchema should be a *jsonschema.Schema") - assert.Contains(t, schema.Properties, "owner_type") - assert.Contains(t, schema.Properties, "owner") - assert.Contains(t, schema.Properties, "project_number") - assert.Contains(t, schema.Properties, "item_id") - assert.Contains(t, schema.Properties, "updated_field") - assert.ElementsMatch(t, schema.Required, []string{"owner_type", "owner", "project_number", "item_id", "updated_field"}) - - orgUpdatedItem := map[string]any{ - "id": 801, - "content_type": "Issue", - } - userUpdatedItem := map[string]any{ - "id": 802, - 
"content_type": "PullRequest", - } - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - expectedID int - }{ - { - name: "success organization update", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PatchOrgsProjectsV2ItemsByProjectByItemID: expectRequestBody(t, map[string]any{ - "fields": []any{map[string]any{"id": float64(101), "value": "Done"}}, - }).andThen(mockResponse(t, http.StatusOK, orgUpdatedItem)), - }), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(1001), - "item_id": float64(5555), - "updated_field": map[string]any{ - "id": float64(101), - "value": "Done", - }, - }, - expectedID: 801, - }, - { - name: "success user update", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PatchUsersProjectsV2ItemsByUsernameByProjectByItemID: expectRequestBody(t, map[string]any{ - "fields": []any{map[string]any{"id": float64(202), "value": float64(42)}}, - }).andThen(mockResponse(t, http.StatusOK, userUpdatedItem)), - }), - requestArgs: map[string]any{ - "owner": "octocat", - "owner_type": "user", - "project_number": float64(2002), - "item_id": float64(6666), - "updated_field": map[string]any{ - "id": float64(202), - "value": float64(42), - }, - }, - expectedID: 802, - }, - { - name: "api error", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PatchOrgsProjectsV2ItemsByProjectByItemID: mockResponse(t, http.StatusInternalServerError, map[string]string{"message": "boom"}), - }), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(3003), - "item_id": float64(7777), - "updated_field": map[string]any{ - "id": float64(303), - "value": "In Progress", - }, - }, - expectError: true, - expectedErrMsg: "failed to update a project item", - }, - { - name: "missing owner", - mockedClient: 
MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner_type": "org", - "project_number": float64(1), - "item_id": float64(2), - "updated_field": map[string]any{ - "id": float64(1), - "value": "X", - }, - }, - expectError: true, - }, - { - name: "missing owner_type", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "project_number": float64(1), - "item_id": float64(2), - "updated_field": map[string]any{ - "id": float64(1), - "value": "X", - }, - }, - expectError: true, - }, - { - name: "missing project_number", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "item_id": float64(2), - "updated_field": map[string]any{ - "id": float64(1), - "value": "X", - }, - }, - expectError: true, - }, - { - name: "missing item_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(1), - "updated_field": map[string]any{ - "id": float64(1), - "value": "X", - }, - }, - expectError: true, - }, - { - name: "missing updated_field", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(1), - "item_id": float64(2), - }, - expectError: true, - }, - { - name: "updated_field not object", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(1), - "item_id": float64(2), - "updated_field": "not-an-object", - }, - expectError: true, - }, - { - name: "updated_field missing id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": 
"octo-org", - "owner_type": "org", - "project_number": float64(1), - "item_id": float64(2), - "updated_field": map[string]any{}, - }, - expectError: true, - }, - { - name: "updated_field missing value", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(1), - "item_id": float64(2), - "updated_field": map[string]any{ - "id": float64(9), - }, - }, - expectError: true, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - client := gh.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := serverTool.Handler(deps) - request := createMCPRequest(tc.requestArgs) - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - if tc.expectError { - require.True(t, result.IsError) - text := getTextResult(t, result).Text - if tc.expectedErrMsg != "" { - assert.Contains(t, text, tc.expectedErrMsg) - } - switch tc.name { - case "missing owner": - assert.Contains(t, text, "missing required parameter: owner") - case "missing owner_type": - assert.Contains(t, text, "missing required parameter: owner_type") - case "missing project_number": - assert.Contains(t, text, "missing required parameter: project_number") - case "missing item_id": - assert.Contains(t, text, "missing required parameter: item_id") - case "missing updated_field": - assert.Contains(t, text, "missing required parameter: updated_field") - case "updated_field not object": - assert.Contains(t, text, "field_value must be an object") - case "updated_field missing id": - assert.Contains(t, text, "updated_field.id is required") - case "updated_field missing value": - assert.Contains(t, text, "updated_field.value is required") - } - return - } - - require.False(t, result.IsError) - textContent := getTextResult(t, result) - var item map[string]any - require.NoError(t, 
json.Unmarshal([]byte(textContent.Text), &item)) - if tc.expectedID != 0 { - assert.Equal(t, float64(tc.expectedID), item["id"]) - } - }) - } -} - -func Test_DeleteProjectItem(t *testing.T) { - serverTool := DeleteProjectItem(translations.NullTranslationHelper) - tool := serverTool.Tool - require.NoError(t, toolsnaps.Test(tool.Name, tool)) - - assert.Equal(t, "delete_project_item", tool.Name) - assert.NotEmpty(t, tool.Description) - schema, ok := tool.InputSchema.(*jsonschema.Schema) - require.True(t, ok, "InputSchema should be a *jsonschema.Schema") - assert.Contains(t, schema.Properties, "owner_type") - assert.Contains(t, schema.Properties, "owner") - assert.Contains(t, schema.Properties, "project_number") - assert.Contains(t, schema.Properties, "item_id") - assert.ElementsMatch(t, schema.Required, []string{"owner_type", "owner", "project_number", "item_id"}) - - tests := []struct { - name string - mockedClient *http.Client - requestArgs map[string]any - expectError bool - expectedErrMsg string - expectedText string - }{ - { - name: "success organization delete", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - DeleteOrgsProjectsV2ItemsByProjectByItemID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusNoContent) - }), - }), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(123), - "item_id": float64(555), - }, - expectedText: "project item successfully deleted", - }, - { - name: "success user delete", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - DeleteUsersProjectsV2ItemsByUsernameByProjectByItemID: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusNoContent) - }), - }), - requestArgs: map[string]any{ - "owner": "octocat", - "owner_type": "user", - "project_number": float64(456), - "item_id": float64(777), - }, - expectedText: "project item successfully deleted", - }, - { - 
name: "api error", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - DeleteOrgsProjectsV2ItemsByProjectByItemID: mockResponse(t, http.StatusInternalServerError, map[string]string{"message": "boom"}), - }), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(321), - "item_id": float64(999), - }, - expectError: true, - expectedErrMsg: ProjectDeleteFailedError, - }, - { - name: "missing owner", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner_type": "org", - "project_number": float64(1), - "item_id": float64(10), - }, - expectError: true, - }, - { - name: "missing owner_type", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "project_number": float64(1), - "item_id": float64(10), - }, - expectError: true, - }, - { - name: "missing project_number", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "item_id": float64(10), - }, - expectError: true, - }, - { - name: "missing item_id", - mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]any{ - "owner": "octo-org", - "owner_type": "org", - "project_number": float64(1), - }, - expectError: true, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - client := gh.NewClient(tc.mockedClient) - deps := BaseDeps{ - Client: client, - } - handler := serverTool.Handler(deps) - request := createMCPRequest(tc.requestArgs) - result, err := handler(ContextWithDeps(context.Background(), deps), &request) - - require.NoError(t, err) - if tc.expectError { - require.True(t, result.IsError) - text := getTextResult(t, result).Text - if tc.expectedErrMsg != "" { - assert.Contains(t, text, tc.expectedErrMsg) - } - switch tc.name { - case "missing 
owner": - assert.Contains(t, text, "missing required parameter: owner") - case "missing owner_type": - assert.Contains(t, text, "missing required parameter: owner_type") - case "missing project_number": - assert.Contains(t, text, "missing required parameter: project_number") - case "missing item_id": - assert.Contains(t, text, "missing required parameter: item_id") - } - return - } - - require.False(t, result.IsError) - text := getTextResult(t, result).Text - assert.Contains(t, text, tc.expectedText) - }) - } -} - // Tests for consolidated project tools func Test_ProjectsList(t *testing.T) { @@ -1639,7 +123,7 @@ func Test_ProjectsList_ListProjects(t *testing.T) { var response map[string]any err = json.Unmarshal([]byte(textContent.Text), &response) require.NoError(t, err) - projects, ok := response["projects"].([]interface{}) + projects, ok := response["projects"].([]any) require.True(t, ok) assert.Equal(t, tc.expectedLength, len(projects)) }) @@ -1676,7 +160,7 @@ func Test_ProjectsList_ListProjectFields(t *testing.T) { var response map[string]any err = json.Unmarshal([]byte(textContent.Text), &response) require.NoError(t, err) - fieldsList, ok := response["fields"].([]interface{}) + fieldsList, ok := response["fields"].([]any) require.True(t, ok) assert.Equal(t, 1, len(fieldsList)) }) @@ -1732,7 +216,7 @@ func Test_ProjectsList_ListProjectItems(t *testing.T) { var response map[string]any err = json.Unmarshal([]byte(textContent.Text), &response) require.NoError(t, err) - itemsList, ok := response["items"].([]interface{}) + itemsList, ok := response["items"].([]any) require.True(t, ok) assert.Equal(t, 1, len(itemsList)) }) @@ -1752,7 +236,7 @@ func Test_ProjectsGet(t *testing.T) { assert.Contains(t, inputSchema.Properties, "project_number") assert.Contains(t, inputSchema.Properties, "field_id") assert.Contains(t, inputSchema.Properties, "item_id") - assert.ElementsMatch(t, inputSchema.Required, []string{"method", "owner", "project_number"}) + assert.ElementsMatch(t, 
inputSchema.Required, []string{"method"}) } func Test_ProjectsGet_GetProject(t *testing.T) { @@ -2330,3 +814,209 @@ func Test_ProjectsWrite_DeleteProjectItem(t *testing.T) { assert.Contains(t, textContent.Text, "missing required parameter: item_id") }) } + +func Test_ProjectsList_ListProjectStatusUpdates(t *testing.T) { + toolDef := ProjectsList(translations.NullTranslationHelper) + + t.Run("success via consolidated tool", func(t *testing.T) { + // REST mock for detectOwnerType (when owner_type is omitted) + restClient := MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ + GetUsersProjectsV2ByUsernameByProject: mockResponse(t, http.StatusOK, map[string]any{"id": 1}), + }) + + // GQL mock for listProjectStatusUpdates + gqlMockedClient := githubv4mock.NewMockedHTTPClient( + githubv4mock.NewQueryMatcher( + statusUpdatesUserQuery{}, + map[string]any{ + "owner": githubv4.String("octocat"), + "projectNumber": githubv4.Int(1), + "first": githubv4.Int(50), + "after": (*githubv4.String)(nil), + }, + githubv4mock.DataResponse(map[string]any{ + "user": map[string]any{ + "projectV2": map[string]any{ + "statusUpdates": map[string]any{ + "nodes": []map[string]any{ + { + "id": "SU_1", + "body": "On track", + "status": "ON_TRACK", + "createdAt": "2026-01-15T10:00:00Z", + "startDate": "2026-01-01", + "targetDate": "2026-03-01", + "creator": map[string]any{"login": "octocat"}, + }, + }, + "pageInfo": map[string]any{ + "hasNextPage": false, + "hasPreviousPage": false, + "startCursor": "", + "endCursor": "", + }, + }, + }, + }, + }), + ), + ) + + gqlClient := githubv4.NewClient(gqlMockedClient) + deps := BaseDeps{ + Client: gh.NewClient(restClient), + GQLClient: gqlClient, + } + handler := toolDef.Handler(deps) + request := createMCPRequest(map[string]any{ + "method": "list_project_status_updates", + "owner": "octocat", + "project_number": float64(1), + }) + result, err := handler(ContextWithDeps(context.Background(), deps), &request) + + require.NoError(t, err) + 
require.False(t, result.IsError) + + textContent := getTextResult(t, result) + var response map[string]any + err = json.Unmarshal([]byte(textContent.Text), &response) + require.NoError(t, err) + updates, ok := response["statusUpdates"].([]any) + require.True(t, ok) + assert.Len(t, updates, 1) + }) +} + +func Test_ProjectsGet_GetProjectStatusUpdate(t *testing.T) { + toolDef := ProjectsGet(translations.NullTranslationHelper) + + t.Run("success via consolidated tool", func(t *testing.T) { + gqlMockedClient := githubv4mock.NewMockedHTTPClient( + githubv4mock.NewQueryMatcher( + statusUpdateNodeQuery{}, + map[string]any{ + "id": githubv4.ID("SU_abc123"), + }, + githubv4mock.DataResponse(map[string]any{ + "node": map[string]any{ + "id": "SU_abc123", + "body": "On track", + "status": "ON_TRACK", + "createdAt": "2026-01-15T10:00:00Z", + "startDate": "2026-01-01", + "targetDate": "2026-03-01", + "creator": map[string]any{"login": "octocat"}, + }, + }), + ), + ) + + gqlClient := githubv4.NewClient(gqlMockedClient) + deps := BaseDeps{ + GQLClient: gqlClient, + } + handler := toolDef.Handler(deps) + request := createMCPRequest(map[string]any{ + "method": "get_project_status_update", + "owner": "octocat", + "project_number": float64(1), + "status_update_id": "SU_abc123", + }) + result, err := handler(ContextWithDeps(context.Background(), deps), &request) + + require.NoError(t, err) + require.False(t, result.IsError) + + textContent := getTextResult(t, result) + var response map[string]any + err = json.Unmarshal([]byte(textContent.Text), &response) + require.NoError(t, err) + assert.Equal(t, "SU_abc123", response["id"]) + assert.Equal(t, "On track", response["body"]) + }) +} + +func Test_ProjectsWrite_CreateProjectStatusUpdate(t *testing.T) { + toolDef := ProjectsWrite(translations.NullTranslationHelper) + + t.Run("success via consolidated tool", func(t *testing.T) { + bodyStr := githubv4.String("Consolidated test") + statusStr := githubv4.String("AT_RISK") + + gqlMockedClient := 
githubv4mock.NewMockedHTTPClient( + // Mock project ID query for user + githubv4mock.NewQueryMatcher( + struct { + User struct { + ProjectV2 struct { + ID githubv4.ID + } `graphql:"projectV2(number: $projectNumber)"` + } `graphql:"user(login: $owner)"` + }{}, + map[string]any{ + "owner": githubv4.String("octocat"), + "projectNumber": githubv4.Int(3), + }, + githubv4mock.DataResponse(map[string]any{ + "user": map[string]any{ + "projectV2": map[string]any{ + "id": "PVT_project3", + }, + }, + }), + ), + // Mock createProjectV2StatusUpdate mutation + githubv4mock.NewMutationMatcher( + struct { + CreateProjectV2StatusUpdate struct { + StatusUpdate statusUpdateNode + } `graphql:"createProjectV2StatusUpdate(input: $input)"` + }{}, + CreateProjectV2StatusUpdateInput{ + ProjectID: githubv4.ID("PVT_project3"), + Body: &bodyStr, + Status: &statusStr, + }, + nil, + githubv4mock.DataResponse(map[string]any{ + "createProjectV2StatusUpdate": map[string]any{ + "statusUpdate": map[string]any{ + "id": "PVTSU_su003", + "body": "Consolidated test", + "status": "AT_RISK", + "createdAt": "2026-02-09T12:00:00Z", + "creator": map[string]any{"login": "octocat"}, + }, + }, + }), + ), + ) + + gqlClient := githubv4.NewClient(gqlMockedClient) + deps := BaseDeps{ + GQLClient: gqlClient, + } + handler := toolDef.Handler(deps) + request := createMCPRequest(map[string]any{ + "method": "create_project_status_update", + "owner": "octocat", + "owner_type": "user", + "project_number": float64(3), + "body": "Consolidated test", + "status": "AT_RISK", + }) + result, err := handler(ContextWithDeps(context.Background(), deps), &request) + + require.NoError(t, err) + require.False(t, result.IsError) + + textContent := getTextResult(t, result) + var response map[string]any + err = json.Unmarshal([]byte(textContent.Text), &response) + require.NoError(t, err) + assert.Equal(t, "PVTSU_su003", response["id"]) + assert.Equal(t, "Consolidated test", response["body"]) + assert.Equal(t, "AT_RISK", 
response["status"]) + }) +} diff --git a/pkg/github/pullrequests.go b/pkg/github/pullrequests.go index f546865b21..58edc07dc1 100644 --- a/pkg/github/pullrequests.go +++ b/pkg/github/pullrequests.go @@ -8,14 +8,13 @@ import ( "net/http" "github.com/go-viper/mapstructure/v2" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/modelcontextprotocol/go-sdk/mcp" "github.com/shurcooL/githubv4" ghErrors "github.com/github/github-mcp-server/pkg/errors" "github.com/github/github-mcp-server/pkg/inventory" - "github.com/github/github-mcp-server/pkg/lockdown" "github.com/github/github-mcp-server/pkg/octicons" "github.com/github/github-mcp-server/pkg/sanitize" "github.com/github/github-mcp-server/pkg/scopes" @@ -101,7 +100,7 @@ Possible options: switch method { case "get": - result, err := GetPullRequest(ctx, client, deps.GetRepoAccessCache(), owner, repo, pullNumber, deps.GetFlags()) + result, err := GetPullRequest(ctx, client, deps, owner, repo, pullNumber) return result, nil, err case "get_diff": result, err := GetPullRequestDiff(ctx, client, owner, repo, pullNumber) @@ -121,13 +120,13 @@ Possible options: if err != nil { return utils.NewToolResultError(err.Error()), nil, nil } - result, err := GetPullRequestReviewComments(ctx, gqlClient, deps.GetRepoAccessCache(), owner, repo, pullNumber, cursorPagination, deps.GetFlags()) + result, err := GetPullRequestReviewComments(ctx, gqlClient, deps, owner, repo, pullNumber, cursorPagination) return result, nil, err case "get_reviews": - result, err := GetPullRequestReviews(ctx, client, deps.GetRepoAccessCache(), owner, repo, pullNumber, deps.GetFlags()) + result, err := GetPullRequestReviews(ctx, client, deps, owner, repo, pullNumber) return result, nil, err case "get_comments": - result, err := GetIssueComments(ctx, client, deps.GetRepoAccessCache(), owner, repo, pullNumber, pagination, deps.GetFlags()) + result, err := GetIssueComments(ctx, 
client, deps, owner, repo, pullNumber, pagination) return result, nil, err default: return utils.NewToolResultError(fmt.Sprintf("unknown method: %s", method)), nil, nil @@ -135,7 +134,13 @@ Possible options: }) } -func GetPullRequest(ctx context.Context, client *github.Client, cache *lockdown.RepoAccessCache, owner, repo string, pullNumber int, ff FeatureFlags) (*mcp.CallToolResult, error) { +func GetPullRequest(ctx context.Context, client *github.Client, deps ToolDependencies, owner, repo string, pullNumber int) (*mcp.CallToolResult, error) { + cache, err := deps.GetRepoAccessCache(ctx) + if err != nil { + return nil, fmt.Errorf("failed to get repo access cache: %w", err) + } + ff := deps.GetFlags(ctx) + pr, resp, err := client.PullRequests.Get(ctx, owner, repo, pullNumber) if err != nil { return ghErrors.NewGitHubAPIErrorResponse(ctx, @@ -181,12 +186,9 @@ func GetPullRequest(ctx context.Context, client *github.Client, cache *lockdown. } } - r, err := json.Marshal(pr) - if err != nil { - return nil, fmt.Errorf("failed to marshal response: %w", err) - } + minimalPR := convertToMinimalPullRequest(pr) - return utils.NewToolResultText(string(r)), nil + return MarshalledTextResult(minimalPR), nil } func GetPullRequestDiff(ctx context.Context, client *github.Client, owner, repo string, pullNumber int) (*mcp.CallToolResult, error) { @@ -340,7 +342,13 @@ type pageInfoFragment struct { EndCursor githubv4.String } -func GetPullRequestReviewComments(ctx context.Context, gqlClient *githubv4.Client, cache *lockdown.RepoAccessCache, owner, repo string, pullNumber int, pagination CursorPaginationParams, ff FeatureFlags) (*mcp.CallToolResult, error) { +func GetPullRequestReviewComments(ctx context.Context, gqlClient *githubv4.Client, deps ToolDependencies, owner, repo string, pullNumber int, pagination CursorPaginationParams) (*mcp.CallToolResult, error) { + cache, err := deps.GetRepoAccessCache(ctx) + if err != nil { + return nil, fmt.Errorf("failed to get repo access cache: 
%w", err) + } + ff := deps.GetFlags(ctx) + // Convert pagination parameters to GraphQL format gqlParams, err := pagination.ToGraphQLParams() if err != nil { @@ -421,7 +429,13 @@ func GetPullRequestReviewComments(ctx context.Context, gqlClient *githubv4.Clien return utils.NewToolResultText(string(r)), nil } -func GetPullRequestReviews(ctx context.Context, client *github.Client, cache *lockdown.RepoAccessCache, owner, repo string, pullNumber int, ff FeatureFlags) (*mcp.CallToolResult, error) { +func GetPullRequestReviews(ctx context.Context, client *github.Client, deps ToolDependencies, owner, repo string, pullNumber int) (*mcp.CallToolResult, error) { + cache, err := deps.GetRepoAccessCache(ctx) + if err != nil { + return nil, fmt.Errorf("failed to get repo access cache: %w", err) + } + ff := deps.GetFlags(ctx) + reviews, resp, err := client.PullRequests.ListReviews(ctx, owner, repo, pullNumber, nil) if err != nil { return ghErrors.NewGitHubAPIErrorResponse(ctx, @@ -468,47 +482,11 @@ func GetPullRequestReviews(ctx context.Context, client *github.Client, cache *lo return utils.NewToolResultText(string(r)), nil } +// PullRequestWriteUIResourceURI is the URI for the create_pull_request tool's MCP App UI resource. +const PullRequestWriteUIResourceURI = "ui://github-mcp-server/pr-write" + // CreatePullRequest creates a tool to create a new pull request. 
func CreatePullRequest(t translations.TranslationHelperFunc) inventory.ServerTool { - schema := &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "owner": { - Type: "string", - Description: "Repository owner", - }, - "repo": { - Type: "string", - Description: "Repository name", - }, - "title": { - Type: "string", - Description: "PR title", - }, - "body": { - Type: "string", - Description: "PR description", - }, - "head": { - Type: "string", - Description: "Branch containing changes", - }, - "base": { - Type: "string", - Description: "Branch to merge into", - }, - "draft": { - Type: "boolean", - Description: "Create as draft PR", - }, - "maintainer_can_modify": { - Type: "boolean", - Description: "Allow maintainer edits", - }, - }, - Required: []string{"owner", "repo", "title", "head", "base"}, - } - return NewTool( ToolsetMetadataPullRequests, mcp.Tool{ @@ -518,10 +496,53 @@ func CreatePullRequest(t translations.TranslationHelperFunc) inventory.ServerToo Title: t("TOOL_CREATE_PULL_REQUEST_USER_TITLE", "Open new pull request"), ReadOnlyHint: false, }, - InputSchema: schema, + Meta: mcp.Meta{ + "ui": map[string]any{ + "resourceUri": PullRequestWriteUIResourceURI, + "visibility": []string{"model", "app"}, + }, + }, + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "owner": { + Type: "string", + Description: "Repository owner", + }, + "repo": { + Type: "string", + Description: "Repository name", + }, + "title": { + Type: "string", + Description: "PR title", + }, + "body": { + Type: "string", + Description: "PR description", + }, + "head": { + Type: "string", + Description: "Branch containing changes", + }, + "base": { + Type: "string", + Description: "Branch to merge into", + }, + "draft": { + Type: "boolean", + Description: "Create as draft PR", + }, + "maintainer_can_modify": { + Type: "boolean", + Description: "Allow maintainer edits", + }, + }, + Required: []string{"owner", "repo", 
"title", "head", "base"}, + }, }, []scopes.Scope{scopes.Repo}, - func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { + func(ctx context.Context, deps ToolDependencies, req *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { owner, err := RequiredParam[string](args, "owner") if err != nil { return utils.NewToolResultError(err.Error()), nil, nil @@ -530,18 +551,38 @@ func CreatePullRequest(t translations.TranslationHelperFunc) inventory.ServerToo if err != nil { return utils.NewToolResultError(err.Error()), nil, nil } - title, err := RequiredParam[string](args, "title") + + // When insiders mode is enabled and the client supports MCP Apps UI, + // check if this is a UI form submission. The UI sends _ui_submitted=true + // to distinguish form submissions from LLM calls. + uiSubmitted, _ := OptionalParam[bool](args, "_ui_submitted") + + if deps.GetFlags(ctx).InsidersMode && clientSupportsUI(req) && !uiSubmitted { + return utils.NewToolResultText(fmt.Sprintf("Ready to create a pull request in %s/%s. 
The user will review and confirm via the interactive form.", owner, repo)), nil, nil + } + + // When creating PR, title/head/base are required + title, err := OptionalParam[string](args, "title") if err != nil { return utils.NewToolResultError(err.Error()), nil, nil } - head, err := RequiredParam[string](args, "head") + head, err := OptionalParam[string](args, "head") if err != nil { return utils.NewToolResultError(err.Error()), nil, nil } - base, err := RequiredParam[string](args, "base") + base, err := OptionalParam[string](args, "base") if err != nil { return utils.NewToolResultError(err.Error()), nil, nil } + if title == "" { + return utils.NewToolResultError("missing required parameter: title"), nil, nil + } + if head == "" { + return utils.NewToolResultError("missing required parameter: head"), nil, nil + } + if base == "" { + return utils.NewToolResultError("missing required parameter: base"), nil, nil + } body, err := OptionalParam[string](args, "body") if err != nil { @@ -787,7 +828,7 @@ func UpdatePullRequest(t translations.TranslationHelperFunc) inventory.ServerToo } `graphql:"repository(owner: $owner, name: $repo)"` } - err = gqlClient.Query(ctx, &prQuery, map[string]interface{}{ + err = gqlClient.Query(ctx, &prQuery, map[string]any{ "owner": githubv4.String(owner), "repo": githubv4.String(repo), "prNum": githubv4.Int(pullNumber), // #nosec G115 - pull request numbers are always small positive integers @@ -902,6 +943,97 @@ func UpdatePullRequest(t translations.TranslationHelperFunc) inventory.ServerToo }) } +// AddReplyToPullRequestComment creates a tool to add a reply to an existing pull request comment. 
+func AddReplyToPullRequestComment(t translations.TranslationHelperFunc) inventory.ServerTool { + schema := &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "owner": { + Type: "string", + Description: "Repository owner", + }, + "repo": { + Type: "string", + Description: "Repository name", + }, + "pullNumber": { + Type: "number", + Description: "Pull request number", + }, + "commentId": { + Type: "number", + Description: "The ID of the comment to reply to", + }, + "body": { + Type: "string", + Description: "The text of the reply", + }, + }, + Required: []string{"owner", "repo", "pullNumber", "commentId", "body"}, + } + + return NewTool( + ToolsetMetadataPullRequests, + mcp.Tool{ + Name: "add_reply_to_pull_request_comment", + Description: t("TOOL_ADD_REPLY_TO_PULL_REQUEST_COMMENT_DESCRIPTION", "Add a reply to an existing pull request comment. This creates a new comment that is linked as a reply to the specified comment."), + Annotations: &mcp.ToolAnnotations{ + Title: t("TOOL_ADD_REPLY_TO_PULL_REQUEST_COMMENT_USER_TITLE", "Add reply to pull request comment"), + ReadOnlyHint: false, + }, + InputSchema: schema, + }, + []scopes.Scope{scopes.Repo}, + func(ctx context.Context, deps ToolDependencies, _ *mcp.CallToolRequest, args map[string]any) (*mcp.CallToolResult, any, error) { + owner, err := RequiredParam[string](args, "owner") + if err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + repo, err := RequiredParam[string](args, "repo") + if err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + pullNumber, err := RequiredInt(args, "pullNumber") + if err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + commentID, err := RequiredInt(args, "commentId") + if err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + body, err := RequiredParam[string](args, "body") + if err != nil { + return utils.NewToolResultError(err.Error()), nil, nil + } + + client, err 
:= deps.GetClient(ctx) + if err != nil { + return utils.NewToolResultErrorFromErr("failed to get GitHub client", err), nil, nil + } + + comment, resp, err := client.PullRequests.CreateCommentInReplyTo(ctx, owner, repo, pullNumber, body, int64(commentID)) + if err != nil { + return ghErrors.NewGitHubAPIErrorResponse(ctx, "failed to add reply to pull request comment", resp, err), nil, nil + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode != http.StatusCreated { + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + return utils.NewToolResultErrorFromErr("failed to read response body", err), nil, nil + } + return ghErrors.NewGitHubAPIStatusErrorResponse(ctx, "failed to add reply to pull request comment", resp, bodyBytes), nil, nil + } + + r, err := json.Marshal(comment) + if err != nil { + return utils.NewToolResultErrorFromErr("failed to marshal response", err), nil, nil + } + + return utils.NewToolResultText(string(r)), nil, nil + }) +} + // ListPullRequests creates a tool to list and filter repository pull requests. 
func ListPullRequests(t translations.TranslationHelperFunc) inventory.ServerTool { schema := &jsonschema.Schema{ diff --git a/pkg/github/pullrequests_test.go b/pkg/github/pullrequests_test.go index d2664479d8..570b1906f4 100644 --- a/pkg/github/pullrequests_test.go +++ b/pkg/github/pullrequests_test.go @@ -11,7 +11,7 @@ import ( "github.com/github/github-mcp-server/internal/toolsnaps" "github.com/github/github-mcp-server/pkg/lockdown" "github.com/github/github-mcp-server/pkg/translations" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/shurcooL/githubv4" "github.com/stretchr/testify/assert" @@ -55,7 +55,7 @@ func Test_GetPullRequest(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedPR *github.PullRequest expectedErrMsg string @@ -65,7 +65,7 @@ func Test_GetPullRequest(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposPullsByOwnerByRepoByPullNumber: mockResponse(t, http.StatusOK, mockPR), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get", "owner": "owner", "repo": "repo", @@ -82,7 +82,7 @@ func Test_GetPullRequest(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get", "owner": "owner", "repo": "repo", @@ -127,14 +127,14 @@ func Test_GetPullRequest(t *testing.T) { // Parse the result and get the text content if no error textContent := getTextResult(t, result) - // Unmarshal and verify the result - var returnedPR github.PullRequest + // Unmarshal and verify the minimal result + var returnedPR MinimalPullRequest err = json.Unmarshal([]byte(textContent.Text), &returnedPR) require.NoError(t, err) - assert.Equal(t, *tc.expectedPR.Number, *returnedPR.Number) - assert.Equal(t, 
*tc.expectedPR.Title, *returnedPR.Title) - assert.Equal(t, *tc.expectedPR.State, *returnedPR.State) - assert.Equal(t, *tc.expectedPR.HTMLURL, *returnedPR.HTMLURL) + assert.Equal(t, tc.expectedPR.GetNumber(), returnedPR.Number) + assert.Equal(t, tc.expectedPR.GetTitle(), returnedPR.Title) + assert.Equal(t, tc.expectedPR.GetState(), returnedPR.State) + assert.Equal(t, tc.expectedPR.GetHTMLURL(), returnedPR.HTMLURL) }) } } @@ -194,7 +194,7 @@ func Test_UpdatePullRequest(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedPR *github.PullRequest expectedErrMsg string @@ -202,7 +202,7 @@ func Test_UpdatePullRequest(t *testing.T) { { name: "successful PR update (title, body, base, maintainer_can_modify)", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PatchReposPullsByOwnerByRepoByPullNumber: expectRequestBody(t, map[string]interface{}{ + PatchReposPullsByOwnerByRepoByPullNumber: expectRequestBody(t, map[string]any{ "title": "Updated Test PR Title", "body": "Updated test PR body.", "base": "develop", @@ -212,7 +212,7 @@ func Test_UpdatePullRequest(t *testing.T) { ), GetReposPullsByOwnerByRepoByPullNumber: mockResponse(t, http.StatusOK, mockUpdatedPR), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "pullNumber": float64(42), @@ -227,14 +227,14 @@ func Test_UpdatePullRequest(t *testing.T) { { name: "successful PR update (state)", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PatchReposPullsByOwnerByRepoByPullNumber: expectRequestBody(t, map[string]interface{}{ + PatchReposPullsByOwnerByRepoByPullNumber: expectRequestBody(t, map[string]any{ "state": "closed", }).andThen( mockResponse(t, http.StatusOK, mockClosedPR), ), GetReposPullsByOwnerByRepoByPullNumber: mockResponse(t, http.StatusOK, mockClosedPR), }), - requestArgs: map[string]interface{}{ + 
requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "pullNumber": float64(42), @@ -249,11 +249,11 @@ func Test_UpdatePullRequest(t *testing.T) { PostReposPullsRequestedReviewersByOwnerByRepoByPullNumber: mockResponse(t, http.StatusOK, mockPRWithReviewers), GetReposPullsByOwnerByRepoByPullNumber: mockResponse(t, http.StatusOK, mockPRWithReviewers), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "pullNumber": float64(42), - "reviewers": []interface{}{"reviewer1", "reviewer2"}, + "reviewers": []any{"reviewer1", "reviewer2"}, }, expectError: false, expectedPR: mockPRWithReviewers, @@ -261,14 +261,14 @@ func Test_UpdatePullRequest(t *testing.T) { { name: "successful PR update (title only)", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PatchReposPullsByOwnerByRepoByPullNumber: expectRequestBody(t, map[string]interface{}{ + PatchReposPullsByOwnerByRepoByPullNumber: expectRequestBody(t, map[string]any{ "title": "Updated Test PR Title", }).andThen( mockResponse(t, http.StatusOK, mockUpdatedPR), ), GetReposPullsByOwnerByRepoByPullNumber: mockResponse(t, http.StatusOK, mockUpdatedPR), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "pullNumber": float64(42), @@ -280,7 +280,7 @@ func Test_UpdatePullRequest(t *testing.T) { { name: "no update parameters provided", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), // No API call expected - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "pullNumber": float64(42), @@ -297,7 +297,7 @@ func Test_UpdatePullRequest(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Validation Failed"}`)) }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "pullNumber": float64(42), @@ -314,11 +314,11 @@ func Test_UpdatePullRequest(t *testing.T) { _, _ = 
w.Write([]byte(`{"message": "Invalid reviewers"}`)) }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "pullNumber": float64(42), - "reviewers": []interface{}{"invalid-user"}, + "reviewers": []any{"invalid-user"}, }, expectError: true, expectedErrMsg: "failed to request reviewers", @@ -386,7 +386,7 @@ func Test_UpdatePullRequest_Draft(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedPR *github.PullRequest expectedErrMsg string @@ -440,7 +440,7 @@ func Test_UpdatePullRequest_Draft(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "pullNumber": float64(42), @@ -498,7 +498,7 @@ func Test_UpdatePullRequest_Draft(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "pullNumber": float64(42), @@ -591,7 +591,7 @@ func Test_ListPullRequests(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedPRs []*github.PullRequest expectedErrMsg string @@ -609,7 +609,7 @@ func Test_ListPullRequests(t *testing.T) { mockResponse(t, http.StatusOK, mockPRs), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "state": "all", @@ -629,7 +629,7 @@ func Test_ListPullRequests(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Invalid request"}`)) }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "state": "invalid", @@ -712,7 +712,7 @@ func Test_MergePullRequest(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedMergeResult 
*github.PullRequestMergeResult expectedErrMsg string @@ -720,7 +720,7 @@ func Test_MergePullRequest(t *testing.T) { { name: "successful merge", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PutReposPullsMergeByOwnerByRepoByPullNumber: expectRequestBody(t, map[string]interface{}{ + PutReposPullsMergeByOwnerByRepoByPullNumber: expectRequestBody(t, map[string]any{ "commit_title": "Merge PR #42", "commit_message": "Merging awesome feature", "merge_method": "squash", @@ -728,7 +728,7 @@ func Test_MergePullRequest(t *testing.T) { mockResponse(t, http.StatusOK, mockMergeResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "pullNumber": float64(42), @@ -747,7 +747,7 @@ func Test_MergePullRequest(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Pull request cannot be merged"}`)) }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "pullNumber": float64(42), @@ -848,7 +848,7 @@ func Test_SearchPullRequests(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedResult *github.IssuesSearchResult expectedErrMsg string @@ -869,7 +869,7 @@ func Test_SearchPullRequests(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "repo:owner/repo is:open", "sort": "created", "order": "desc", @@ -895,7 +895,7 @@ func Test_SearchPullRequests(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "draft:false", "owner": "test-owner", "repo": "test-repo", @@ -919,7 +919,7 @@ func Test_SearchPullRequests(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: 
map[string]any{ "query": "feature", "owner": "test-owner", }, @@ -940,7 +940,7 @@ func Test_SearchPullRequests(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "review-required", "repo": "test-repo", }, @@ -952,7 +952,7 @@ func Test_SearchPullRequests(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetSearchIssues: mockResponse(t, http.StatusOK, mockSearchResult), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "is:pr repo:owner/repo is:open", }, expectError: false, @@ -972,7 +972,7 @@ func Test_SearchPullRequests(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "is:pr repo:github/github-mcp-server is:open draft:false", }, expectError: false, @@ -992,7 +992,7 @@ func Test_SearchPullRequests(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "repo:github/github-mcp-server author:octocat", "owner": "different-owner", "repo": "different-repo", @@ -1014,7 +1014,7 @@ func Test_SearchPullRequests(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "is:pr repo:github/github-mcp-server (label:bug OR label:enhancement OR label:feature)", }, expectError: false, @@ -1028,7 +1028,7 @@ func Test_SearchPullRequests(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Validation Failed"}`)) }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "invalid:query", }, expectError: true, @@ -1126,7 +1126,7 @@ func Test_GetPullRequestFiles(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any 
expectError bool expectedFiles []*github.CommitFile expectedErrMsg string @@ -1141,7 +1141,7 @@ func Test_GetPullRequestFiles(t *testing.T) { mockResponse(t, http.StatusOK, mockFiles), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_files", "owner": "owner", "repo": "repo", @@ -1160,7 +1160,7 @@ func Test_GetPullRequestFiles(t *testing.T) { mockResponse(t, http.StatusOK, mockFiles), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_files", "owner": "owner", "repo": "repo", @@ -1184,7 +1184,7 @@ func Test_GetPullRequestFiles(t *testing.T) { }), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_files", "owner": "owner", "repo": "repo", @@ -1298,7 +1298,7 @@ func Test_GetPullRequestStatus(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedStatus *github.CombinedStatus expectedErrMsg string @@ -1309,7 +1309,7 @@ func Test_GetPullRequestStatus(t *testing.T) { GetReposPullsByOwnerByRepoByPullNumber: mockResponse(t, http.StatusOK, mockPR), GetReposCommitsStatusByOwnerByRepoByRef: mockResponse(t, http.StatusOK, mockStatus), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_status", "owner": "owner", "repo": "repo", @@ -1326,7 +1326,7 @@ func Test_GetPullRequestStatus(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_status", "owner": "owner", "repo": "repo", @@ -1344,7 +1344,7 @@ func Test_GetPullRequestStatus(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_status", "owner": "owner", "repo": "repo", @@ -1428,7 +1428,7 @@ func Test_UpdatePullRequestBranch(t *testing.T) { tests := 
[]struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedUpdateResult *github.PullRequestBranchUpdateResponse expectedErrMsg string @@ -1436,13 +1436,13 @@ func Test_UpdatePullRequestBranch(t *testing.T) { { name: "successful branch update", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PutReposPullsUpdateBranchByOwnerByRepoByPullNumber: expectRequestBody(t, map[string]interface{}{ + PutReposPullsUpdateBranchByOwnerByRepoByPullNumber: expectRequestBody(t, map[string]any{ "expected_head_sha": "abcd1234", }).andThen( mockResponse(t, http.StatusAccepted, mockUpdateResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "pullNumber": float64(42), @@ -1454,11 +1454,11 @@ func Test_UpdatePullRequestBranch(t *testing.T) { { name: "branch update without expected SHA", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PutReposPullsUpdateBranchByOwnerByRepoByPullNumber: expectRequestBody(t, map[string]interface{}{}).andThen( + PutReposPullsUpdateBranchByOwnerByRepoByPullNumber: expectRequestBody(t, map[string]any{}).andThen( mockResponse(t, http.StatusAccepted, mockUpdateResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "pullNumber": float64(42), @@ -1474,7 +1474,7 @@ func Test_UpdatePullRequestBranch(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Merge conflict"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "pullNumber": float64(42), @@ -1538,7 +1538,7 @@ func Test_GetPullRequestComments(t *testing.T) { tests := []struct { name string gqlHTTPClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedErrMsg string lockdownEnabled bool @@ -1549,7 +1549,7 @@ func Test_GetPullRequestComments(t 
*testing.T) { gqlHTTPClient: githubv4mock.NewMockedHTTPClient( githubv4mock.NewQueryMatcher( reviewThreadsQuery{}, - map[string]interface{}{ + map[string]any{ "owner": githubv4.String("owner"), "repo": githubv4.String("repo"), "prNum": githubv4.Int(42), @@ -1611,7 +1611,7 @@ func Test_GetPullRequestComments(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_review_comments", "owner": "owner", "repo": "repo", @@ -1619,7 +1619,7 @@ func Test_GetPullRequestComments(t *testing.T) { }, expectError: false, validateResult: func(t *testing.T, textContent string) { - var result map[string]interface{} + var result map[string]any err := json.Unmarshal([]byte(textContent), &result) require.NoError(t, err) @@ -1629,28 +1629,28 @@ func Test_GetPullRequestComments(t *testing.T) { assert.Contains(t, result, "totalCount") // Validate review threads - threads := result["reviewThreads"].([]interface{}) + threads := result["reviewThreads"].([]any) assert.Len(t, threads, 1) - thread := threads[0].(map[string]interface{}) + thread := threads[0].(map[string]any) assert.Equal(t, "RT_kwDOA0xdyM4AX1Yz", thread["ID"]) assert.Equal(t, false, thread["IsResolved"]) assert.Equal(t, false, thread["IsOutdated"]) assert.Equal(t, false, thread["IsCollapsed"]) // Validate comments within thread - comments := thread["Comments"].(map[string]interface{}) - commentNodes := comments["Nodes"].([]interface{}) + comments := thread["Comments"].(map[string]any) + commentNodes := comments["Nodes"].([]any) assert.Len(t, commentNodes, 2) // Validate first comment - comment1 := commentNodes[0].(map[string]interface{}) + comment1 := commentNodes[0].(map[string]any) assert.Equal(t, "PRRC_kwDOA0xdyM4AX1Y0", comment1["ID"]) assert.Equal(t, "This looks good", comment1["Body"]) assert.Equal(t, "file1.go", comment1["Path"]) // Validate pagination info - pageInfo := result["pageInfo"].(map[string]interface{}) + pageInfo := result["pageInfo"].(map[string]any) 
assert.Equal(t, false, pageInfo["hasNextPage"]) assert.Equal(t, false, pageInfo["hasPreviousPage"]) assert.Equal(t, "cursor1", pageInfo["startCursor"]) @@ -1665,7 +1665,7 @@ func Test_GetPullRequestComments(t *testing.T) { gqlHTTPClient: githubv4mock.NewMockedHTTPClient( githubv4mock.NewQueryMatcher( reviewThreadsQuery{}, - map[string]interface{}{ + map[string]any{ "owner": githubv4.String("owner"), "repo": githubv4.String("repo"), "prNum": githubv4.Int(999), @@ -1676,7 +1676,7 @@ func Test_GetPullRequestComments(t *testing.T) { githubv4mock.ErrorResponse("Could not resolve to a PullRequest with the number of 999."), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_review_comments", "owner": "owner", "repo": "repo", @@ -1690,7 +1690,7 @@ func Test_GetPullRequestComments(t *testing.T) { gqlHTTPClient: githubv4mock.NewMockedHTTPClient( githubv4mock.NewQueryMatcher( reviewThreadsQuery{}, - map[string]interface{}{ + map[string]any{ "owner": githubv4.String("owner"), "repo": githubv4.String("repo"), "prNum": githubv4.Int(42), @@ -1752,7 +1752,7 @@ func Test_GetPullRequestComments(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_review_comments", "owner": "owner", "repo": "repo", @@ -1761,25 +1761,25 @@ func Test_GetPullRequestComments(t *testing.T) { expectError: false, lockdownEnabled: true, validateResult: func(t *testing.T, textContent string) { - var result map[string]interface{} + var result map[string]any err := json.Unmarshal([]byte(textContent), &result) require.NoError(t, err) // Validate that only maintainer comment is returned - threads := result["reviewThreads"].([]interface{}) + threads := result["reviewThreads"].([]any) assert.Len(t, threads, 1) - thread := threads[0].(map[string]interface{}) - comments := thread["Comments"].(map[string]interface{}) + thread := threads[0].(map[string]any) + comments := thread["Comments"].(map[string]any) // Should only 
have 1 comment (maintainer) after filtering assert.Equal(t, float64(1), comments["TotalCount"]) - commentNodes := comments["Nodes"].([]interface{}) + commentNodes := comments["Nodes"].([]any) assert.Len(t, commentNodes, 1) - comment := commentNodes[0].(map[string]interface{}) - author := comment["Author"].(map[string]interface{}) + comment := commentNodes[0].(map[string]any) + author := comment["Author"].(map[string]any) assert.Equal(t, "maintainer", author["Login"]) assert.Equal(t, "Maintainer review comment", comment["Body"]) }, @@ -1888,7 +1888,7 @@ func Test_GetPullRequestReviews(t *testing.T) { name string mockedClient *http.Client gqlHTTPClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedReviews []*github.PullRequestReview expectedErrMsg string @@ -1899,7 +1899,7 @@ func Test_GetPullRequestReviews(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposPullsReviewsByOwnerByRepoByPullNumber: mockResponse(t, http.StatusOK, mockReviews), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_reviews", "owner": "owner", "repo": "repo", @@ -1916,7 +1916,7 @@ func Test_GetPullRequestReviews(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_reviews", "owner": "owner", "repo": "repo", @@ -1944,7 +1944,7 @@ func Test_GetPullRequestReviews(t *testing.T) { }), }), gqlHTTPClient: newRepoAccessHTTPClient(), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "method": "get_reviews", "owner": "owner", "repo": "repo", @@ -2066,7 +2066,7 @@ func Test_CreatePullRequest(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedPR *github.PullRequest expectedErrMsg string @@ -2074,7 +2074,7 @@ func 
Test_CreatePullRequest(t *testing.T) { { name: "successful PR creation", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PostReposPullsByOwnerByRepo: expectRequestBody(t, map[string]interface{}{ + PostReposPullsByOwnerByRepo: expectRequestBody(t, map[string]any{ "title": "Test PR", "body": "This is a test PR", "head": "feature-branch", @@ -2085,7 +2085,7 @@ func Test_CreatePullRequest(t *testing.T) { mockResponse(t, http.StatusCreated, mockPR), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "title": "Test PR", @@ -2101,7 +2101,7 @@ func Test_CreatePullRequest(t *testing.T) { { name: "missing required parameter", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{}), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", // missing title, head, base @@ -2117,7 +2117,7 @@ func Test_CreatePullRequest(t *testing.T) { _, _ = w.Write([]byte(`{"message":"Validation failed","errors":[{"resource":"PullRequest","code":"invalid"}]}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "title": "Test PR", @@ -2172,6 +2172,82 @@ func Test_CreatePullRequest(t *testing.T) { } } +// Test_CreatePullRequest_InsidersMode_UIGate verifies the insiders mode UI gate +// behavior: UI clients get a form message, non-UI clients execute directly. 
func Test_CreatePullRequest_InsidersMode_UIGate(t *testing.T) {
	t.Parallel()

	// Canned PR returned by the mocked create endpoint in all subtests.
	mockPR := &github.PullRequest{
		Number:  github.Ptr(42),
		Title:   github.Ptr("Test PR"),
		HTMLURL: github.Ptr("https://github.com/owner/repo/pull/42"),
		Head:    &github.PullRequestBranch{SHA: github.Ptr("abc"), Ref: github.Ptr("feature")},
		Base:    &github.PullRequestBranch{SHA: github.Ptr("def"), Ref: github.Ptr("main")},
		User:    &github.User{Login: github.Ptr("testuser")},
	}

	serverTool := CreatePullRequest(translations.NullTranslationHelper)

	client := github.NewClient(MockHTTPClientWithHandlers(map[string]http.HandlerFunc{
		PostReposPullsByOwnerByRepo: mockResponse(t, http.StatusCreated, mockPR),
	}))

	// InsidersMode enables the UI form gate under test.
	deps := BaseDeps{
		Client:    client,
		GQLClient: githubv4.NewClient(nil),
		Flags:     FeatureFlags{InsidersMode: true},
	}
	handler := serverTool.Handler(deps)

	t.Run("UI client without _ui_submitted returns form message", func(t *testing.T) {
		// Session name "Visual Studio Code" marks the caller as a UI client.
		request := createMCPRequestWithSession(t, "Visual Studio Code", map[string]any{
			"owner": "owner",
			"repo":  "repo",
			"title": "Test PR",
			"head":  "feature",
			"base":  "main",
		})
		result, err := handler(ContextWithDeps(context.Background(), deps), &request)
		require.NoError(t, err)

		textContent := getTextResult(t, result)
		assert.Contains(t, textContent.Text, "Ready to create a pull request")
	})

	t.Run("UI client with _ui_submitted executes directly", func(t *testing.T) {
		// _ui_submitted=true means the form round-trip already happened,
		// so the tool should create the PR rather than re-prompt.
		request := createMCPRequestWithSession(t, "Visual Studio Code", map[string]any{
			"owner":         "owner",
			"repo":          "repo",
			"title":         "Test PR",
			"head":          "feature",
			"base":          "main",
			"_ui_submitted": true,
		})
		result, err := handler(ContextWithDeps(context.Background(), deps), &request)
		require.NoError(t, err)

		textContent := getTextResult(t, result)
		assert.Contains(t, textContent.Text, "https://github.com/owner/repo/pull/42",
			"tool should return the created PR URL")
	})

	t.Run("non-UI client executes directly without _ui_submitted", func(t *testing.T) {
		// No session info: treated as a non-UI caller, gate does not apply.
		request := createMCPRequest(map[string]any{
			"owner": "owner",
			"repo":  "repo",
			"title": "Test PR",
			"head":  "feature",
			"base":  "main",
		})
		result, err := handler(ContextWithDeps(context.Background(), deps), &request)
		require.NoError(t, err)

		textContent := getTextResult(t, result)
		assert.Contains(t, textContent.Text, "https://github.com/owner/repo/pull/42",
			"non-UI client should execute directly")
	})
}

// TestAddReplyToPullRequestComment covers the tool definition (snapshot,
// schema shape) and the handler's success, missing-parameter, and API-error
// paths for add_reply_to_pull_request_comment.
func TestAddReplyToPullRequestComment(t *testing.T) {
	t.Parallel()

	// Verify tool definition once
	serverTool := AddReplyToPullRequestComment(translations.NullTranslationHelper)
	tool := serverTool.Tool
	require.NoError(t, toolsnaps.Test(tool.Name, tool))

	assert.Equal(t, "add_reply_to_pull_request_comment", tool.Name)
	assert.NotEmpty(t, tool.Description)
	schema := tool.InputSchema.(*jsonschema.Schema)
	assert.Contains(t, schema.Properties, "owner")
	assert.Contains(t, schema.Properties, "repo")
	assert.Contains(t, schema.Properties, "pullNumber")
	assert.Contains(t, schema.Properties, "commentId")
	assert.Contains(t, schema.Properties, "body")
	assert.ElementsMatch(t, schema.Required, []string{"owner", "repo", "pullNumber", "commentId", "body"})

	// Setup mock reply comment for success case
	mockReplyComment := &github.PullRequestComment{
		ID:        github.Ptr(int64(456)),
		Body:      github.Ptr("This is a reply to the comment"),
		InReplyTo: github.Ptr(int64(123)),
		HTMLURL:   github.Ptr("https://github.com/owner/repo/pull/42#discussion_r456"),
		User: &github.User{
			Login: github.Ptr("responder"),
		},
		CreatedAt: &github.Timestamp{Time: time.Now()},
		UpdatedAt: &github.Timestamp{Time: time.Now()},
	}

	tests := []struct {
		name               string
		mockedClient       *http.Client
		requestArgs        map[string]any
		expectToolError    bool
		expectedToolErrMsg string
	}{
		{
			name: "successful reply to pull request comment",
			requestArgs: map[string]any{
				"owner":      "owner",
				"repo":       "repo",
				"pullNumber": float64(42),
				"commentId":  float64(123),
				"body":       "This is a reply to the comment",
			},
			mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{
				PostReposPullsCommentsByOwnerByRepoByPullNumber: func(w http.ResponseWriter, _ *http.Request) {
					w.WriteHeader(http.StatusCreated)
					responseData, _ := json.Marshal(mockReplyComment)
					_, _ = w.Write(responseData)
				},
			}),
		},
		// The next five cases each omit one required argument; the handler
		// must fail before any HTTP request is made (no mockedClient needed).
		{
			name: "missing required parameter owner",
			requestArgs: map[string]any{
				"repo":       "repo",
				"pullNumber": float64(42),
				"commentId":  float64(123),
				"body":       "This is a reply to the comment",
			},
			expectToolError:    true,
			expectedToolErrMsg: "missing required parameter: owner",
		},
		{
			name: "missing required parameter repo",
			requestArgs: map[string]any{
				"owner":      "owner",
				"pullNumber": float64(42),
				"commentId":  float64(123),
				"body":       "This is a reply to the comment",
			},
			expectToolError:    true,
			expectedToolErrMsg: "missing required parameter: repo",
		},
		{
			name: "missing required parameter pullNumber",
			requestArgs: map[string]any{
				"owner":     "owner",
				"repo":      "repo",
				"commentId": float64(123),
				"body":      "This is a reply to the comment",
			},
			expectToolError:    true,
			expectedToolErrMsg: "missing required parameter: pullNumber",
		},
		{
			name: "missing required parameter commentId",
			requestArgs: map[string]any{
				"owner":      "owner",
				"repo":       "repo",
				"pullNumber": float64(42),
				"body":       "This is a reply to the comment",
			},
			expectToolError:    true,
			expectedToolErrMsg: "missing required parameter: commentId",
		},
		{
			name: "missing required parameter body",
			requestArgs: map[string]any{
				"owner":      "owner",
				"repo":       "repo",
				"pullNumber": float64(42),
				"commentId":  float64(123),
			},
			expectToolError:    true,
			expectedToolErrMsg: "missing required parameter: body",
		},
		{
			name: "API error when adding reply",
			mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{
				PostReposPullsCommentsByOwnerByRepoByPullNumber: func(w http.ResponseWriter, _ *http.Request) {
					w.WriteHeader(http.StatusNotFound)
					_, _ = w.Write([]byte(`{"message": "Not Found"}`))
				},
			}),
			requestArgs: map[string]any{
				"owner":      "owner",
				"repo":       "repo",
				"pullNumber": float64(42),
				"commentId":  float64(123),
				"body":       "This is a reply to the comment",
			},
			expectToolError:    true,
			expectedToolErrMsg: "failed to add reply to pull request comment",
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			t.Parallel()

			// Setup client with mock
			client := github.NewClient(tc.mockedClient)
			serverTool := AddReplyToPullRequestComment(translations.NullTranslationHelper)
			deps := BaseDeps{
				Client: client,
			}
			handler := serverTool.Handler(deps)

			// Create call request
			request := createMCPRequest(tc.requestArgs)

			// Call handler
			result, err := handler(ContextWithDeps(context.Background(), deps), &request)
			require.NoError(t, err)

			if tc.expectToolError {
				require.True(t, result.IsError)
				errorContent := getErrorResult(t, result)
				assert.Contains(t, errorContent.Text, tc.expectedToolErrMsg)
				return
			}

			// Parse the result and verify it's not an error
			require.False(t, result.IsError)
			textContent := getTextResult(t, result)
			assert.Contains(t, textContent.Text, "This is a reply to the comment")
		})
	}
}
"github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/modelcontextprotocol/go-sdk/mcp" ) @@ -488,13 +489,14 @@ If the SHA is not provided, the tool will attempt to acquire it by fetching the return ghErrors.NewGitHubAPIStatusErrorResponse(ctx, "failed to create/update file", resp, body), nil, nil } - r, err := json.Marshal(fileContent) - if err != nil { - return nil, nil, fmt.Errorf("failed to marshal response: %w", err) - } + minimalResponse := convertToMinimalFileContentResponse(fileContent) // Warn if file was updated without SHA validation (blind update) if sha == "" && previousSHA != "" { + warning, err := json.Marshal(minimalResponse) + if err != nil { + return nil, nil, fmt.Errorf("failed to marshal response: %w", err) + } return utils.NewToolResultText(fmt.Sprintf( "Warning: File updated without SHA validation. Previous file SHA was %s. "+ `Verify no unintended changes were overwritten: @@ -503,10 +505,10 @@ If the SHA is not provided, the tool will attempt to acquire it by fetching the 3. Revert changes if shas do not match. 
%s`, - previousSHA, path, string(r))), nil, nil + previousSHA, path, string(warning))), nil, nil } - return utils.NewToolResultText(string(r)), nil, nil + return MarshalledTextResult(minimalResponse), nil, nil }, ) } @@ -715,86 +717,72 @@ func GetFileContents(t translations.TranslationHelperFunc) inventory.ServerTool if fileContent != nil && fileContent.SHA != nil { fileSHA = *fileContent.SHA - - rawClient, err := deps.GetRawClient(ctx) + fileSize := fileContent.GetSize() + // Build resource URI for the file using URI templates + pathParts := strings.Split(path, "/") + resourceURI, err := expandRepoResourceURI(owner, repo, sha, ref, pathParts) if err != nil { - return utils.NewToolResultError("failed to get GitHub raw content client"), nil, nil + return utils.NewToolResultError("failed to build resource URI"), nil, nil } - resp, err := rawClient.GetRawContent(ctx, owner, repo, path, rawOpts) - if err != nil { - return utils.NewToolResultError("failed to get raw repository content"), nil, nil + + // main branch ref passed in ref parameter but it doesn't exist - default branch was used + var successNote string + if fallbackUsed { + successNote = fmt.Sprintf(" Note: the provided ref '%s' does not exist, default branch '%s' was used instead.", originalRef, rawOpts.Ref) } - defer func() { - _ = resp.Body.Close() - }() - if resp.StatusCode == http.StatusOK { - // If the raw content is found, return it directly - body, err := io.ReadAll(resp.Body) - if err != nil { - return ghErrors.NewGitHubRawAPIErrorResponse(ctx, "failed to get raw repository content", resp, err), nil, nil - } - contentType := resp.Header.Get("Content-Type") - - var resourceURI string - switch { - case sha != "": - resourceURI, err = url.JoinPath("repo://", owner, repo, "sha", sha, "contents", path) - if err != nil { - return nil, nil, fmt.Errorf("failed to create resource URI: %w", err) - } - case ref != "": - resourceURI, err = url.JoinPath("repo://", owner, repo, ref, "contents", path) - if err != 
nil { - return nil, nil, fmt.Errorf("failed to create resource URI: %w", err) - } - default: - resourceURI, err = url.JoinPath("repo://", owner, repo, "contents", path) - if err != nil { - return nil, nil, fmt.Errorf("failed to create resource URI: %w", err) - } + // For files >= 1MB, return a ResourceLink instead of content + const maxContentSize = 1024 * 1024 // 1MB + if fileSize >= maxContentSize { + size := int64(fileSize) + resourceLink := &mcp.ResourceLink{ + URI: resourceURI, + Name: fileContent.GetName(), + Title: fmt.Sprintf("File: %s", path), + Size: &size, } + return utils.NewToolResultResourceLink( + fmt.Sprintf("File %s is too large to display (%d bytes). Use the download URL to fetch the content: %s (SHA: %s)%s", + path, fileSize, fileContent.GetDownloadURL(), fileSHA, successNote), + resourceLink), nil, nil + } - // main branch ref passed in ref parameter but it doesn't exist - default branch was used - var successNote string - if fallbackUsed { - successNote = fmt.Sprintf(" Note: the provided ref '%s' does not exist, default branch '%s' was used instead.", originalRef, rawOpts.Ref) - } + // For files < 1MB, get content directly from Contents API + content, err := fileContent.GetContent() + if err != nil { + return utils.NewToolResultError(fmt.Sprintf("failed to decode file content: %s", err)), nil, nil + } - // Determine if content is text or binary - isTextContent := strings.HasPrefix(contentType, "text/") || - contentType == "application/json" || - contentType == "application/xml" || - strings.HasSuffix(contentType, "+json") || - strings.HasSuffix(contentType, "+xml") - - if isTextContent { - result := &mcp.ResourceContents{ - URI: resourceURI, - Text: string(body), - MIMEType: contentType, - } - // Include SHA in the result metadata - if fileSHA != "" { - return utils.NewToolResultResource(fmt.Sprintf("successfully downloaded text file (SHA: %s)", fileSHA)+successNote, result), nil, nil - } - return utils.NewToolResultResource("successfully 
downloaded text file"+successNote, result), nil, nil - } + // Detect content type from the actual content bytes, + // mirroring the original approach of using the Content-Type header + // from the raw API response. + contentBytes := []byte(content) + contentType := http.DetectContentType(contentBytes) + // Determine if content is text or binary based on detected content type + isTextContent := strings.HasPrefix(contentType, "text/") || + contentType == "application/json" || + contentType == "application/xml" || + strings.HasSuffix(contentType, "+json") || + strings.HasSuffix(contentType, "+xml") + + if isTextContent { result := &mcp.ResourceContents{ URI: resourceURI, - Blob: body, + Text: content, MIMEType: contentType, } - // Include SHA in the result metadata - if fileSHA != "" { - return utils.NewToolResultResource(fmt.Sprintf("successfully downloaded binary file (SHA: %s)", fileSHA)+successNote, result), nil, nil - } - return utils.NewToolResultResource("successfully downloaded binary file"+successNote, result), nil, nil + return utils.NewToolResultResource(fmt.Sprintf("successfully downloaded text file (SHA: %s)%s", fileSHA, successNote), result), nil, nil } - // Raw API call failed - return matchFiles(ctx, client, owner, repo, ref, path, rawOpts, resp.StatusCode) + // Binary content - encode as base64 blob + blobContent := base64.StdEncoding.EncodeToString(contentBytes) + result := &mcp.ResourceContents{ + URI: resourceURI, + Blob: []byte(blobContent), + MIMEType: contentType, + } + return utils.NewToolResultResource(fmt.Sprintf("successfully downloaded binary file (SHA: %s)%s", fileSHA, successNote), result), nil, nil } else if dirContent != nil { // file content or file SHA is nil which means it's a directory r, err := json.Marshal(dirContent) @@ -1078,7 +1066,7 @@ func DeleteFile(t translations.TranslationHelperFunc) inventory.ServerTool { } // Create a response similar to what the DeleteFile API would return - response := map[string]interface{}{ + 
response := map[string]any{ "commit": newCommit, "content": nil, } @@ -1278,7 +1266,7 @@ func PushFiles(t translations.TranslationHelperFunc) inventory.ServerTool { } // Parse files parameter - this should be an array of objects with path and content - filesObj, ok := args["files"].([]interface{}) + filesObj, ok := args["files"].([]any) if !ok { return utils.NewToolResultError("files parameter must be an array of objects with path and content"), nil, nil } @@ -1360,7 +1348,7 @@ func PushFiles(t translations.TranslationHelperFunc) inventory.ServerTool { var entries []*github.TreeEntry for _, file := range filesObj { - fileMap, ok := file.(map[string]interface{}) + fileMap, ok := file.(map[string]any) if !ok { return utils.NewToolResultError("each file must be an object with path and content"), nil, nil } diff --git a/pkg/github/repositories_helper.go b/pkg/github/repositories_helper.go index de5065d480..a347ebdd6c 100644 --- a/pkg/github/repositories_helper.go +++ b/pkg/github/repositories_helper.go @@ -10,7 +10,7 @@ import ( ghErrors "github.com/github/github-mcp-server/pkg/errors" "github.com/github/github-mcp-server/pkg/raw" "github.com/github/github-mcp-server/pkg/utils" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/modelcontextprotocol/go-sdk/mcp" ) diff --git a/pkg/github/repositories_test.go b/pkg/github/repositories_test.go index d91af8851b..76628283d2 100644 --- a/pkg/github/repositories_test.go +++ b/pkg/github/repositories_test.go @@ -14,7 +14,7 @@ import ( "github.com/github/github-mcp-server/pkg/raw" "github.com/github/github-mcp-server/pkg/translations" "github.com/github/github-mcp-server/pkg/utils" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/modelcontextprotocol/go-sdk/mcp" "github.com/stretchr/testify/assert" @@ -64,9 +64,9 @@ func Test_GetFileContents(t *testing.T) { tests := []struct { name string 
mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool - expectedResult interface{} + expectedResult any expectedErrMsg string expectStatus int expectedMsg string // optional: expected message text to verify in result @@ -78,21 +78,22 @@ func Test_GetFileContents(t *testing.T) { GetReposByOwnerByRepo: mockResponse(t, http.StatusOK, "{\"name\": \"repo\", \"default_branch\": \"main\"}"), GetReposContentsByOwnerByRepoByPath: func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusOK) + // Base64 encode the content as GitHub API does + encodedContent := base64.StdEncoding.EncodeToString(mockRawContent) fileContent := &github.RepositoryContent{ - Name: github.Ptr("README.md"), - Path: github.Ptr("README.md"), - SHA: github.Ptr("abc123"), - Type: github.Ptr("file"), + Name: github.Ptr("README.md"), + Path: github.Ptr("README.md"), + SHA: github.Ptr("abc123"), + Type: github.Ptr("file"), + Content: github.Ptr(encodedContent), + Size: github.Ptr(len(mockRawContent)), + Encoding: github.Ptr("base64"), } contentBytes, _ := json.Marshal(fileContent) _, _ = w.Write(contentBytes) }, - GetRawReposContentsByOwnerByRepoByBranchByPath: func(w http.ResponseWriter, _ *http.Request) { - w.Header().Set("Content-Type", "text/markdown") - _, _ = w.Write(mockRawContent) - }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "README.md", @@ -102,31 +103,33 @@ func Test_GetFileContents(t *testing.T) { expectedResult: mcp.ResourceContents{ URI: "repo://owner/repo/refs/heads/main/contents/README.md", Text: "# Test Repository\n\nThis is a test repository.", - MIMEType: "text/markdown", + MIMEType: "text/plain; charset=utf-8", }, }, { - name: "successful file blob content fetch", + name: "successful binary file content fetch (PNG)", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposGitRefByOwnerByRepoByRef: mockResponse(t, 
http.StatusOK, "{\"ref\": \"refs/heads/main\", \"object\": {\"sha\": \"\"}}"), GetReposByOwnerByRepo: mockResponse(t, http.StatusOK, "{\"name\": \"repo\", \"default_branch\": \"main\"}"), GetReposContentsByOwnerByRepoByPath: func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusOK) + // PNG magic bytes followed by some data + pngContent := []byte("\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01") + encodedContent := base64.StdEncoding.EncodeToString(pngContent) fileContent := &github.RepositoryContent{ - Name: github.Ptr("test.png"), - Path: github.Ptr("test.png"), - SHA: github.Ptr("def456"), - Type: github.Ptr("file"), + Name: github.Ptr("test.png"), + Path: github.Ptr("test.png"), + SHA: github.Ptr("def456"), + Type: github.Ptr("file"), + Content: github.Ptr(encodedContent), + Size: github.Ptr(len(pngContent)), + Encoding: github.Ptr("base64"), } contentBytes, _ := json.Marshal(fileContent) _, _ = w.Write(contentBytes) }, - GetRawReposContentsByOwnerByRepoByBranchByPath: func(w http.ResponseWriter, _ *http.Request) { - w.Header().Set("Content-Type", "image/png") - _, _ = w.Write(mockRawContent) - }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "test.png", @@ -135,32 +138,34 @@ func Test_GetFileContents(t *testing.T) { expectError: false, expectedResult: mcp.ResourceContents{ URI: "repo://owner/repo/refs/heads/main/contents/test.png", - Blob: mockRawContent, + Blob: []byte(base64.StdEncoding.EncodeToString([]byte("\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01"))), MIMEType: "image/png", }, }, { - name: "successful PDF file content fetch", + name: "successful binary file content fetch (PDF)", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposGitRefByOwnerByRepoByRef: mockResponse(t, http.StatusOK, "{\"ref\": \"refs/heads/main\", \"object\": {\"sha\": \"\"}}"), GetReposByOwnerByRepo: mockResponse(t, http.StatusOK, "{\"name\": \"repo\", 
\"default_branch\": \"main\"}"), GetReposContentsByOwnerByRepoByPath: func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusOK) + // PDF magic bytes + pdfContent := []byte("%PDF-1.4 fake pdf content") + encodedContent := base64.StdEncoding.EncodeToString(pdfContent) fileContent := &github.RepositoryContent{ - Name: github.Ptr("document.pdf"), - Path: github.Ptr("document.pdf"), - SHA: github.Ptr("pdf123"), - Type: github.Ptr("file"), + Name: github.Ptr("document.pdf"), + Path: github.Ptr("document.pdf"), + SHA: github.Ptr("pdf123"), + Type: github.Ptr("file"), + Content: github.Ptr(encodedContent), + Size: github.Ptr(len(pdfContent)), + Encoding: github.Ptr("base64"), } contentBytes, _ := json.Marshal(fileContent) _, _ = w.Write(contentBytes) }, - GetRawReposContentsByOwnerByRepoByBranchByPath: func(w http.ResponseWriter, _ *http.Request) { - w.Header().Set("Content-Type", "application/pdf") - _, _ = w.Write(mockRawContent) - }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "document.pdf", @@ -169,7 +174,7 @@ func Test_GetFileContents(t *testing.T) { expectError: false, expectedResult: mcp.ResourceContents{ URI: "repo://owner/repo/refs/heads/main/contents/document.pdf", - Blob: mockRawContent, + Blob: []byte(base64.StdEncoding.EncodeToString([]byte("%PDF-1.4 fake pdf content"))), MIMEType: "application/pdf", }, }, @@ -185,7 +190,7 @@ func Test_GetFileContents(t *testing.T) { mockResponse(t, http.StatusNotFound, nil), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "src/", @@ -200,21 +205,22 @@ func Test_GetFileContents(t *testing.T) { GetReposByOwnerByRepo: mockResponse(t, http.StatusOK, "{\"name\": \"repo\", \"default_branch\": \"main\"}"), GetReposContentsByOwnerByRepoByPath: func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusOK) + // Base64 encode the content as GitHub API does + 
encodedContent := base64.StdEncoding.EncodeToString(mockRawContent) fileContent := &github.RepositoryContent{ - Name: github.Ptr("README.md"), - Path: github.Ptr("README.md"), - SHA: github.Ptr("abc123"), - Type: github.Ptr("file"), + Name: github.Ptr("README.md"), + Path: github.Ptr("README.md"), + SHA: github.Ptr("abc123"), + Type: github.Ptr("file"), + Content: github.Ptr(encodedContent), + Size: github.Ptr(len(mockRawContent)), + Encoding: github.Ptr("base64"), } contentBytes, _ := json.Marshal(fileContent) _, _ = w.Write(contentBytes) }, - GetRawReposContentsByOwnerByRepoByBranchByPath: func(w http.ResponseWriter, _ *http.Request) { - w.Header().Set("Content-Type", "text/markdown") - _, _ = w.Write(mockRawContent) - }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "/README.md", @@ -224,7 +230,7 @@ func Test_GetFileContents(t *testing.T) { expectedResult: mcp.ResourceContents{ URI: "repo://owner/repo/refs/heads/main/contents/README.md", Text: "# Test Repository\n\nThis is a test repository.", - MIMEType: "text/markdown", + MIMEType: "text/plain; charset=utf-8", }, }, { @@ -239,7 +245,7 @@ func Test_GetFileContents(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) case strings.Contains(path, "heads/develop"): w.WriteHeader(http.StatusOK) - _, _ = w.Write([]byte(`{"ref": "refs/heads/develop", "object": {"sha": "abc123def456", "type": "commit", "url": "https://api.github.com/repos/owner/repo/git/commits/abc123def456"}}`)) + _, _ = w.Write([]byte(`{"ref": "refs/heads/develop", "object": {"sha": "abc123def456abc123def456abc123def456abc1", "type": "commit", "url": "https://api.github.com/repos/owner/repo/git/commits/abc123def456abc123def456abc123def456abc1"}}`)) default: w.WriteHeader(http.StatusNotFound) _, _ = w.Write([]byte(`{"message": "Not Found"}`)) @@ -253,7 +259,7 @@ func Test_GetFileContents(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) case 
strings.Contains(path, "heads/develop"): w.WriteHeader(http.StatusOK) - _, _ = w.Write([]byte(`{"ref": "refs/heads/develop", "object": {"sha": "abc123def456", "type": "commit", "url": "https://api.github.com/repos/owner/repo/git/commits/abc123def456"}}`)) + _, _ = w.Write([]byte(`{"ref": "refs/heads/develop", "object": {"sha": "abc123def456abc123def456abc123def456abc1", "type": "commit", "url": "https://api.github.com/repos/owner/repo/git/commits/abc123def456abc123def456abc123def456abc1"}}`)) default: w.WriteHeader(http.StatusNotFound) _, _ = w.Write([]byte(`{"message": "Not Found"}`)) @@ -267,7 +273,7 @@ func Test_GetFileContents(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) case strings.Contains(path, "heads/develop"): w.WriteHeader(http.StatusOK) - _, _ = w.Write([]byte(`{"ref": "refs/heads/develop", "object": {"sha": "abc123def456", "type": "commit", "url": "https://api.github.com/repos/owner/repo/git/commits/abc123def456"}}`)) + _, _ = w.Write([]byte(`{"ref": "refs/heads/develop", "object": {"sha": "abc123def456abc123def456abc123def456abc1", "type": "commit", "url": "https://api.github.com/repos/owner/repo/git/commits/abc123def456abc123def456abc123def456abc1"}}`)) default: w.WriteHeader(http.StatusNotFound) _, _ = w.Write([]byte(`{"message": "Not Found"}`)) @@ -279,33 +285,26 @@ func Test_GetFileContents(t *testing.T) { }, "GET /repos/owner/repo/git/ref/heads/develop": func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusOK) - _, _ = w.Write([]byte(`{"ref": "refs/heads/develop", "object": {"sha": "abc123def456", "type": "commit", "url": "https://api.github.com/repos/owner/repo/git/commits/abc123def456"}}`)) + _, _ = w.Write([]byte(`{"ref": "refs/heads/develop", "object": {"sha": "abc123def456abc123def456abc123def456abc1", "type": "commit", "url": "https://api.github.com/repos/owner/repo/git/commits/abc123def456abc123def456abc123def456abc1"}}`)) }, GetReposContentsByOwnerByRepoByPath: func(w http.ResponseWriter, _ 
*http.Request) { w.WriteHeader(http.StatusOK) + // Base64 encode the content as GitHub API does + encodedContent := base64.StdEncoding.EncodeToString(mockRawContent) fileContent := &github.RepositoryContent{ - Name: github.Ptr("README.md"), - Path: github.Ptr("README.md"), - SHA: github.Ptr("abc123"), - Type: github.Ptr("file"), + Name: github.Ptr("README.md"), + Path: github.Ptr("README.md"), + SHA: github.Ptr("abc123"), + Type: github.Ptr("file"), + Content: github.Ptr(encodedContent), + Size: github.Ptr(len(mockRawContent)), + Encoding: github.Ptr("base64"), } contentBytes, _ := json.Marshal(fileContent) _, _ = w.Write(contentBytes) }, - "GET /owner/repo/refs/heads/develop/README.md": func(w http.ResponseWriter, _ *http.Request) { - w.Header().Set("Content-Type", "text/markdown") - _, _ = w.Write(mockRawContent) - }, - "GET /owner/repo/refs%2Fheads%2Fdevelop/README.md": func(w http.ResponseWriter, _ *http.Request) { - w.Header().Set("Content-Type", "text/markdown") - _, _ = w.Write(mockRawContent) - }, - "GET /owner/repo/abc123def456/README.md": func(w http.ResponseWriter, _ *http.Request) { - w.Header().Set("Content-Type", "text/markdown") - _, _ = w.Write(mockRawContent) - }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "README.md", @@ -313,12 +312,45 @@ func Test_GetFileContents(t *testing.T) { }, expectError: false, expectedResult: mcp.ResourceContents{ - URI: "repo://owner/repo/abc123def456/contents/README.md", + URI: "repo://owner/repo/sha/abc123def456abc123def456abc123def456abc1/contents/README.md", Text: "# Test Repository\n\nThis is a test repository.", - MIMEType: "text/markdown", + MIMEType: "text/plain; charset=utf-8", }, expectedMsg: " Note: the provided ref 'main' does not exist, default branch 'refs/heads/develop' was used instead.", }, + { + name: "large file returns ResourceLink", + mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ + 
GetReposGitRefByOwnerByRepoByRef: mockResponse(t, http.StatusOK, "{\"ref\": \"refs/heads/main\", \"object\": {\"sha\": \"\"}}"), + GetReposByOwnerByRepo: mockResponse(t, http.StatusOK, "{\"name\": \"repo\", \"default_branch\": \"main\"}"), + GetReposContentsByOwnerByRepoByPath: func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusOK) + // File larger than 1MB - Contents API returns metadata but no content + fileContent := &github.RepositoryContent{ + Name: github.Ptr("large-file.bin"), + Path: github.Ptr("large-file.bin"), + SHA: github.Ptr("largesha123"), + Type: github.Ptr("file"), + Size: github.Ptr(2 * 1024 * 1024), // 2MB + DownloadURL: github.Ptr("https://raw.githubusercontent.com/owner/repo/main/large-file.bin"), + } + contentBytes, _ := json.Marshal(fileContent) + _, _ = w.Write(contentBytes) + }, + }), + requestArgs: map[string]any{ + "owner": "owner", + "repo": "repo", + "path": "large-file.bin", + "ref": "refs/heads/main", + }, + expectError: false, + expectedResult: &mcp.ResourceLink{ + URI: "repo://owner/repo/refs/heads/main/contents/large-file.bin", + Name: "large-file.bin", + Title: "File: large-file.bin", + }, + }, { name: "content fetch fails", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ @@ -332,7 +364,7 @@ func Test_GetFileContents(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "nonexistent.md", @@ -395,6 +427,14 @@ func Test_GetFileContents(t *testing.T) { assert.Equal(t, *expected[i].Path, *content.Path) assert.Equal(t, *expected[i].Type, *content.Type) } + case *mcp.ResourceLink: + // Large file returns a ResourceLink + require.Len(t, result.Content, 2) + resourceLink, ok := result.Content[1].(*mcp.ResourceLink) + require.True(t, ok, "expected Content[1] to be ResourceLink") + assert.Equal(t, expected.URI, resourceLink.URI) + assert.Equal(t, expected.Name, 
resourceLink.Name) + assert.Equal(t, expected.Title, resourceLink.Title) case mcp.TextContent: textContent := getErrorResult(t, result) require.Equal(t, textContent, expected) @@ -436,7 +476,7 @@ func Test_ForkRepository(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedRepo *github.Repository expectedErrMsg string @@ -446,7 +486,7 @@ func Test_ForkRepository(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ PostReposForksByOwnerByRepo: mockResponse(t, http.StatusAccepted, mockForkedRepo), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -461,7 +501,7 @@ func Test_ForkRepository(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Forbidden"}`)) }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -546,7 +586,7 @@ func Test_CreateBranch(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedRef *github.Reference expectedErrMsg string @@ -558,7 +598,7 @@ func Test_CreateBranch(t *testing.T) { "GET /repos/owner/repo/git/ref/heads/main": mockResponse(t, http.StatusOK, mockSourceRef), PostReposGitRefsByOwnerByRepo: mockResponse(t, http.StatusCreated, mockCreatedRef), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "branch": "new-feature", @@ -573,14 +613,14 @@ func Test_CreateBranch(t *testing.T) { GetReposByOwnerByRepo: mockResponse(t, http.StatusOK, mockRepo), GetReposGitRefByOwnerByRepoByRef: mockResponse(t, http.StatusOK, mockSourceRef), "GET /repos/owner/repo/git/ref/heads/main": mockResponse(t, http.StatusOK, mockSourceRef), - PostReposGitRefsByOwnerByRepo: expectRequestBody(t, map[string]interface{}{ + 
PostReposGitRefsByOwnerByRepo: expectRequestBody(t, map[string]any{ "ref": "refs/heads/new-feature", "sha": "abc123def456", }).andThen( mockResponse(t, http.StatusCreated, mockCreatedRef), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "branch": "new-feature", @@ -596,7 +636,7 @@ func Test_CreateBranch(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Repository not found"}`)) }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "nonexistent-repo", "branch": "new-feature", @@ -612,7 +652,7 @@ func Test_CreateBranch(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Reference not found"}`)) }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "branch": "new-feature", @@ -631,7 +671,7 @@ func Test_CreateBranch(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Reference already exists"}`)) }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "branch": "existing-branch", @@ -732,7 +772,7 @@ func Test_GetCommit(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedCommit *github.RepositoryCommit expectedErrMsg string @@ -742,7 +782,7 @@ func Test_GetCommit(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposCommitsByOwnerByRepoByRef: mockResponse(t, http.StatusOK, mockCommit), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "sha": "abc123def456", @@ -758,7 +798,7 @@ func Test_GetCommit(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "sha": "nonexistent-sha", @@ -908,7 +948,7 @@ func Test_ListCommits(t 
*testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedCommits []*github.RepositoryCommit expectedErrMsg string @@ -918,7 +958,7 @@ func Test_ListCommits(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposCommitsByOwnerByRepo: mockResponse(t, http.StatusOK, mockCommits), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -937,7 +977,7 @@ func Test_ListCommits(t *testing.T) { mockResponse(t, http.StatusOK, mockCommits), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "sha": "main", @@ -956,7 +996,7 @@ func Test_ListCommits(t *testing.T) { mockResponse(t, http.StatusOK, mockCommits), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "page": float64(2), @@ -973,7 +1013,7 @@ func Test_ListCommits(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "nonexistent-repo", }, @@ -1080,7 +1120,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedContent *github.RepositoryContentResponse expectedErrMsg string @@ -1088,14 +1128,14 @@ func Test_CreateOrUpdateFile(t *testing.T) { { name: "successful file creation", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PutReposContentsByOwnerByRepoByPath: expectRequestBody(t, map[string]interface{}{ + PutReposContentsByOwnerByRepoByPath: expectRequestBody(t, map[string]any{ "message": "Add example file", "content": "IyBFeGFtcGxlCgpUaGlzIGlzIGFuIGV4YW1wbGUgZmlsZS4=", // Base64 encoded content "branch": "main", }).andThen( 
mockResponse(t, http.StatusOK, mockFileResponse), ), - "PUT /repos/{owner}/{repo}/contents/{path:.*}": expectRequestBody(t, map[string]interface{}{ + "PUT /repos/{owner}/{repo}/contents/{path:.*}": expectRequestBody(t, map[string]any{ "message": "Add example file", "content": "IyBFeGFtcGxlCgpUaGlzIGlzIGFuIGV4YW1wbGUgZmlsZS4=", // Base64 encoded content "branch": "main", @@ -1103,7 +1143,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { mockResponse(t, http.StatusOK, mockFileResponse), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "docs/example.md", @@ -1117,7 +1157,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { { name: "successful file update with SHA", mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ - PutReposContentsByOwnerByRepoByPath: expectRequestBody(t, map[string]interface{}{ + PutReposContentsByOwnerByRepoByPath: expectRequestBody(t, map[string]any{ "message": "Update example file", "content": "IyBVcGRhdGVkIEV4YW1wbGUKClRoaXMgZmlsZSBoYXMgYmVlbiB1cGRhdGVkLg==", // Base64 encoded content "branch": "main", @@ -1125,7 +1165,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { }).andThen( mockResponse(t, http.StatusOK, mockFileResponse), ), - "PUT /repos/{owner}/{repo}/contents/{path:.*}": expectRequestBody(t, map[string]interface{}{ + "PUT /repos/{owner}/{repo}/contents/{path:.*}": expectRequestBody(t, map[string]any{ "message": "Update example file", "content": "IyBVcGRhdGVkIEV4YW1wbGUKClRoaXMgZmlsZSBoYXMgYmVlbiB1cGRhdGVkLg==", // Base64 encoded content "branch": "main", @@ -1134,7 +1174,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { mockResponse(t, http.StatusOK, mockFileResponse), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "docs/example.md", @@ -1158,7 +1198,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Invalid request"}`)) }, }), - requestArgs: 
map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "docs/example.md", @@ -1190,7 +1230,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { w.Header().Set("ETag", `"abc123def456"`) } }, - PutReposContentsByOwnerByRepoByPath: expectRequestBody(t, map[string]interface{}{ + PutReposContentsByOwnerByRepoByPath: expectRequestBody(t, map[string]any{ "message": "Update example file", "content": "IyBVcGRhdGVkIEV4YW1wbGUKClRoaXMgZmlsZSBoYXMgYmVlbiB1cGRhdGVkLg==", "branch": "main", @@ -1198,7 +1238,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { }).andThen( mockResponse(t, http.StatusOK, mockFileResponse), ), - "PUT /repos/{owner}/{repo}/contents/{path:.*}": expectRequestBody(t, map[string]interface{}{ + "PUT /repos/{owner}/{repo}/contents/{path:.*}": expectRequestBody(t, map[string]any{ "message": "Update example file", "content": "IyBVcGRhdGVkIEV4YW1wbGUKClRoaXMgZmlsZSBoYXMgYmVlbiB1cGRhdGVkLg==", "branch": "main", @@ -1207,7 +1247,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { mockResponse(t, http.StatusOK, mockFileResponse), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "docs/example.md", @@ -1231,7 +1271,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { w.WriteHeader(http.StatusOK) }, }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "docs/example.md", @@ -1249,7 +1289,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { "HEAD /repos/owner/repo/contents/docs/example.md": func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusNotFound) }, - PutReposContentsByOwnerByRepoByPath: expectRequestBody(t, map[string]interface{}{ + PutReposContentsByOwnerByRepoByPath: expectRequestBody(t, map[string]any{ "message": "Create new file", "content": "IyBOZXcgRmlsZQoKVGhpcyBpcyBhIG5ldyBmaWxlLg==", "branch": "main", @@ -1260,7 +1300,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { "HEAD 
/repos/{owner}/{repo}/contents/{path:.*}": func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusNotFound) }, - "PUT /repos/{owner}/{repo}/contents/{path:.*}": expectRequestBody(t, map[string]interface{}{ + "PUT /repos/{owner}/{repo}/contents/{path:.*}": expectRequestBody(t, map[string]any{ "message": "Create new file", "content": "IyBOZXcgRmlsZQoKVGhpcyBpcyBhIG5ldyBmaWxlLg==", "branch": "main", @@ -1269,7 +1309,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { mockResponse(t, http.StatusCreated, mockFileResponse), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "docs/example.md", @@ -1288,7 +1328,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { w.Header().Set("ETag", `"existing123"`) w.WriteHeader(http.StatusOK) }, - PutReposContentsByOwnerByRepoByPath: expectRequestBody(t, map[string]interface{}{ + PutReposContentsByOwnerByRepoByPath: expectRequestBody(t, map[string]any{ "message": "Update without SHA", "content": "IyBVcGRhdGVkCgpVcGRhdGVkIHdpdGhvdXQgU0hBLg==", "branch": "main", @@ -1300,7 +1340,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { w.Header().Set("ETag", `"existing123"`) w.WriteHeader(http.StatusOK) }, - "PUT /repos/{owner}/{repo}/contents/{path:.*}": expectRequestBody(t, map[string]interface{}{ + "PUT /repos/{owner}/{repo}/contents/{path:.*}": expectRequestBody(t, map[string]any{ "message": "Update without SHA", "content": "IyBVcGRhdGVkCgpVcGRhdGVkIHdpdGhvdXQgU0hBLg==", "branch": "main", @@ -1309,7 +1349,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { mockResponse(t, http.StatusOK, mockFileResponse), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "docs/example.md", @@ -1326,7 +1366,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { "HEAD /repos/owner/repo/contents/docs/example.md": func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusNotFound) }, - 
PutReposContentsByOwnerByRepoByPath: expectRequestBody(t, map[string]interface{}{ + PutReposContentsByOwnerByRepoByPath: expectRequestBody(t, map[string]any{ "message": "Create new file", "content": "IyBOZXcgRmlsZQoKQ3JlYXRlZCB3aXRob3V0IFNIQQ==", "branch": "main", @@ -1336,7 +1376,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { "HEAD /repos/{owner}/{repo}/contents/{path:.*}": func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusNotFound) }, - "PUT /repos/{owner}/{repo}/contents/{path:.*}": expectRequestBody(t, map[string]interface{}{ + "PUT /repos/{owner}/{repo}/contents/{path:.*}": expectRequestBody(t, map[string]any{ "message": "Create new file", "content": "IyBOZXcgRmlsZQoKQ3JlYXRlZCB3aXRob3V0IFNIQQ==", "branch": "main", @@ -1344,7 +1384,7 @@ func Test_CreateOrUpdateFile(t *testing.T) { mockResponse(t, http.StatusCreated, mockFileResponse), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "docs/example.md", @@ -1394,18 +1434,27 @@ func Test_CreateOrUpdateFile(t *testing.T) { } // Unmarshal and verify the result - var returnedContent github.RepositoryContentResponse + var returnedContent MinimalFileContentResponse err = json.Unmarshal([]byte(textContent.Text), &returnedContent) require.NoError(t, err) // Verify content - assert.Equal(t, *tc.expectedContent.Content.Name, *returnedContent.Content.Name) - assert.Equal(t, *tc.expectedContent.Content.Path, *returnedContent.Content.Path) - assert.Equal(t, *tc.expectedContent.Content.SHA, *returnedContent.Content.SHA) + assert.Equal(t, tc.expectedContent.Content.GetName(), returnedContent.Content.Name) + assert.Equal(t, tc.expectedContent.Content.GetPath(), returnedContent.Content.Path) + assert.Equal(t, tc.expectedContent.Content.GetSHA(), returnedContent.Content.SHA) + assert.Equal(t, tc.expectedContent.Content.GetSize(), returnedContent.Content.Size) + assert.Equal(t, tc.expectedContent.Content.GetHTMLURL(), 
returnedContent.Content.HTMLURL) // Verify commit - assert.Equal(t, *tc.expectedContent.Commit.SHA, *returnedContent.Commit.SHA) - assert.Equal(t, *tc.expectedContent.Commit.Message, *returnedContent.Commit.Message) + assert.Equal(t, tc.expectedContent.Commit.GetSHA(), returnedContent.Commit.SHA) + assert.Equal(t, tc.expectedContent.Commit.GetMessage(), returnedContent.Commit.Message) + assert.Equal(t, tc.expectedContent.Commit.GetHTMLURL(), returnedContent.Commit.HTMLURL) + + // Verify commit author + require.NotNil(t, returnedContent.Commit.Author) + assert.Equal(t, tc.expectedContent.Commit.Author.GetName(), returnedContent.Commit.Author.Name) + assert.Equal(t, tc.expectedContent.Commit.Author.GetEmail(), returnedContent.Commit.Author.Email) + assert.NotEmpty(t, returnedContent.Commit.Author.Date) }) } } @@ -1443,7 +1492,7 @@ func Test_CreateRepository(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedRepo *github.Repository expectedErrMsg string @@ -1453,7 +1502,7 @@ func Test_CreateRepository(t *testing.T) { mockedClient: NewMockedHTTPClient( WithRequestMatchHandler( EndpointPattern("POST /user/repos"), - expectRequestBody(t, map[string]interface{}{ + expectRequestBody(t, map[string]any{ "name": "test-repo", "description": "Test repository", "private": true, @@ -1463,7 +1512,7 @@ func Test_CreateRepository(t *testing.T) { ), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "name": "test-repo", "description": "Test repository", "private": true, @@ -1477,7 +1526,7 @@ func Test_CreateRepository(t *testing.T) { mockedClient: NewMockedHTTPClient( WithRequestMatchHandler( EndpointPattern("POST /orgs/testorg/repos"), - expectRequestBody(t, map[string]interface{}{ + expectRequestBody(t, map[string]any{ "name": "test-repo", "description": "Test repository", "private": false, @@ -1487,7 +1536,7 @@ func Test_CreateRepository(t 
*testing.T) { ), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "name": "test-repo", "description": "Test repository", "organization": "testorg", @@ -1502,7 +1551,7 @@ func Test_CreateRepository(t *testing.T) { mockedClient: NewMockedHTTPClient( WithRequestMatchHandler( EndpointPattern("POST /user/repos"), - expectRequestBody(t, map[string]interface{}{ + expectRequestBody(t, map[string]any{ "name": "test-repo", "auto_init": false, "description": "", @@ -1512,7 +1561,7 @@ func Test_CreateRepository(t *testing.T) { ), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "name": "test-repo", }, expectError: false, @@ -1529,7 +1578,7 @@ func Test_CreateRepository(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "name": "invalid-repo", }, expectError: true, @@ -1634,7 +1683,7 @@ func Test_PushFiles(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedRef *github.Reference expectedErrMsg string @@ -1655,16 +1704,16 @@ func Test_PushFiles(t *testing.T) { // Create tree WithRequestMatchHandler( PostReposGitTreesByOwnerByRepo, - expectRequestBody(t, map[string]interface{}{ + expectRequestBody(t, map[string]any{ "base_tree": "def456", - "tree": []interface{}{ - map[string]interface{}{ + "tree": []any{ + map[string]any{ "path": "README.md", "mode": "100644", "type": "blob", "content": "# Updated README\n\nThis is an updated README file.", }, - map[string]interface{}{ + map[string]any{ "path": "docs/example.md", "mode": "100644", "type": "blob", @@ -1678,10 +1727,10 @@ func Test_PushFiles(t *testing.T) { // Create commit WithRequestMatchHandler( PostReposGitCommitsByOwnerByRepo, - expectRequestBody(t, map[string]interface{}{ + expectRequestBody(t, map[string]any{ "message": "Update multiple files", "tree": "ghi789", - "parents": []interface{}{"abc123"}, + "parents": 
[]any{"abc123"}, }).andThen( mockResponse(t, http.StatusCreated, mockNewCommit), ), @@ -1689,7 +1738,7 @@ func Test_PushFiles(t *testing.T) { // Update reference WithRequestMatchHandler( PatchReposGitRefsByOwnerByRepoByRef, - expectRequestBody(t, map[string]interface{}{ + expectRequestBody(t, map[string]any{ "sha": "jkl012", "force": false, }).andThen( @@ -1697,16 +1746,16 @@ func Test_PushFiles(t *testing.T) { ), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "branch": "main", - "files": []interface{}{ - map[string]interface{}{ + "files": []any{ + map[string]any{ "path": "README.md", "content": "# Updated README\n\nThis is an updated README file.", }, - map[string]interface{}{ + map[string]any{ "path": "docs/example.md", "content": "# Example\n\nThis is an example file.", }, @@ -1721,7 +1770,7 @@ func Test_PushFiles(t *testing.T) { mockedClient: NewMockedHTTPClient( // No requests expected ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "branch": "main", @@ -1745,12 +1794,12 @@ func Test_PushFiles(t *testing.T) { mockCommit, ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "branch": "main", - "files": []interface{}{ - map[string]interface{}{ + "files": []any{ + map[string]any{ "content": "# Missing path", }, }, @@ -1773,12 +1822,12 @@ func Test_PushFiles(t *testing.T) { mockCommit, ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "branch": "main", - "files": []interface{}{ - map[string]interface{}{ + "files": []any{ + map[string]any{ "path": "README.md", // Missing content }, @@ -1801,12 +1850,12 @@ func Test_PushFiles(t *testing.T) { mockResponse(t, http.StatusNotFound, nil), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "branch": "non-existent-branch", - 
"files": []interface{}{ - map[string]interface{}{ + "files": []any{ + map[string]any{ "path": "README.md", "content": "# README", }, @@ -1830,12 +1879,12 @@ func Test_PushFiles(t *testing.T) { mockResponse(t, http.StatusNotFound, nil), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "branch": "main", - "files": []interface{}{ - map[string]interface{}{ + "files": []any{ + map[string]any{ "path": "README.md", "content": "# README", }, @@ -1864,12 +1913,12 @@ func Test_PushFiles(t *testing.T) { mockResponse(t, http.StatusInternalServerError, nil), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "branch": "main", - "files": []interface{}{ - map[string]interface{}{ + "files": []any{ + map[string]any{ "path": "README.md", "content": "# README", }, @@ -1893,7 +1942,7 @@ func Test_PushFiles(t *testing.T) { if callCount == 1 { // First call: empty repo w.WriteHeader(http.StatusConflict) - response := map[string]interface{}{ + response := map[string]any{ "message": "Git Repository is empty.", } _ = json.NewEncoder(w).Encode(response) @@ -1916,7 +1965,7 @@ func Test_PushFiles(t *testing.T) { WithRequestMatchHandler( PutReposContentsByOwnerByRepoByPath, http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - var body map[string]interface{} + var body map[string]any err := json.NewDecoder(r.Body).Decode(&body) require.NoError(t, err) require.Equal(t, "Initial commit", body["message"]) @@ -1950,12 +1999,12 @@ func Test_PushFiles(t *testing.T) { mockUpdatedRef, ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "branch": "main", - "files": []interface{}{ - map[string]interface{}{ + "files": []any{ + map[string]any{ "path": "README.md", "content": "# Initial README\n\nFirst commit to empty repository.", }, @@ -1979,7 +2028,7 @@ func Test_PushFiles(t *testing.T) { // First call: returns 409 
Conflict for empty repo w.Header().Set("Content-Type", "application/json") w.WriteHeader(http.StatusConflict) - response := map[string]interface{}{ + response := map[string]any{ "message": "Git Repository is empty.", } _ = json.NewEncoder(w).Encode(response) @@ -2006,7 +2055,7 @@ func Test_PushFiles(t *testing.T) { WithRequestMatchHandler( PutReposContentsByOwnerByRepoByPath, http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - var body map[string]interface{} + var body map[string]any err := json.NewDecoder(r.Body).Decode(&body) require.NoError(t, err) require.Equal(t, "Initial commit", body["message"]) @@ -2048,22 +2097,22 @@ func Test_PushFiles(t *testing.T) { // Create tree with all user files WithRequestMatchHandler( PostReposGitTreesByOwnerByRepo, - expectRequestBody(t, map[string]interface{}{ + expectRequestBody(t, map[string]any{ "base_tree": "tree456", - "tree": []interface{}{ - map[string]interface{}{ + "tree": []any{ + map[string]any{ "path": "README.md", "mode": "100644", "type": "blob", "content": "# Project\n\nProject README", }, - map[string]interface{}{ + map[string]any{ "path": ".gitignore", "mode": "100644", "type": "blob", "content": "node_modules/\n*.log\n", }, - map[string]interface{}{ + map[string]any{ "path": "src/main.js", "mode": "100644", "type": "blob", @@ -2077,10 +2126,10 @@ func Test_PushFiles(t *testing.T) { // Create commit with all user files WithRequestMatchHandler( PostReposGitCommitsByOwnerByRepo, - expectRequestBody(t, map[string]interface{}{ + expectRequestBody(t, map[string]any{ "message": "Initial project setup", "tree": "ghi789", - "parents": []interface{}{"init456"}, + "parents": []any{"init456"}, }).andThen( mockResponse(t, http.StatusCreated, mockNewCommit), ), @@ -2088,7 +2137,7 @@ func Test_PushFiles(t *testing.T) { // Update reference WithRequestMatchHandler( PatchReposGitRefsByOwnerByRepoByRef, - expectRequestBody(t, map[string]interface{}{ + expectRequestBody(t, map[string]any{ "sha": "jkl012", "force": 
false, }).andThen( @@ -2096,20 +2145,20 @@ func Test_PushFiles(t *testing.T) { ), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "branch": "main", - "files": []interface{}{ - map[string]interface{}{ + "files": []any{ + map[string]any{ "path": "README.md", "content": "# Project\n\nProject README", }, - map[string]interface{}{ + map[string]any{ "path": ".gitignore", "content": "node_modules/\n*.log\n", }, - map[string]interface{}{ + map[string]any{ "path": "src/main.js", "content": "console.log('Hello World');\n", }, @@ -2128,7 +2177,7 @@ func Test_PushFiles(t *testing.T) { http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.Header().Set("Content-Type", "application/json") w.WriteHeader(http.StatusConflict) - response := map[string]interface{}{ + response := map[string]any{ "message": "Git Repository is empty.", } _ = json.NewEncoder(w).Encode(response) @@ -2147,12 +2196,12 @@ func Test_PushFiles(t *testing.T) { mockResponse(t, http.StatusInternalServerError, nil), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "branch": "main", - "files": []interface{}{ - map[string]interface{}{ + "files": []any{ + map[string]any{ "path": "README.md", "content": "# README", }, @@ -2176,7 +2225,7 @@ func Test_PushFiles(t *testing.T) { // First call: returns 409 Conflict for empty repo w.Header().Set("Content-Type", "application/json") w.WriteHeader(http.StatusConflict) - response := map[string]interface{}{ + response := map[string]any{ "message": "Git Repository is empty.", } _ = json.NewEncoder(w).Encode(response) @@ -2203,12 +2252,12 @@ func Test_PushFiles(t *testing.T) { }, ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "branch": "main", - "files": []interface{}{ - map[string]interface{}{ + "files": []any{ + map[string]any{ "path": "README.md", "content": "# README", }, @@ -2227,7 
+2276,7 @@ func Test_PushFiles(t *testing.T) { http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.Header().Set("Content-Type", "application/json") w.WriteHeader(http.StatusConflict) - response := map[string]interface{}{ + response := map[string]any{ "message": "Git Repository is empty.", } _ = json.NewEncoder(w).Encode(response) @@ -2254,16 +2303,16 @@ func Test_PushFiles(t *testing.T) { mockResponse(t, http.StatusInternalServerError, nil), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "branch": "main", - "files": []interface{}{ - map[string]interface{}{ + "files": []any{ + map[string]any{ "path": "README.md", "content": "# README", }, - map[string]interface{}{ + map[string]any{ "path": "LICENSE", "content": "MIT", }, @@ -2356,14 +2405,14 @@ func Test_ListBranches(t *testing.T) { // Test cases tests := []struct { name string - args map[string]interface{} + args map[string]any mockResponses []MockBackendOption wantErr bool errContains string }{ { name: "success", - args: map[string]interface{}{ + args: map[string]any{ "owner": "owner", "repo": "repo", "page": float64(2), @@ -2378,7 +2427,7 @@ func Test_ListBranches(t *testing.T) { }, { name: "missing owner", - args: map[string]interface{}{ + args: map[string]any{ "repo": "repo", }, mockResponses: []MockBackendOption{}, @@ -2387,7 +2436,7 @@ func Test_ListBranches(t *testing.T) { }, { name: "missing repo", - args: map[string]interface{}{ + args: map[string]any{ "owner": "owner", }, mockResponses: []MockBackendOption{}, @@ -2489,7 +2538,7 @@ func Test_DeleteFile(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedCommitSHA string expectedErrMsg string @@ -2510,10 +2559,10 @@ func Test_DeleteFile(t *testing.T) { // Create tree WithRequestMatchHandler( PostReposGitTreesByOwnerByRepo, - expectRequestBody(t, map[string]interface{}{ + 
expectRequestBody(t, map[string]any{ "base_tree": "def456", - "tree": []interface{}{ - map[string]interface{}{ + "tree": []any{ + map[string]any{ "path": "docs/example.md", "mode": "100644", "type": "blob", @@ -2527,10 +2576,10 @@ func Test_DeleteFile(t *testing.T) { // Create commit WithRequestMatchHandler( PostReposGitCommitsByOwnerByRepo, - expectRequestBody(t, map[string]interface{}{ + expectRequestBody(t, map[string]any{ "message": "Delete example file", "tree": "ghi789", - "parents": []interface{}{"abc123"}, + "parents": []any{"abc123"}, }).andThen( mockResponse(t, http.StatusCreated, mockNewCommit), ), @@ -2538,7 +2587,7 @@ func Test_DeleteFile(t *testing.T) { // Update reference WithRequestMatchHandler( PatchReposGitRefsByOwnerByRepoByRef, - expectRequestBody(t, map[string]interface{}{ + expectRequestBody(t, map[string]any{ "sha": "jkl012", "force": false, }).andThen( @@ -2551,7 +2600,7 @@ func Test_DeleteFile(t *testing.T) { ), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "docs/example.md", @@ -2572,7 +2621,7 @@ func Test_DeleteFile(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "path": "docs/nonexistent.md", @@ -2612,12 +2661,12 @@ func Test_DeleteFile(t *testing.T) { textContent := getTextResult(t, result) // Unmarshal and verify the result - var response map[string]interface{} + var response map[string]any err = json.Unmarshal([]byte(textContent.Text), &response) require.NoError(t, err) // Verify the response contains the expected commit - commit, ok := response["commit"].(map[string]interface{}) + commit, ok := response["commit"].(map[string]any) require.True(t, ok) commitSHA, ok := commit["sha"].(string) require.True(t, ok) @@ -2666,7 +2715,7 @@ func Test_ListTags(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs 
map[string]any expectError bool expectedTags []*github.RepositoryTag expectedErrMsg string @@ -2684,7 +2733,7 @@ func Test_ListTags(t *testing.T) { ), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -2702,7 +2751,7 @@ func Test_ListTags(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -2792,7 +2841,7 @@ func Test_GetTag(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedTag *github.Tag expectedErrMsg string @@ -2819,7 +2868,7 @@ func Test_GetTag(t *testing.T) { ), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "tag": "v1.0.0", @@ -2838,7 +2887,7 @@ func Test_GetTag(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "tag": "v1.0.0", @@ -2861,7 +2910,7 @@ func Test_GetTag(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "tag": "v1.0.0", @@ -2945,7 +2994,7 @@ func Test_ListReleases(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedResult []*github.RepositoryRelease expectedErrMsg string @@ -2958,7 +3007,7 @@ func Test_ListReleases(t *testing.T) { mockReleases, ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -2976,7 +3025,7 @@ func Test_ListReleases(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -3036,7 +3085,7 @@ func Test_GetLatestRelease(t *testing.T) { tests := []struct { name string mockedClient *http.Client - 
requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedResult *github.RepositoryRelease expectedErrMsg string @@ -3049,7 +3098,7 @@ func Test_GetLatestRelease(t *testing.T) { mockRelease, ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -3067,7 +3116,7 @@ func Test_GetLatestRelease(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -3133,7 +3182,7 @@ func Test_GetReleaseByTag(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedResult *github.RepositoryRelease expectedErrMsg string @@ -3146,7 +3195,7 @@ func Test_GetReleaseByTag(t *testing.T) { mockRelease, ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "tag": "v1.0.0", @@ -3157,7 +3206,7 @@ func Test_GetReleaseByTag(t *testing.T) { { name: "missing owner parameter", mockedClient: NewMockedHTTPClient(), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "repo": "repo", "tag": "v1.0.0", }, @@ -3167,7 +3216,7 @@ func Test_GetReleaseByTag(t *testing.T) { { name: "missing repo parameter", mockedClient: NewMockedHTTPClient(), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "tag": "v1.0.0", }, @@ -3177,7 +3226,7 @@ func Test_GetReleaseByTag(t *testing.T) { { name: "missing tag parameter", mockedClient: NewMockedHTTPClient(), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -3195,7 +3244,7 @@ func Test_GetReleaseByTag(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "tag": "v999.0.0", @@ -3214,7 +3263,7 @@ func Test_GetReleaseByTag(t *testing.T) { }), ), ), - 
requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "tag": "v1.0.0", @@ -3760,7 +3809,7 @@ func Test_ListStarredRepositories(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedErrMsg string expectedCount int @@ -3776,7 +3825,7 @@ func Test_ListStarredRepositories(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{}, + requestArgs: map[string]any{}, expectError: false, expectedCount: 2, }, @@ -3791,7 +3840,7 @@ func Test_ListStarredRepositories(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "username": "testuser", }, expectError: false, @@ -3808,7 +3857,7 @@ func Test_ListStarredRepositories(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{}, + requestArgs: map[string]any{}, expectError: true, expectedErrMsg: "failed to list starred repositories", }, @@ -3875,7 +3924,7 @@ func Test_StarRepository(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedErrMsg string }{ @@ -3889,7 +3938,7 @@ func Test_StarRepository(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "testowner", "repo": "testrepo", }, @@ -3906,7 +3955,7 @@ func Test_StarRepository(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "testowner", "repo": "nonexistent", }, @@ -3966,7 +4015,7 @@ func Test_UnstarRepository(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedErrMsg string }{ @@ -3980,7 +4029,7 @@ func Test_UnstarRepository(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": 
"testowner", "repo": "testrepo", }, @@ -3997,7 +4046,7 @@ func Test_UnstarRepository(t *testing.T) { }), ), ), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "testowner", "repo": "nonexistent", }, diff --git a/pkg/github/repository_resource.go b/pkg/github/repository_resource.go index 28ce63b46c..8b515d1b4a 100644 --- a/pkg/github/repository_resource.go +++ b/pkg/github/repository_resource.go @@ -17,7 +17,7 @@ import ( "github.com/github/github-mcp-server/pkg/octicons" "github.com/github/github-mcp-server/pkg/raw" "github.com/github/github-mcp-server/pkg/translations" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/modelcontextprotocol/go-sdk/mcp" "github.com/yosida95/uritemplate/v3" ) @@ -257,3 +257,54 @@ func RepositoryResourceContentsHandler(resourceURITemplate *uritemplate.Template } } } + +// expandRepoResourceURI builds a resource URI using the appropriate URI template +// based on the provided parameters (sha, ref, or default). 
+func expandRepoResourceURI(owner, repo, sha, ref string, pathParts []string) (string, error) { + baseValues := uritemplate.Values{ + "owner": uritemplate.String(owner), + "repo": uritemplate.String(repo), + "path": uritemplate.List(pathParts...), + } + + switch { + case sha != "": + baseValues["sha"] = uritemplate.String(sha) + return repositoryResourceCommitContentURITemplate.Expand(baseValues) + + case ref != "": + // Parse ref to determine which template to use + switch { + case strings.HasPrefix(ref, "refs/heads/"): + branch := strings.TrimPrefix(ref, "refs/heads/") + baseValues["branch"] = uritemplate.String(branch) + return repositoryResourceBranchContentURITemplate.Expand(baseValues) + + case strings.HasPrefix(ref, "refs/tags/"): + tag := strings.TrimPrefix(ref, "refs/tags/") + baseValues["tag"] = uritemplate.String(tag) + return repositoryResourceTagContentURITemplate.Expand(baseValues) + + case strings.HasPrefix(ref, "refs/pull/") && strings.HasSuffix(ref, "/head"): + // Extract PR number from "refs/pull/{number}/head" + prPart := strings.TrimPrefix(ref, "refs/pull/") + prNumber := strings.TrimSuffix(prPart, "/head") + baseValues["prNumber"] = uritemplate.String(prNumber) + return repositoryResourcePrContentURITemplate.Expand(baseValues) + + case looksLikeSHA(ref): + // ref is actually a SHA (e.g., from resolveGitReference) + baseValues["sha"] = uritemplate.String(ref) + return repositoryResourceCommitContentURITemplate.Expand(baseValues) + + default: + // For other refs (like a branch name without refs/heads/ prefix), + // treat it as a branch + baseValues["branch"] = uritemplate.String(ref) + return repositoryResourceBranchContentURITemplate.Expand(baseValues) + } + + default: + return repositoryResourceContentURITemplate.Expand(baseValues) + } +} diff --git a/pkg/github/repository_resource_completions.go b/pkg/github/repository_resource_completions.go index c70cfe9488..ff9e23398a 100644 --- a/pkg/github/repository_resource_completions.go +++ 
b/pkg/github/repository_resource_completions.go @@ -6,7 +6,7 @@ import ( "fmt" "strings" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/modelcontextprotocol/go-sdk/mcp" ) diff --git a/pkg/github/repository_resource_completions_test.go b/pkg/github/repository_resource_completions_test.go index b6f83f3216..e5f1a35f93 100644 --- a/pkg/github/repository_resource_completions_test.go +++ b/pkg/github/repository_resource_completions_test.go @@ -6,7 +6,7 @@ import ( "fmt" "testing" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/modelcontextprotocol/go-sdk/mcp" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -257,7 +257,7 @@ func TestRepositoryResourceCompletionHandler_MaxResults(t *testing.T) { RepositoryResourceArgumentResolvers["owner"] = func(_ context.Context, _ *github.Client, _ map[string]string, _ string) ([]string, error) { // Return 150 results results := make([]string, 150) - for i := 0; i < 150; i++ { + for i := range 150 { results[i] = fmt.Sprintf("user%d", i) } return results, nil diff --git a/pkg/github/repository_resource_test.go b/pkg/github/repository_resource_test.go index a3b3ca7545..b032554d8e 100644 --- a/pkg/github/repository_resource_test.go +++ b/pkg/github/repository_resource_test.go @@ -7,7 +7,7 @@ import ( "testing" "github.com/github/github-mcp-server/pkg/raw" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/modelcontextprotocol/go-sdk/mcp" "github.com/stretchr/testify/require" ) diff --git a/pkg/github/search.go b/pkg/github/search.go index 552fbfe781..d5ddb4a72a 100644 --- a/pkg/github/search.go +++ b/pkg/github/search.go @@ -12,7 +12,7 @@ import ( "github.com/github/github-mcp-server/pkg/scopes" "github.com/github/github-mcp-server/pkg/translations" "github.com/github/github-mcp-server/pkg/utils" - "github.com/google/go-github/v79/github" + 
"github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/modelcontextprotocol/go-sdk/mcp" ) diff --git a/pkg/github/search_test.go b/pkg/github/search_test.go index e15758c3e7..85eb21bcb5 100644 --- a/pkg/github/search_test.go +++ b/pkg/github/search_test.go @@ -8,7 +8,7 @@ import ( "github.com/github/github-mcp-server/internal/toolsnaps" "github.com/github/github-mcp-server/pkg/translations" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -59,7 +59,7 @@ func Test_SearchRepositories(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedResult *github.RepositoriesSearchResult expectedErrMsg string @@ -77,7 +77,7 @@ func Test_SearchRepositories(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "golang test", "sort": "stars", "order": "desc", @@ -98,7 +98,7 @@ func Test_SearchRepositories(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "golang test", }, expectError: false, @@ -112,7 +112,7 @@ func Test_SearchRepositories(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Invalid query"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "invalid:query", }, expectError: true, @@ -201,7 +201,7 @@ func Test_SearchRepositories_FullOutput(t *testing.T) { } handler := serverTool.Handler(deps) - args := map[string]interface{}{ + args := map[string]any{ "query": "golang test", "minimal_output": false, } @@ -271,7 +271,7 @@ func Test_SearchCode(t *testing.T) { tests := []struct { name string mockedClient *http.Client - 
requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedResult *github.CodeSearchResult expectedErrMsg string @@ -289,7 +289,7 @@ func Test_SearchCode(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "fmt.Println language:go", "sort": "indexed", "order": "desc", @@ -310,7 +310,7 @@ func Test_SearchCode(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "fmt.Println language:go", }, expectError: false, @@ -324,7 +324,7 @@ func Test_SearchCode(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Validation Failed"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "invalid:query", }, expectError: true, @@ -422,7 +422,7 @@ func Test_SearchUsers(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedResult *github.UsersSearchResult expectedErrMsg string @@ -440,7 +440,7 @@ func Test_SearchUsers(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "location:finland language:go", "sort": "followers", "order": "desc", @@ -461,7 +461,7 @@ func Test_SearchUsers(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "location:finland language:go", }, expectError: false, @@ -478,7 +478,7 @@ func Test_SearchUsers(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "type:user location:seattle followers:>100", }, expectError: false, @@ -495,7 +495,7 @@ func Test_SearchUsers(t *testing.T) { mockResponse(t, 
http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "type:user (location:seattle OR location:california) followers:>50", }, expectError: false, @@ -509,7 +509,7 @@ func Test_SearchUsers(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Validation Failed"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "invalid:query", }, expectError: true, @@ -608,7 +608,7 @@ func Test_SearchOrgs(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedResult *github.UsersSearchResult expectedErrMsg string @@ -624,7 +624,7 @@ func Test_SearchOrgs(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "github", }, expectError: false, @@ -641,7 +641,7 @@ func Test_SearchOrgs(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "type:org location:california followers:>1000", }, expectError: false, @@ -658,7 +658,7 @@ func Test_SearchOrgs(t *testing.T) { mockResponse(t, http.StatusOK, mockSearchResult), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "type:org (location:seattle OR location:california OR location:newyork) repos:>10", }, expectError: false, @@ -672,7 +672,7 @@ func Test_SearchOrgs(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Validation Failed"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "query": "invalid:query", }, expectError: true, diff --git a/pkg/github/search_utils.go b/pkg/github/search_utils.go index 1008200d19..c5502f6308 100644 --- a/pkg/github/search_utils.go +++ b/pkg/github/search_utils.go @@ -10,7 +10,7 @@ import ( ghErrors 
"github.com/github/github-mcp-server/pkg/errors" "github.com/github/github-mcp-server/pkg/utils" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/modelcontextprotocol/go-sdk/mcp" ) diff --git a/pkg/github/secret_scanning.go b/pkg/github/secret_scanning.go index fa60021e53..676c2c1625 100644 --- a/pkg/github/secret_scanning.go +++ b/pkg/github/secret_scanning.go @@ -12,7 +12,7 @@ import ( "github.com/github/github-mcp-server/pkg/scopes" "github.com/github/github-mcp-server/pkg/translations" "github.com/github/github-mcp-server/pkg/utils" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/modelcontextprotocol/go-sdk/mcp" ) diff --git a/pkg/github/secret_scanning_test.go b/pkg/github/secret_scanning_test.go index ed05d22150..7c53de35c5 100644 --- a/pkg/github/secret_scanning_test.go +++ b/pkg/github/secret_scanning_test.go @@ -8,7 +8,7 @@ import ( "github.com/github/github-mcp-server/internal/toolsnaps" "github.com/github/github-mcp-server/pkg/translations" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -40,7 +40,7 @@ func Test_GetSecretScanningAlert(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedAlert *github.SecretScanningAlert expectedErrMsg string @@ -50,7 +50,7 @@ func Test_GetSecretScanningAlert(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetReposSecretScanningAlertsByOwnerByRepoByAlertNumber: mockResponse(t, http.StatusOK, mockAlert), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "alertNumber": float64(42), @@ -66,7 +66,7 @@ func 
Test_GetSecretScanningAlert(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "alertNumber": float64(9999), @@ -156,7 +156,7 @@ func Test_ListSecretScanningAlerts(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedAlerts []*github.SecretScanningAlert expectedErrMsg string @@ -170,7 +170,7 @@ func Test_ListSecretScanningAlerts(t *testing.T) { mockResponse(t, http.StatusOK, []*github.SecretScanningAlert{&resolvedAlert}), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", "state": "resolved", @@ -185,7 +185,7 @@ func Test_ListSecretScanningAlerts(t *testing.T) { mockResponse(t, http.StatusOK, []*github.SecretScanningAlert{&resolvedAlert, &openAlert}), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -200,7 +200,7 @@ func Test_ListSecretScanningAlerts(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Unauthorized access"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, diff --git a/pkg/github/security_advisories.go b/pkg/github/security_advisories.go index 7bdb978cdb..e86e220eaf 100644 --- a/pkg/github/security_advisories.go +++ b/pkg/github/security_advisories.go @@ -12,7 +12,7 @@ import ( "github.com/github/github-mcp-server/pkg/scopes" "github.com/github/github-mcp-server/pkg/translations" "github.com/github/github-mcp-server/pkg/utils" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/modelcontextprotocol/go-sdk/mcp" ) diff --git a/pkg/github/security_advisories_test.go b/pkg/github/security_advisories_test.go index bfc4c6985e..3d4df43e63 
100644 --- a/pkg/github/security_advisories_test.go +++ b/pkg/github/security_advisories_test.go @@ -8,7 +8,7 @@ import ( "github.com/github/github-mcp-server/internal/toolsnaps" "github.com/github/github-mcp-server/pkg/translations" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/google/jsonschema-go/jsonschema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -42,7 +42,7 @@ func Test_ListGlobalSecurityAdvisories(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedAdvisories []*github.GlobalSecurityAdvisory expectedErrMsg string @@ -52,7 +52,7 @@ func Test_ListGlobalSecurityAdvisories(t *testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetAdvisories: mockResponse(t, http.StatusOK, []*github.GlobalSecurityAdvisory{mockAdvisory}), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "type": "reviewed", "ecosystem": "npm", "severity": "high", @@ -68,7 +68,7 @@ func Test_ListGlobalSecurityAdvisories(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Bad Request"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "type": "reviewed", "severity": "extreme", }, @@ -83,7 +83,7 @@ func Test_ListGlobalSecurityAdvisories(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Internal Server Error"}`)) }), }), - requestArgs: map[string]interface{}{}, + requestArgs: map[string]any{}, expectError: true, expectedErrMsg: "failed to list global security advisories", }, @@ -155,7 +155,7 @@ func Test_GetGlobalSecurityAdvisory(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedAdvisory *github.GlobalSecurityAdvisory expectedErrMsg string @@ -165,7 +165,7 @@ func Test_GetGlobalSecurityAdvisory(t 
*testing.T) { mockedClient: MockHTTPClientWithHandlers(map[string]http.HandlerFunc{ GetAdvisoriesByGhsaID: mockResponse(t, http.StatusOK, mockAdvisory), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "ghsaId": "GHSA-xxxx-xxxx-xxxx", }, expectError: false, @@ -179,7 +179,7 @@ func Test_GetGlobalSecurityAdvisory(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Bad Request"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "ghsaId": "invalid-ghsa-id", }, expectError: true, @@ -193,7 +193,7 @@ func Test_GetGlobalSecurityAdvisory(t *testing.T) { _, _ = w.Write([]byte(`{"message": "Not Found"}`)) }), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "ghsaId": "GHSA-xxxx-xxxx-xxxx", }, expectError: true, @@ -270,7 +270,7 @@ func Test_ListRepositorySecurityAdvisories(t *testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedAdvisories []*github.SecurityAdvisory expectedErrMsg string @@ -285,7 +285,7 @@ func Test_ListRepositorySecurityAdvisories(t *testing.T) { mockResponse(t, http.StatusOK, []*github.SecurityAdvisory{adv1, adv2}), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -306,7 +306,7 @@ func Test_ListRepositorySecurityAdvisories(t *testing.T) { mockResponse(t, http.StatusOK, []*github.SecurityAdvisory{adv1}), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "octo", "repo": "hello-world", "direction": "desc", @@ -326,7 +326,7 @@ func Test_ListRepositorySecurityAdvisories(t *testing.T) { mockResponse(t, http.StatusInternalServerError, map[string]string{"message": "Internal Server Error"}), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "owner": "owner", "repo": "repo", }, @@ -403,7 +403,7 @@ func Test_ListOrgRepositorySecurityAdvisories(t 
*testing.T) { tests := []struct { name string mockedClient *http.Client - requestArgs map[string]interface{} + requestArgs map[string]any expectError bool expectedAdvisories []*github.SecurityAdvisory expectedErrMsg string @@ -418,7 +418,7 @@ func Test_ListOrgRepositorySecurityAdvisories(t *testing.T) { mockResponse(t, http.StatusOK, []*github.SecurityAdvisory{adv1, adv2}), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "org": "octo", }, expectError: false, @@ -438,7 +438,7 @@ func Test_ListOrgRepositorySecurityAdvisories(t *testing.T) { mockResponse(t, http.StatusOK, []*github.SecurityAdvisory{adv1}), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "org": "octo", "direction": "asc", "sort": "created", @@ -457,7 +457,7 @@ func Test_ListOrgRepositorySecurityAdvisories(t *testing.T) { mockResponse(t, http.StatusForbidden, map[string]string{"message": "Forbidden"}), ), }), - requestArgs: map[string]interface{}{ + requestArgs: map[string]any{ "org": "octo", }, expectError: true, diff --git a/pkg/github/server.go b/pkg/github/server.go index 8248da58fd..06c12575d2 100644 --- a/pkg/github/server.go +++ b/pkg/github/server.go @@ -3,433 +3,210 @@ package github import ( "context" "encoding/json" - "errors" "fmt" - "strconv" + "log/slog" "strings" + "time" + gherrors "github.com/github/github-mcp-server/pkg/errors" + "github.com/github/github-mcp-server/pkg/inventory" "github.com/github/github-mcp-server/pkg/octicons" + "github.com/github/github-mcp-server/pkg/translations" "github.com/github/github-mcp-server/pkg/utils" - "github.com/google/go-github/v79/github" - "github.com/google/jsonschema-go/jsonschema" "github.com/modelcontextprotocol/go-sdk/mcp" ) -// NewServer creates a new GitHub MCP server with the specified GH client and logger. 
- -func NewServer(version string, opts *mcp.ServerOptions) *mcp.Server { - if opts == nil { - opts = &mcp.ServerOptions{} - } - - // Create a new MCP server - s := mcp.NewServer(&mcp.Implementation{ - Name: "github-mcp-server", - Title: "GitHub MCP Server", - Version: version, - Icons: octicons.Icons("mark-github"), - }, opts) - - return s -} - -func CompletionsHandler(getClient GetClientFn) func(ctx context.Context, req *mcp.CompleteRequest) (*mcp.CompleteResult, error) { - return func(ctx context.Context, req *mcp.CompleteRequest) (*mcp.CompleteResult, error) { - switch req.Params.Ref.Type { - case "ref/resource": - if strings.HasPrefix(req.Params.Ref.URI, "repo://") { - return RepositoryResourceCompletionHandler(getClient)(ctx, req) - } - return nil, fmt.Errorf("unsupported resource URI: %s", req.Params.Ref.URI) - case "ref/prompt": - return nil, nil - default: - return nil, fmt.Errorf("unsupported ref type: %s", req.Params.Ref.Type) - } - } -} +type MCPServerConfig struct { + // Version of the server + Version string -// OptionalParamOK is a helper function that can be used to fetch a requested parameter from the request. -// It returns the value, a boolean indicating if the parameter was present, and an error if the type is wrong. -func OptionalParamOK[T any, A map[string]any](args A, p string) (value T, ok bool, err error) { - // Check if the parameter is present in the request - val, exists := args[p] - if !exists { - // Not present, return zero value, false, no error - return - } + // GitHub Host to target for API requests (e.g. 
github.com or github.enterprise.com) + Host string - // Check if the parameter is of the expected type - value, ok = val.(T) - if !ok { - // Present but wrong type - err = fmt.Errorf("parameter %s is not of type %T, is %T", p, value, val) - ok = true // Set ok to true because the parameter *was* present, even if wrong type - return - } + // GitHub Token to authenticate with the GitHub API + Token string - // Present and correct type - ok = true - return -} + // EnabledToolsets is a list of toolsets to enable + // See: https://github.com/github/github-mcp-server?tab=readme-ov-file#tool-configuration + EnabledToolsets []string -// isAcceptedError checks if the error is an accepted error. -func isAcceptedError(err error) bool { - var acceptedError *github.AcceptedError - return errors.As(err, &acceptedError) -} + // EnabledTools is a list of specific tools to enable (additive to toolsets) + // When specified, these tools are registered in addition to any specified toolset tools + EnabledTools []string -// RequiredParam is a helper function that can be used to fetch a requested parameter from the request. -// It does the following checks: -// 1. Checks if the parameter is present in the request. -// 2. Checks if the parameter is of the expected type. -// 3. 
Checks if the parameter is not empty, i.e: non-zero value -func RequiredParam[T comparable](args map[string]any, p string) (T, error) { - var zero T - - // Check if the parameter is present in the request - if _, ok := args[p]; !ok { - return zero, fmt.Errorf("missing required parameter: %s", p) - } + // EnabledFeatures is a list of feature flags that are enabled + // Items with FeatureFlagEnable matching an entry in this list will be available + EnabledFeatures []string - // Check if the parameter is of the expected type - val, ok := args[p].(T) - if !ok { - return zero, fmt.Errorf("parameter %s is not of type %T", p, zero) - } + // Whether to enable dynamic toolsets + // See: https://github.com/github/github-mcp-server?tab=readme-ov-file#dynamic-tool-discovery + DynamicToolsets bool - if val == zero { - return zero, fmt.Errorf("missing required parameter: %s", p) - } + // ReadOnly indicates if we should only offer read-only tools + ReadOnly bool - return val, nil -} + // Translator provides translated text for the server tooling + Translator translations.TranslationHelperFunc -// RequiredInt is a helper function that can be used to fetch a requested parameter from the request. -// It does the following checks: -// 1. Checks if the parameter is present in the request. -// 2. Checks if the parameter is of the expected type. -// 3. Checks if the parameter is not empty, i.e: non-zero value -func RequiredInt(args map[string]any, p string) (int, error) { - v, err := RequiredParam[float64](args, p) - if err != nil { - return 0, err - } - return int(v), nil -} + // Content window size + ContentWindowSize int -// RequiredBigInt is a helper function that can be used to fetch a requested parameter from the request. -// It does the following checks: -// 1. Checks if the parameter is present in the request. -// 2. Checks if the parameter is of the expected type (float64). -// 3. Checks if the parameter is not empty, i.e: non-zero value. -// 4. 
Validates that the float64 value can be safely converted to int64 without truncation. -func RequiredBigInt(args map[string]any, p string) (int64, error) { - v, err := RequiredParam[float64](args, p) - if err != nil { - return 0, err - } + // LockdownMode indicates if we should enable lockdown mode + LockdownMode bool - result := int64(v) - // Check if converting back produces the same value to avoid silent truncation - if float64(result) != v { - return 0, fmt.Errorf("parameter %s value %f is too large to fit in int64", p, v) - } - return result, nil -} - -// OptionalParam is a helper function that can be used to fetch a requested parameter from the request. -// It does the following checks: -// 1. Checks if the parameter is present in the request, if not, it returns its zero-value -// 2. If it is present, it checks if the parameter is of the expected type and returns it -func OptionalParam[T any](args map[string]any, p string) (T, error) { - var zero T + // InsidersMode indicates if we should enable experimental features + InsidersMode bool - // Check if the parameter is present in the request - if _, ok := args[p]; !ok { - return zero, nil - } + // Logger is used for logging within the server + Logger *slog.Logger + // RepoAccessTTL overrides the default TTL for repository access cache entries. + RepoAccessTTL *time.Duration - // Check if the parameter is of the expected type - if _, ok := args[p].(T); !ok { - return zero, fmt.Errorf("parameter %s is not of type %T, is %T", p, zero, args[p]) - } + // ExcludeTools is a list of tool names that should be disabled regardless of + // other configuration. These tools will be excluded even if their toolset is enabled + // or they are explicitly listed in EnabledTools. + ExcludeTools []string - return args[p].(T), nil -} + // TokenScopes contains the OAuth scopes available to the token. + // When non-nil, tools requiring scopes not in this list will be hidden. 
+ // This is used for PAT scope filtering where we can't issue scope challenges. + TokenScopes []string -// OptionalIntParam is a helper function that can be used to fetch a requested parameter from the request. -// It does the following checks: -// 1. Checks if the parameter is present in the request, if not, it returns its zero-value -// 2. If it is present, it checks if the parameter is of the expected type and returns it -func OptionalIntParam(args map[string]any, p string) (int, error) { - v, err := OptionalParam[float64](args, p) - if err != nil { - return 0, err - } - return int(v), nil + // Additional server options to apply + ServerOptions []MCPServerOption } -// OptionalIntParamWithDefault is a helper function that can be used to fetch a requested parameter from the request -// similar to optionalIntParam, but it also takes a default value. -func OptionalIntParamWithDefault(args map[string]any, p string, d int) (int, error) { - v, err := OptionalIntParam(args, p) - if err != nil { - return 0, err - } - if v == 0 { - return d, nil - } - return v, nil -} - -// OptionalBoolParamWithDefault is a helper function that can be used to fetch a requested parameter from the request -// similar to optionalBoolParam, but it also takes a default value. -func OptionalBoolParamWithDefault(args map[string]any, p string, d bool) (bool, error) { - _, ok := args[p] - v, err := OptionalParam[bool](args, p) - if err != nil { - return false, err - } - if !ok { - return d, nil - } - return v, nil -} +type MCPServerOption func(*mcp.ServerOptions) -// OptionalStringArrayParam is a helper function that can be used to fetch a requested parameter from the request. -// It does the following checks: -// 1. Checks if the parameter is present in the request, if not, it returns its zero-value -// 2. 
If it is present, iterates the elements and checks each is a string -func OptionalStringArrayParam(args map[string]any, p string) ([]string, error) { - // Check if the parameter is present in the request - if _, ok := args[p]; !ok { - return []string{}, nil +func NewMCPServer(ctx context.Context, cfg *MCPServerConfig, deps ToolDependencies, inv *inventory.Inventory, middleware ...mcp.Middleware) (*mcp.Server, error) { + // Create the MCP server + serverOpts := &mcp.ServerOptions{ + Instructions: inv.Instructions(), + Logger: cfg.Logger, + CompletionHandler: CompletionsHandler(deps.GetClient), } - switch v := args[p].(type) { - case nil: - return []string{}, nil - case []string: - return v, nil - case []any: - strSlice := make([]string, len(v)) - for i, v := range v { - s, ok := v.(string) - if !ok { - return []string{}, fmt.Errorf("parameter %s is not of type string, is %T", p, v) - } - strSlice[i] = s - } - return strSlice, nil - default: - return []string{}, fmt.Errorf("parameter %s could not be coerced to []string, is %T", p, args[p]) + // Apply any additional server options + for _, o := range cfg.ServerOptions { + o(serverOpts) } -} -func convertStringSliceToBigIntSlice(s []string) ([]int64, error) { - int64Slice := make([]int64, len(s)) - for i, str := range s { - val, err := convertStringToBigInt(str, 0) - if err != nil { - return nil, fmt.Errorf("failed to convert element %d (%s) to int64: %w", i, str, err) + // In dynamic mode, explicitly advertise capabilities since tools/resources/prompts + // may be enabled at runtime even if none are registered initially. 
+ if cfg.DynamicToolsets { + serverOpts.Capabilities = &mcp.ServerCapabilities{ + Tools: &mcp.ToolCapabilities{}, + Resources: &mcp.ResourceCapabilities{}, + Prompts: &mcp.PromptCapabilities{}, } - int64Slice[i] = val } - return int64Slice, nil -} -func convertStringToBigInt(s string, def int64) (int64, error) { - v, err := strconv.ParseInt(s, 10, 64) - if err != nil { - return def, fmt.Errorf("failed to convert string %s to int64: %w", s, err) - } - return v, nil -} + ghServer := NewServer(cfg.Version, serverOpts) -// OptionalBigIntArrayParam is a helper function that can be used to fetch a requested parameter from the request. -// It does the following checks: -// 1. Checks if the parameter is present in the request, if not, it returns an empty slice -// 2. If it is present, iterates the elements, checks each is a string, and converts them to int64 values -func OptionalBigIntArrayParam(args map[string]any, p string) ([]int64, error) { - // Check if the parameter is present in the request - if _, ok := args[p]; !ok { - return []int64{}, nil - } + // Add middlewares. Order matters - for example, the error context middleware should be applied last so that it runs FIRST (closest to the handler) to ensure all errors are captured, + // and any middleware that needs to read or modify the context should be before it. + ghServer.AddReceivingMiddleware(middleware...) 
+ ghServer.AddReceivingMiddleware(InjectDepsMiddleware(deps)) + ghServer.AddReceivingMiddleware(addGitHubAPIErrorToContext) - switch v := args[p].(type) { - case nil: - return []int64{}, nil - case []string: - return convertStringSliceToBigIntSlice(v) - case []any: - int64Slice := make([]int64, len(v)) - for i, v := range v { - s, ok := v.(string) - if !ok { - return []int64{}, fmt.Errorf("parameter %s is not of type string, is %T", p, v) - } - val, err := convertStringToBigInt(s, 0) - if err != nil { - return []int64{}, fmt.Errorf("parameter %s: failed to convert element %d (%s) to int64: %w", p, i, s, err) - } - int64Slice[i] = val - } - return int64Slice, nil - default: - return []int64{}, fmt.Errorf("parameter %s could not be coerced to []int64, is %T", p, args[p]) + if unrecognized := inv.UnrecognizedToolsets(); len(unrecognized) > 0 { + cfg.Logger.Warn("Warning: unrecognized toolsets ignored", "toolsets", strings.Join(unrecognized, ", ")) } -} -// WithPagination adds REST API pagination parameters to a tool. -// https://docs.github.com/en/rest/using-the-rest-api/using-pagination-in-the-rest-api -func WithPagination(schema *jsonschema.Schema) *jsonschema.Schema { - schema.Properties["page"] = &jsonschema.Schema{ - Type: "number", - Description: "Page number for pagination (min 1)", - Minimum: jsonschema.Ptr(1.0), - } + // Register GitHub tools/resources/prompts from the inventory. + // In dynamic mode with no explicit toolsets, this is a no-op since enabledToolsets + // is empty - users enable toolsets at runtime via the dynamic tools below (but can + // enable toolsets or tools explicitly that do need registration). 
+ inv.RegisterAll(ctx, ghServer, deps) - schema.Properties["perPage"] = &jsonschema.Schema{ - Type: "number", - Description: "Results per page for pagination (min 1, max 100)", - Minimum: jsonschema.Ptr(1.0), - Maximum: jsonschema.Ptr(100.0), + // Register dynamic toolset management tools (enable/disable) - these are separate + // meta-tools that control the inventory, not part of the inventory itself + if cfg.DynamicToolsets { + registerDynamicTools(ghServer, inv, deps, cfg.Translator) } - return schema + return ghServer, nil } -// WithUnifiedPagination adds REST API pagination parameters to a tool. -// GraphQL tools will use this and convert page/perPage to GraphQL cursor parameters internally. -func WithUnifiedPagination(schema *jsonschema.Schema) *jsonschema.Schema { - schema.Properties["page"] = &jsonschema.Schema{ - Type: "number", - Description: "Page number for pagination (min 1)", - Minimum: jsonschema.Ptr(1.0), +// registerDynamicTools adds the dynamic toolset enable/disable tools to the server. +func registerDynamicTools(server *mcp.Server, inventory *inventory.Inventory, deps ToolDependencies, t translations.TranslationHelperFunc) { + dynamicDeps := DynamicToolDependencies{ + Server: server, + Inventory: inventory, + ToolDeps: deps, + T: t, } - - schema.Properties["perPage"] = &jsonschema.Schema{ - Type: "number", - Description: "Results per page for pagination (min 1, max 100)", - Minimum: jsonschema.Ptr(1.0), - Maximum: jsonschema.Ptr(100.0), + for _, tool := range DynamicTools(inventory) { + tool.RegisterFunc(server, dynamicDeps) } - - schema.Properties["after"] = &jsonschema.Schema{ - Type: "string", - Description: "Cursor for pagination. Use the endCursor from the previous page's PageInfo for GraphQL APIs.", - } - - return schema } -// WithCursorPagination adds only cursor-based pagination parameters to a tool (no page parameter). 
-func WithCursorPagination(schema *jsonschema.Schema) *jsonschema.Schema { - schema.Properties["perPage"] = &jsonschema.Schema{ - Type: "number", - Description: "Results per page for pagination (min 1, max 100)", - Minimum: jsonschema.Ptr(1.0), - Maximum: jsonschema.Ptr(100.0), +// ResolvedEnabledToolsets determines which toolsets should be enabled based on config. +// Returns nil for "use defaults", empty slice for "none", or explicit list. +func ResolvedEnabledToolsets(dynamicToolsets bool, enabledToolsets []string, enabledTools []string) []string { + // In dynamic mode, remove "all" and "default" since users enable toolsets on demand + if dynamicToolsets && enabledToolsets != nil { + enabledToolsets = RemoveToolset(enabledToolsets, string(ToolsetMetadataAll.ID)) + enabledToolsets = RemoveToolset(enabledToolsets, string(ToolsetMetadataDefault.ID)) } - schema.Properties["after"] = &jsonschema.Schema{ - Type: "string", - Description: "Cursor for pagination. Use the endCursor from the previous page's PageInfo for GraphQL APIs.", + if enabledToolsets != nil { + return enabledToolsets } - - return schema -} - -type PaginationParams struct { - Page int - PerPage int - After string -} - -// OptionalPaginationParams returns the "page", "perPage", and "after" parameters from the request, -// or their default values if not present, "page" default is 1, "perPage" default is 30. -// In future, we may want to make the default values configurable, or even have this -// function returned from `withPagination`, where the defaults are provided alongside -// the min/max values. 
-func OptionalPaginationParams(args map[string]any) (PaginationParams, error) { - page, err := OptionalIntParamWithDefault(args, "page", 1) - if err != nil { - return PaginationParams{}, err - } - perPage, err := OptionalIntParamWithDefault(args, "perPage", 30) - if err != nil { - return PaginationParams{}, err - } - after, err := OptionalParam[string](args, "after") - if err != nil { - return PaginationParams{}, err - } - return PaginationParams{ - Page: page, - PerPage: perPage, - After: after, - }, nil -} - -// OptionalCursorPaginationParams returns the "perPage" and "after" parameters from the request, -// without the "page" parameter, suitable for cursor-based pagination only. -func OptionalCursorPaginationParams(args map[string]any) (CursorPaginationParams, error) { - perPage, err := OptionalIntParamWithDefault(args, "perPage", 30) - if err != nil { - return CursorPaginationParams{}, err + if dynamicToolsets { + // Dynamic mode with no toolsets specified: start empty so users enable on demand + return []string{} } - after, err := OptionalParam[string](args, "after") - if err != nil { - return CursorPaginationParams{}, err + if len(enabledTools) > 0 { + // When specific tools are requested but no toolsets, don't use default toolsets + // This matches the original behavior: --tools=X alone registers only X + return []string{} } - return CursorPaginationParams{ - PerPage: perPage, - After: after, - }, nil -} -type CursorPaginationParams struct { - PerPage int - After string + // nil means "use defaults" in WithToolsets + return nil } -// ToGraphQLParams converts cursor pagination parameters to GraphQL-specific parameters. 
-func (p CursorPaginationParams) ToGraphQLParams() (*GraphQLPaginationParams, error) { - if p.PerPage > 100 { - return nil, fmt.Errorf("perPage value %d exceeds maximum of 100", p.PerPage) - } - if p.PerPage < 0 { - return nil, fmt.Errorf("perPage value %d cannot be negative", p.PerPage) +func addGitHubAPIErrorToContext(next mcp.MethodHandler) mcp.MethodHandler { + return func(ctx context.Context, method string, req mcp.Request) (result mcp.Result, err error) { + // Ensure the context is cleared of any previous errors + // as context isn't propagated through middleware + ctx = gherrors.ContextWithGitHubErrors(ctx) + return next(ctx, method, req) } - first := int32(p.PerPage) +} - var after *string - if p.After != "" { - after = &p.After +// NewServer creates a new GitHub MCP server with the specified GH client and logger. +func NewServer(version string, opts *mcp.ServerOptions) *mcp.Server { + if opts == nil { + opts = &mcp.ServerOptions{} } - return &GraphQLPaginationParams{ - First: &first, - After: after, - }, nil -} + // Create a new MCP server + s := mcp.NewServer(&mcp.Implementation{ + Name: "github-mcp-server", + Title: "GitHub MCP Server", + Version: version, + Icons: octicons.Icons("mark-github"), + }, opts) -type GraphQLPaginationParams struct { - First *int32 - After *string + return s } -// ToGraphQLParams converts REST API pagination parameters to GraphQL-specific parameters. -// This converts page/perPage to first parameter for GraphQL queries. -// If After is provided, it takes precedence over page-based pagination. 
-func (p PaginationParams) ToGraphQLParams() (*GraphQLPaginationParams, error) { - // Convert to CursorPaginationParams and delegate to avoid duplication - cursor := CursorPaginationParams{ - PerPage: p.PerPage, - After: p.After, +func CompletionsHandler(getClient GetClientFn) func(ctx context.Context, req *mcp.CompleteRequest) (*mcp.CompleteResult, error) { + return func(ctx context.Context, req *mcp.CompleteRequest) (*mcp.CompleteResult, error) { + switch req.Params.Ref.Type { + case "ref/resource": + if strings.HasPrefix(req.Params.Ref.URI, "repo://") { + return RepositoryResourceCompletionHandler(getClient)(ctx, req) + } + return nil, fmt.Errorf("unsupported resource URI: %s", req.Params.Ref.URI) + case "ref/prompt": + return nil, nil + default: + return nil, fmt.Errorf("unsupported ref type: %s", req.Params.Ref.Type) + } } - return cursor.ToGraphQLParams() } func MarshalledTextResult(v any) *mcp.CallToolResult { diff --git a/pkg/github/server_test.go b/pkg/github/server_test.go index f4ae5f831e..2b99cab12c 100644 --- a/pkg/github/server_test.go +++ b/pkg/github/server_test.go @@ -12,15 +12,16 @@ import ( "github.com/github/github-mcp-server/pkg/lockdown" "github.com/github/github-mcp-server/pkg/raw" "github.com/github/github-mcp-server/pkg/translations" - "github.com/google/go-github/v79/github" + gogithub "github.com/google/go-github/v82/github" "github.com/shurcooL/githubv4" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) // stubDeps is a test helper that implements ToolDependencies with configurable behavior. // Use this when you need to test error paths or when you need closure-based client creation. 
type stubDeps struct { - clientFn func(context.Context) (*github.Client, error) + clientFn func(context.Context) (*gogithub.Client, error) gqlClientFn func(context.Context) (*githubv4.Client, error) rawClientFn func(context.Context) (*raw.Client, error) @@ -30,7 +31,7 @@ type stubDeps struct { contentWindowSize int } -func (s stubDeps) GetClient(ctx context.Context) (*github.Client, error) { +func (s stubDeps) GetClient(ctx context.Context) (*gogithub.Client, error) { if s.clientFn != nil { return s.clientFn(ctx) } @@ -51,21 +52,23 @@ func (s stubDeps) GetRawClient(ctx context.Context) (*raw.Client, error) { return nil, nil } -func (s stubDeps) GetRepoAccessCache() *lockdown.RepoAccessCache { return s.repoAccessCache } +func (s stubDeps) GetRepoAccessCache(_ context.Context) (*lockdown.RepoAccessCache, error) { + return s.repoAccessCache, nil +} func (s stubDeps) GetT() translations.TranslationHelperFunc { return s.t } -func (s stubDeps) GetFlags() FeatureFlags { return s.flags } +func (s stubDeps) GetFlags(_ context.Context) FeatureFlags { return s.flags } func (s stubDeps) GetContentWindowSize() int { return s.contentWindowSize } func (s stubDeps) IsFeatureEnabled(_ context.Context, _ string) bool { return false } // Helper functions to create stub client functions for error testing -func stubClientFnFromHTTP(httpClient *http.Client) func(context.Context) (*github.Client, error) { - return func(_ context.Context) (*github.Client, error) { - return github.NewClient(httpClient), nil +func stubClientFnFromHTTP(httpClient *http.Client) func(context.Context) (*gogithub.Client, error) { + return func(_ context.Context) (*gogithub.Client, error) { + return gogithub.NewClient(httpClient), nil } } -func stubClientFnErr(errMsg string) func(context.Context) (*github.Client, error) { - return func(_ context.Context) (*github.Client, error) { +func stubClientFnErr(errMsg string) func(context.Context) (*gogithub.Client, error) { + return func(_ context.Context) 
(*gogithub.Client, error) { return nil, errors.New(errMsg) } } @@ -90,7 +93,7 @@ func stubFeatureFlags(enabledFlags map[string]bool) FeatureFlags { func badRequestHandler(msg string) http.HandlerFunc { return func(w http.ResponseWriter, _ *http.Request) { - structuredErrorResponse := github.ErrorResponse{ + structuredErrorResponse := gogithub.ErrorResponse{ Message: msg, } @@ -103,496 +106,116 @@ func badRequestHandler(msg string) http.HandlerFunc { } } -func Test_IsAcceptedError(t *testing.T) { - tests := []struct { - name string - err error - expectAccepted bool - }{ - { - name: "github AcceptedError", - err: &github.AcceptedError{}, - expectAccepted: true, - }, - { - name: "regular error", - err: fmt.Errorf("some other error"), - expectAccepted: false, - }, - { - name: "nil error", - err: nil, - expectAccepted: false, - }, - { - name: "wrapped AcceptedError", - err: fmt.Errorf("wrapped: %w", &github.AcceptedError{}), - expectAccepted: true, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - result := isAcceptedError(tc.err) - assert.Equal(t, tc.expectAccepted, result) - }) - } -} +// TestNewMCPServer_CreatesSuccessfully verifies that the server can be created +// with the deps injection middleware properly configured. 
+func TestNewMCPServer_CreatesSuccessfully(t *testing.T) { + t.Parallel() -func Test_RequiredStringParam(t *testing.T) { - tests := []struct { - name string - params map[string]interface{} - paramName string - expected string - expectError bool - }{ - { - name: "valid string parameter", - params: map[string]interface{}{"name": "test-value"}, - paramName: "name", - expected: "test-value", - expectError: false, - }, - { - name: "missing parameter", - params: map[string]interface{}{}, - paramName: "name", - expected: "", - expectError: true, - }, - { - name: "empty string parameter", - params: map[string]interface{}{"name": ""}, - paramName: "name", - expected: "", - expectError: true, - }, - { - name: "wrong type parameter", - params: map[string]interface{}{"name": 123}, - paramName: "name", - expected: "", - expectError: true, - }, + // Create a minimal server configuration + cfg := MCPServerConfig{ + Version: "test", + Host: "", // defaults to github.com + Token: "test-token", + EnabledToolsets: []string{"context"}, + ReadOnly: false, + Translator: translations.NullTranslationHelper, + ContentWindowSize: 5000, + LockdownMode: false, + InsidersMode: false, } - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - result, err := RequiredParam[string](tc.params, tc.paramName) + deps := stubDeps{} - if tc.expectError { - assert.Error(t, err) - } else { - assert.NoError(t, err) - assert.Equal(t, tc.expected, result) - } - }) - } -} + // Build inventory + inv, err := NewInventory(cfg.Translator). + WithDeprecatedAliases(DeprecatedToolAliases). + WithToolsets(cfg.EnabledToolsets). 
+ Build() -func Test_OptionalStringParam(t *testing.T) { - tests := []struct { - name string - params map[string]interface{} - paramName string - expected string - expectError bool - }{ - { - name: "valid string parameter", - params: map[string]interface{}{"name": "test-value"}, - paramName: "name", - expected: "test-value", - expectError: false, - }, - { - name: "missing parameter", - params: map[string]interface{}{}, - paramName: "name", - expected: "", - expectError: false, - }, - { - name: "empty string parameter", - params: map[string]interface{}{"name": ""}, - paramName: "name", - expected: "", - expectError: false, - }, - { - name: "wrong type parameter", - params: map[string]interface{}{"name": 123}, - paramName: "name", - expected: "", - expectError: true, - }, - } + require.NoError(t, err, "expected inventory build to succeed") - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - result, err := OptionalParam[string](tc.params, tc.paramName) + // Create the server + server, err := NewMCPServer(context.Background(), &cfg, deps, inv) + require.NoError(t, err, "expected server creation to succeed") + require.NotNil(t, server, "expected server to be non-nil") - if tc.expectError { - assert.Error(t, err) - } else { - assert.NoError(t, err) - assert.Equal(t, tc.expected, result) - } - }) - } + // The fact that the server was created successfully indicates that: + // 1. The deps injection middleware is properly added + // 2. Tools can be registered without panicking + // + // If the middleware wasn't properly added, tool calls would panic with + // "ToolDependencies not found in context" when executed. + // + // The actual middleware functionality and tool execution with ContextWithDeps + // is already tested in pkg/github/*_test.go. 
} -func Test_RequiredInt(t *testing.T) { - tests := []struct { - name string - params map[string]interface{} - paramName string - expected int - expectError bool - }{ - { - name: "valid number parameter", - params: map[string]interface{}{"count": float64(42)}, - paramName: "count", - expected: 42, - expectError: false, - }, - { - name: "missing parameter", - params: map[string]interface{}{}, - paramName: "count", - expected: 0, - expectError: true, - }, - { - name: "wrong type parameter", - params: map[string]interface{}{"count": "not-a-number"}, - paramName: "count", - expected: 0, - expectError: true, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - result, err := RequiredInt(tc.params, tc.paramName) +// TestResolveEnabledToolsets verifies the toolset resolution logic. +func TestResolveEnabledToolsets(t *testing.T) { + t.Parallel() - if tc.expectError { - assert.Error(t, err) - } else { - assert.NoError(t, err) - assert.Equal(t, tc.expected, result) - } - }) - } -} -func Test_OptionalIntParam(t *testing.T) { tests := []struct { - name string - params map[string]interface{} - paramName string - expected int - expectError bool - }{ - { - name: "valid number parameter", - params: map[string]interface{}{"count": float64(42)}, - paramName: "count", - expected: 42, - expectError: false, - }, - { - name: "missing parameter", - params: map[string]interface{}{}, - paramName: "count", - expected: 0, - expectError: false, - }, - { - name: "zero value", - params: map[string]interface{}{"count": float64(0)}, - paramName: "count", - expected: 0, - expectError: false, - }, - { - name: "wrong type parameter", - params: map[string]interface{}{"count": "not-a-number"}, - paramName: "count", - expected: 0, - expectError: true, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - result, err := OptionalIntParam(tc.params, tc.paramName) - - if tc.expectError { - assert.Error(t, err) - } else { - assert.NoError(t, err) - 
assert.Equal(t, tc.expected, result) - } - }) - } -} - -func Test_OptionalNumberParamWithDefault(t *testing.T) { - tests := []struct { - name string - params map[string]interface{} - paramName string - defaultVal int - expected int - expectError bool - }{ - { - name: "valid number parameter", - params: map[string]interface{}{"count": float64(42)}, - paramName: "count", - defaultVal: 10, - expected: 42, - expectError: false, - }, - { - name: "missing parameter", - params: map[string]interface{}{}, - paramName: "count", - defaultVal: 10, - expected: 10, - expectError: false, - }, - { - name: "zero value", - params: map[string]interface{}{"count": float64(0)}, - paramName: "count", - defaultVal: 10, - expected: 10, - expectError: false, - }, - { - name: "wrong type parameter", - params: map[string]interface{}{"count": "not-a-number"}, - paramName: "count", - defaultVal: 10, - expected: 0, - expectError: true, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - result, err := OptionalIntParamWithDefault(tc.params, tc.paramName, tc.defaultVal) - - if tc.expectError { - assert.Error(t, err) - } else { - assert.NoError(t, err) - assert.Equal(t, tc.expected, result) - } - }) - } -} - -func Test_OptionalBooleanParam(t *testing.T) { - tests := []struct { - name string - params map[string]interface{} - paramName string - expected bool - expectError bool - }{ - { - name: "true value", - params: map[string]interface{}{"flag": true}, - paramName: "flag", - expected: true, - expectError: false, - }, - { - name: "false value", - params: map[string]interface{}{"flag": false}, - paramName: "flag", - expected: false, - expectError: false, - }, - { - name: "missing parameter", - params: map[string]interface{}{}, - paramName: "flag", - expected: false, - expectError: false, - }, - { - name: "wrong type parameter", - params: map[string]interface{}{"flag": "not-a-boolean"}, - paramName: "flag", - expected: false, - expectError: true, - }, - } - - for _, tc := 
range tests { - t.Run(tc.name, func(t *testing.T) { - result, err := OptionalParam[bool](tc.params, tc.paramName) - - if tc.expectError { - assert.Error(t, err) - } else { - assert.NoError(t, err) - assert.Equal(t, tc.expected, result) - } - }) - } -} - -func TestOptionalStringArrayParam(t *testing.T) { - tests := []struct { - name string - params map[string]interface{} - paramName string - expected []string - expectError bool - }{ - { - name: "parameter not in request", - params: map[string]any{}, - paramName: "flag", - expected: []string{}, - expectError: false, - }, - { - name: "valid any array parameter", - params: map[string]any{ - "flag": []any{"v1", "v2"}, - }, - paramName: "flag", - expected: []string{"v1", "v2"}, - expectError: false, - }, - { - name: "valid string array parameter", - params: map[string]any{ - "flag": []string{"v1", "v2"}, - }, - paramName: "flag", - expected: []string{"v1", "v2"}, - expectError: false, - }, - { - name: "wrong type parameter", - params: map[string]any{ - "flag": 1, - }, - paramName: "flag", - expected: []string{}, - expectError: true, - }, - { - name: "wrong slice type parameter", - params: map[string]any{ - "flag": []any{"foo", 2}, - }, - paramName: "flag", - expected: []string{}, - expectError: true, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - result, err := OptionalStringArrayParam(tc.params, tc.paramName) - - if tc.expectError { - assert.Error(t, err) - } else { - assert.NoError(t, err) - assert.Equal(t, tc.expected, result) - } - }) - } -} - -func TestOptionalPaginationParams(t *testing.T) { - tests := []struct { - name string - params map[string]any - expected PaginationParams - expectError bool + name string + cfg MCPServerConfig + expectedResult []string }{ { - name: "no pagination parameters, default values", - params: map[string]any{}, - expected: PaginationParams{ - Page: 1, - PerPage: 30, + name: "nil toolsets without dynamic mode and no tools - use defaults", + cfg: 
MCPServerConfig{ + EnabledToolsets: nil, + DynamicToolsets: false, + EnabledTools: nil, }, - expectError: false, + expectedResult: nil, // nil means "use defaults" }, { - name: "page parameter, default perPage", - params: map[string]any{ - "page": float64(2), + name: "nil toolsets with dynamic mode - start empty", + cfg: MCPServerConfig{ + EnabledToolsets: nil, + DynamicToolsets: true, + EnabledTools: nil, }, - expected: PaginationParams{ - Page: 2, - PerPage: 30, - }, - expectError: false, + expectedResult: []string{}, // empty slice means no toolsets }, { - name: "perPage parameter, default page", - params: map[string]any{ - "perPage": float64(50), - }, - expected: PaginationParams{ - Page: 1, - PerPage: 50, + name: "explicit toolsets", + cfg: MCPServerConfig{ + EnabledToolsets: []string{"repos", "issues"}, + DynamicToolsets: false, }, - expectError: false, + expectedResult: []string{"repos", "issues"}, }, { - name: "page and perPage parameters", - params: map[string]any{ - "page": float64(2), - "perPage": float64(50), + name: "empty toolsets - disable all", + cfg: MCPServerConfig{ + EnabledToolsets: []string{}, + DynamicToolsets: false, }, - expected: PaginationParams{ - Page: 2, - PerPage: 50, - }, - expectError: false, + expectedResult: []string{}, // empty slice means no toolsets }, { - name: "invalid page parameter", - params: map[string]any{ - "page": "not-a-number", + name: "specific tools without toolsets - no default toolsets", + cfg: MCPServerConfig{ + EnabledToolsets: nil, + DynamicToolsets: false, + EnabledTools: []string{"get_me"}, }, - expected: PaginationParams{}, - expectError: true, + expectedResult: []string{}, // empty slice when tools specified but no toolsets }, { - name: "invalid perPage parameter", - params: map[string]any{ - "perPage": "not-a-number", + name: "dynamic mode with explicit toolsets removes all and default", + cfg: MCPServerConfig{ + EnabledToolsets: []string{"all", "repos"}, + DynamicToolsets: true, }, - expected: 
PaginationParams{}, - expectError: true, + expectedResult: []string{"repos"}, // "all" is removed in dynamic mode }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { - result, err := OptionalPaginationParams(tc.params) - - if tc.expectError { - assert.Error(t, err) - } else { - assert.NoError(t, err) - assert.Equal(t, tc.expected, result) - } + result := ResolvedEnabledToolsets(tc.cfg.DynamicToolsets, tc.cfg.EnabledToolsets, tc.cfg.EnabledTools) + assert.Equal(t, tc.expectedResult, result) }) } } diff --git a/pkg/github/tools.go b/pkg/github/tools.go index a169ff5910..0164b48e53 100644 --- a/pkg/github/tools.go +++ b/pkg/github/tools.go @@ -2,11 +2,12 @@ package github import ( "context" + "slices" "strings" "github.com/github/github-mcp-server/pkg/inventory" "github.com/github/github-mcp-server/pkg/translations" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/shurcooL/githubv4" ) @@ -213,6 +214,7 @@ func AllTools(t translations.TranslationHelperFunc) []inventory.ServerTool { RequestCopilotReview(t), PullRequestReviewWrite(t), AddCommentToPendingReview(t), + AddReplyToPullRequestComment(t), // Code security tools GetCodeScanningAlert(t), @@ -241,21 +243,6 @@ func AllTools(t translations.TranslationHelperFunc) []inventory.ServerTool { ListDiscussionCategories(t), // Actions tools - ListWorkflows(t), - ListWorkflowRuns(t), - GetWorkflowRun(t), - GetWorkflowRunLogs(t), - ListWorkflowJobs(t), - GetJobLogs(t), - ListWorkflowRunArtifacts(t), - DownloadWorkflowRunArtifact(t), - GetWorkflowRunUsage(t), - RunWorkflow(t), - RerunWorkflowRun(t), - RerunFailedJobs(t), - CancelWorkflowRun(t), - DeleteWorkflowRunLogs(t), - // Consolidated Actions tools (enabled via feature flag) ActionsList(t), ActionsGet(t), ActionsRunTrigger(t), @@ -274,17 +261,6 @@ func AllTools(t translations.TranslationHelperFunc) []inventory.ServerTool { UpdateGist(t), // Project tools - ListProjects(t), - GetProject(t), - 
ListProjectFields(t), - GetProjectField(t), - ListProjectItems(t), - GetProjectItem(t), - AddProjectItem(t), - DeleteProjectItem(t), - UpdateProjectItem(t), - - // Consolidated project tools (enabled via feature flag) ProjectsList(t), ProjectsGet(t), ProjectsWrite(t), @@ -418,12 +394,7 @@ func RemoveToolset(tools []string, toRemove string) []string { } func ContainsToolset(tools []string, toCheck string) bool { - for _, tool := range tools { - if tool == toCheck { - return true - } - } - return false + return slices.Contains(tools, toCheck) } // CleanTools cleans tool names by removing duplicates and trimming whitespace. diff --git a/pkg/github/toolset_instructions.go b/pkg/github/toolset_instructions.go index bf2388a3d9..bc9da4e65c 100644 --- a/pkg/github/toolset_instructions.go +++ b/pkg/github/toolset_instructions.go @@ -39,6 +39,8 @@ func generateProjectsToolsetInstructions(_ *inventory.Inventory) string { Workflow: 1) list_project_fields (get field IDs), 2) list_project_items (with pagination), 3) optional updates. +Status updates: Use list_project_status_updates to read recent project status updates (newest first). Use get_project_status_update with a node ID to get a single update. Use create_project_status_update to create a new status update for a project. + Field usage: - Call list_project_fields first to understand available fields and get IDs/types before filtering. - Use EXACT returned field names (case-insensitive match). Don't invent names or IDs. diff --git a/pkg/github/ui_capability.go b/pkg/github/ui_capability.go new file mode 100644 index 0000000000..a898382ccf --- /dev/null +++ b/pkg/github/ui_capability.go @@ -0,0 +1,27 @@ +package github + +import "github.com/modelcontextprotocol/go-sdk/mcp" + +// uiSupportedClients lists client names (from ClientInfo.Name) known to +// support MCP Apps UI rendering. 
+// +// This is a temporary workaround until the Go SDK adds an Extensions field +// to ClientCapabilities (see https://github.com/modelcontextprotocol/go-sdk/issues/777). +// Once that lands, detection should use capabilities.extensions instead. +var uiSupportedClients = map[string]bool{ + "Visual Studio Code - Insiders": true, + "Visual Studio Code": true, +} + +// clientSupportsUI reports whether the MCP client that sent this request +// supports MCP Apps UI rendering, based on its ClientInfo.Name. +func clientSupportsUI(req *mcp.CallToolRequest) bool { + if req == nil || req.Session == nil { + return false + } + params := req.Session.InitializeParams() + if params == nil || params.ClientInfo == nil { + return false + } + return uiSupportedClients[params.ClientInfo.Name] +} diff --git a/pkg/github/ui_capability_test.go b/pkg/github/ui_capability_test.go new file mode 100644 index 0000000000..59c08c4ad7 --- /dev/null +++ b/pkg/github/ui_capability_test.go @@ -0,0 +1,61 @@ +package github + +import ( + "context" + "testing" + + "github.com/modelcontextprotocol/go-sdk/mcp" + "github.com/stretchr/testify/assert" +) + +func Test_clientSupportsUI(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + clientName string + want bool + }{ + {name: "VS Code Insiders", clientName: "Visual Studio Code - Insiders", want: true}, + {name: "VS Code Stable", clientName: "Visual Studio Code", want: true}, + {name: "unknown client", clientName: "some-other-client", want: false}, + {name: "empty client name", clientName: "", want: false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + req := createMCPRequestWithSession(t, tt.clientName, nil) + assert.Equal(t, tt.want, clientSupportsUI(&req)) + }) + } + + t.Run("nil request", func(t *testing.T) { + assert.False(t, clientSupportsUI(nil)) + }) + + t.Run("nil session", func(t *testing.T) { + req := createMCPRequest(nil) + assert.False(t, clientSupportsUI(&req)) + }) +} + +func 
Test_clientSupportsUI_nilClientInfo(t *testing.T) { + t.Parallel() + + srv := mcp.NewServer(&mcp.Implementation{Name: "test"}, nil) + st, _ := mcp.NewInMemoryTransports() + session, err := srv.Connect(context.Background(), st, &mcp.ServerSessionOptions{ + State: &mcp.ServerSessionState{ + InitializeParams: &mcp.InitializeParams{ + ClientInfo: nil, + }, + }, + }) + if err != nil { + t.Fatal(err) + } + t.Cleanup(func() { _ = session.Close() }) + + req := mcp.CallToolRequest{Session: session} + assert.False(t, clientSupportsUI(&req)) +} diff --git a/pkg/github/ui_dist/.gitkeep b/pkg/github/ui_dist/.gitkeep new file mode 100644 index 0000000000..22302b5aef --- /dev/null +++ b/pkg/github/ui_dist/.gitkeep @@ -0,0 +1,3 @@ +# This directory contains built UI assets generated by script/build-ui +# The .gitkeep ensures the directory exists for the Go embed directive. +# Run script/build-ui to generate the actual HTML files. diff --git a/pkg/github/ui_dist/.placeholder.html b/pkg/github/ui_dist/.placeholder.html new file mode 100644 index 0000000000..2cc67e3c2b --- /dev/null +++ b/pkg/github/ui_dist/.placeholder.html @@ -0,0 +1,4 @@ + + + +Run script/build-ui to generate UI assets diff --git a/pkg/github/ui_embed.go b/pkg/github/ui_embed.go new file mode 100644 index 0000000000..257856e156 --- /dev/null +++ b/pkg/github/ui_embed.go @@ -0,0 +1,41 @@ +package github + +import ( + "embed" +) + +// UIAssets embeds the built MCP App UI HTML files. +// These files are generated by running `script/build-ui` which compiles +// the React/Primer components in the ui/ directory. +// +//go:embed ui_dist/*.html +var UIAssets embed.FS + +// GetUIAsset reads a UI asset from the embedded filesystem. +// The name should be just the filename (e.g., "get-me.html"). +func GetUIAsset(name string) (string, error) { + data, err := UIAssets.ReadFile("ui_dist/" + name) + if err != nil { + return "", err + } + return string(data), nil +} + +// MustGetUIAsset reads a UI asset and panics if it fails. 
+// Use this when the asset is required for server operation. +func MustGetUIAsset(name string) string { + html, err := GetUIAsset(name) + if err != nil { + panic("failed to load UI asset " + name + ": " + err.Error()) + } + return html +} + +// UIAssetsAvailable returns true if the MCP App UI assets have been built. +// This checks for a known UI asset file to determine if `script/build-ui` has been run. +// Use this to gracefully skip UI registration when assets aren't available, +// allowing Insiders mode to work for non-UI features without requiring a UI build. +func UIAssetsAvailable() bool { + _, err := GetUIAsset("get-me.html") + return err == nil +} diff --git a/pkg/github/ui_resources.go b/pkg/github/ui_resources.go new file mode 100644 index 0000000000..3fdb4a9357 --- /dev/null +++ b/pkg/github/ui_resources.go @@ -0,0 +1,89 @@ +package github + +import ( + "context" + + "github.com/modelcontextprotocol/go-sdk/mcp" +) + +// RegisterUIResources registers MCP App UI resources with the server. +// These are static resources (not templates) that serve HTML content for +// MCP App-enabled tools. The HTML is built from React/Primer components +// in the ui/ directory using `script/build-ui`. 
+func RegisterUIResources(s *mcp.Server) { + // Register the get_me UI resource + s.AddResource( + &mcp.Resource{ + URI: GetMeUIResourceURI, + Name: "get_me_ui", + Description: "MCP App UI for the get_me tool", + MIMEType: "text/html", + }, + func(_ context.Context, _ *mcp.ReadResourceRequest) (*mcp.ReadResourceResult, error) { + html := MustGetUIAsset("get-me.html") + return &mcp.ReadResourceResult{ + Contents: []*mcp.ResourceContents{ + { + URI: GetMeUIResourceURI, + MIMEType: "text/html", + Text: html, + // MCP Apps UI metadata - CSP configuration to allow loading GitHub avatars + // See: https://github.com/modelcontextprotocol/ext-apps/blob/main/specification/draft/apps.mdx + Meta: mcp.Meta{ + "ui": map[string]any{ + "csp": map[string]any{ + // Allow loading images from GitHub's avatar CDN + "resourceDomains": []string{"https://avatars.githubusercontent.com"}, + }, + }, + }, + }, + }, + }, nil + }, + ) + + // Register the issue_write UI resource + s.AddResource( + &mcp.Resource{ + URI: IssueWriteUIResourceURI, + Name: "issue_write_ui", + Description: "MCP App UI for creating and updating GitHub issues", + MIMEType: "text/html", + }, + func(_ context.Context, _ *mcp.ReadResourceRequest) (*mcp.ReadResourceResult, error) { + html := MustGetUIAsset("issue-write.html") + return &mcp.ReadResourceResult{ + Contents: []*mcp.ResourceContents{ + { + URI: IssueWriteUIResourceURI, + MIMEType: "text/html", + Text: html, + }, + }, + }, nil + }, + ) + + // Register the create_pull_request UI resource + s.AddResource( + &mcp.Resource{ + URI: PullRequestWriteUIResourceURI, + Name: "pr_write_ui", + Description: "MCP App UI for creating GitHub pull requests", + MIMEType: "text/html", + }, + func(_ context.Context, _ *mcp.ReadResourceRequest) (*mcp.ReadResourceResult, error) { + html := MustGetUIAsset("pr-write.html") + return &mcp.ReadResourceResult{ + Contents: []*mcp.ResourceContents{ + { + URI: PullRequestWriteUIResourceURI, + MIMEType: "text/html", + Text: html, + }, + }, + 
}, nil + }, + ) +} diff --git a/pkg/http/handler.go b/pkg/http/handler.go new file mode 100644 index 0000000000..2e828211d1 --- /dev/null +++ b/pkg/http/handler.go @@ -0,0 +1,312 @@ +package http + +import ( + "context" + "errors" + "log/slog" + "net/http" + + ghcontext "github.com/github/github-mcp-server/pkg/context" + "github.com/github/github-mcp-server/pkg/github" + "github.com/github/github-mcp-server/pkg/http/middleware" + "github.com/github/github-mcp-server/pkg/http/oauth" + "github.com/github/github-mcp-server/pkg/inventory" + "github.com/github/github-mcp-server/pkg/scopes" + "github.com/github/github-mcp-server/pkg/translations" + "github.com/github/github-mcp-server/pkg/utils" + "github.com/go-chi/chi/v5" + "github.com/modelcontextprotocol/go-sdk/mcp" +) + +type InventoryFactoryFunc func(r *http.Request) (*inventory.Inventory, error) + +// GitHubMCPServerFactoryFunc is a function type for creating a new MCP Server instance. +// middleware are applied AFTER the default GitHub MCP Server middlewares (like error context injection) +type GitHubMCPServerFactoryFunc func(r *http.Request, deps github.ToolDependencies, inventory *inventory.Inventory, cfg *github.MCPServerConfig) (*mcp.Server, error) + +type Handler struct { + ctx context.Context + config *ServerConfig + deps github.ToolDependencies + logger *slog.Logger + apiHosts utils.APIHostResolver + t translations.TranslationHelperFunc + githubMcpServerFactory GitHubMCPServerFactoryFunc + inventoryFactoryFunc InventoryFactoryFunc + oauthCfg *oauth.Config + scopeFetcher scopes.FetcherInterface + schemaCache *mcp.SchemaCache +} + +type HandlerOptions struct { + GitHubMcpServerFactory GitHubMCPServerFactoryFunc + InventoryFactory InventoryFactoryFunc + OAuthConfig *oauth.Config + ScopeFetcher scopes.FetcherInterface + FeatureChecker inventory.FeatureFlagChecker +} + +type HandlerOption func(*HandlerOptions) + +func WithScopeFetcher(f scopes.FetcherInterface) HandlerOption { + return func(o *HandlerOptions) { 
+ o.ScopeFetcher = f + } +} + +func WithGitHubMCPServerFactory(f GitHubMCPServerFactoryFunc) HandlerOption { + return func(o *HandlerOptions) { + o.GitHubMcpServerFactory = f + } +} + +func WithInventoryFactory(f InventoryFactoryFunc) HandlerOption { + return func(o *HandlerOptions) { + o.InventoryFactory = f + } +} + +func WithOAuthConfig(cfg *oauth.Config) HandlerOption { + return func(o *HandlerOptions) { + o.OAuthConfig = cfg + } +} + +func WithFeatureChecker(checker inventory.FeatureFlagChecker) HandlerOption { + return func(o *HandlerOptions) { + o.FeatureChecker = checker + } +} + +func NewHTTPMcpHandler( + ctx context.Context, + cfg *ServerConfig, + deps github.ToolDependencies, + t translations.TranslationHelperFunc, + logger *slog.Logger, + apiHost utils.APIHostResolver, + options ...HandlerOption) *Handler { + opts := &HandlerOptions{} + for _, o := range options { + o(opts) + } + + githubMcpServerFactory := opts.GitHubMcpServerFactory + if githubMcpServerFactory == nil { + githubMcpServerFactory = DefaultGitHubMCPServerFactory + } + + scopeFetcher := opts.ScopeFetcher + if scopeFetcher == nil { + scopeFetcher = scopes.NewFetcher(apiHost, scopes.FetcherOptions{}) + } + + inventoryFactory := opts.InventoryFactory + if inventoryFactory == nil { + inventoryFactory = DefaultInventoryFactory(cfg, t, opts.FeatureChecker, scopeFetcher) + } + + // Create a shared schema cache to avoid repeated JSON schema reflection + // when a new MCP Server is created per request in stateless mode. 
+ schemaCache := mcp.NewSchemaCache() + + return &Handler{ + ctx: ctx, + config: cfg, + deps: deps, + logger: logger, + apiHosts: apiHost, + t: t, + githubMcpServerFactory: githubMcpServerFactory, + inventoryFactoryFunc: inventoryFactory, + oauthCfg: opts.OAuthConfig, + scopeFetcher: scopeFetcher, + schemaCache: schemaCache, + } +} + +func (h *Handler) RegisterMiddleware(r chi.Router) { + r.Use( + middleware.ExtractUserToken(h.oauthCfg), + middleware.WithRequestConfig, + middleware.WithMCPParse(), + middleware.WithPATScopes(h.logger, h.scopeFetcher), + ) + + if h.config.ScopeChallenge { + r.Use(middleware.WithScopeChallenge(h.oauthCfg, h.scopeFetcher)) + } +} + +// RegisterRoutes registers the routes for the MCP server +// URL-based values take precedence over header-based values +func (h *Handler) RegisterRoutes(r chi.Router) { + // Base routes + r.Mount("/", h) + r.With(withReadonly).Mount("/readonly", h) + r.With(withInsiders).Mount("/insiders", h) + r.With(withReadonly, withInsiders).Mount("/readonly/insiders", h) + + // Toolset routes + r.With(withToolset).Mount("/x/{toolset}", h) + r.With(withToolset, withReadonly).Mount("/x/{toolset}/readonly", h) + r.With(withToolset, withInsiders).Mount("/x/{toolset}/insiders", h) + r.With(withToolset, withReadonly, withInsiders).Mount("/x/{toolset}/readonly/insiders", h) +} + +// withReadonly is middleware that sets readonly mode in the request context +func withReadonly(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx := ghcontext.WithReadonly(r.Context(), true) + next.ServeHTTP(w, r.WithContext(ctx)) + }) +} + +// withToolset is middleware that extracts the toolset from the URL and sets it in the request context +func withToolset(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + toolset := chi.URLParam(r, "toolset") + ctx := ghcontext.WithToolsets(r.Context(), []string{toolset}) + next.ServeHTTP(w, 
r.WithContext(ctx)) + }) +} + +// withInsiders is middleware that sets insiders mode in the request context +func withInsiders(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx := ghcontext.WithInsidersMode(r.Context(), true) + next.ServeHTTP(w, r.WithContext(ctx)) + }) +} + +func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + inv, err := h.inventoryFactoryFunc(r) + if err != nil { + if errors.Is(err, inventory.ErrUnknownTools) { + w.WriteHeader(http.StatusBadRequest) + if _, writeErr := w.Write([]byte(err.Error())); writeErr != nil { + h.logger.Error("failed to write response", "error", writeErr) + } + return + } + + w.WriteHeader(http.StatusInternalServerError) + return + } + + invToUse := inv + if methodInfo, ok := ghcontext.MCPMethod(r.Context()); ok && methodInfo != nil { + invToUse = inv.ForMCPRequest(methodInfo.Method, methodInfo.ItemName) + } + + ghServer, err := h.githubMcpServerFactory(r, h.deps, invToUse, &github.MCPServerConfig{ + Version: h.config.Version, + Translator: h.t, + ContentWindowSize: h.config.ContentWindowSize, + Logger: h.logger, + RepoAccessTTL: h.config.RepoAccessCacheTTL, + // Explicitly set empty capabilities. inv.ForMCPRequest currently returns nothing for Initialize. 
+ ServerOptions: []github.MCPServerOption{ + func(so *mcp.ServerOptions) { + so.Capabilities = &mcp.ServerCapabilities{ + Tools: &mcp.ToolCapabilities{}, + Resources: &mcp.ResourceCapabilities{}, + Prompts: &mcp.PromptCapabilities{}, + } + so.SchemaCache = h.schemaCache + }, + }, + }) + + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + return + } + + mcpHandler := mcp.NewStreamableHTTPHandler(func(_ *http.Request) *mcp.Server { + return ghServer + }, &mcp.StreamableHTTPOptions{ + Stateless: true, + }) + + mcpHandler.ServeHTTP(w, r) +} + +func DefaultGitHubMCPServerFactory(r *http.Request, deps github.ToolDependencies, inventory *inventory.Inventory, cfg *github.MCPServerConfig) (*mcp.Server, error) { + return github.NewMCPServer(r.Context(), cfg, deps, inventory) +} + +// DefaultInventoryFactory creates the default inventory factory for HTTP mode +func DefaultInventoryFactory(_ *ServerConfig, t translations.TranslationHelperFunc, featureChecker inventory.FeatureFlagChecker, scopeFetcher scopes.FetcherInterface) InventoryFactoryFunc { + return func(r *http.Request) (*inventory.Inventory, error) { + b := github.NewInventory(t). + WithDeprecatedAliases(github.DeprecatedToolAliases). 
+ WithFeatureChecker(featureChecker) + + b = InventoryFiltersForRequest(r, b) + b = PATScopeFilter(b, r, scopeFetcher) + + b.WithServerInstructions() + + return b.Build() + } +} + +// InventoryFiltersForRequest applies filters to the inventory builder +// based on the request context and headers +func InventoryFiltersForRequest(r *http.Request, builder *inventory.Builder) *inventory.Builder { + ctx := r.Context() + + if ghcontext.IsReadonly(ctx) { + builder = builder.WithReadOnly(true) + } + + toolsets := ghcontext.GetToolsets(ctx) + tools := ghcontext.GetTools(ctx) + + if len(toolsets) > 0 { + builder = builder.WithToolsets(github.ResolvedEnabledToolsets(false, toolsets, tools)) // No dynamic toolsets in HTTP mode + } + + if len(tools) > 0 { + if len(toolsets) == 0 { + builder = builder.WithToolsets([]string{}) + } + builder = builder.WithTools(github.CleanTools(tools)) + } + + if excluded := ghcontext.GetExcludeTools(ctx); len(excluded) > 0 { + builder = builder.WithExcludeTools(excluded) + } + + return builder +} + +func PATScopeFilter(b *inventory.Builder, r *http.Request, fetcher scopes.FetcherInterface) *inventory.Builder { + ctx := r.Context() + + tokenInfo, ok := ghcontext.GetTokenInfo(ctx) + if !ok || tokenInfo == nil { + return b + } + + // Scopes should have already been fetched by the WithPATScopes middleware. + // Only classic PATs (ghp_ prefix) return OAuth scopes via X-OAuth-Scopes header. + // Fine-grained PATs and other token types don't support this, so we skip filtering. + if tokenInfo.TokenType == utils.TokenTypePersonalAccessToken { + // Check if scopes are already in context (should be set by WithPATScopes). If not, fetch them. 
+ existingScopes, ok := ghcontext.GetTokenScopes(ctx) + if ok { + return b.WithFilter(github.CreateToolScopeFilter(existingScopes)) + } + + scopesList, err := fetcher.FetchTokenScopes(ctx, tokenInfo.Token) + if err != nil { + return b + } + + return b.WithFilter(github.CreateToolScopeFilter(scopesList)) + } + + return b +} diff --git a/pkg/http/handler_test.go b/pkg/http/handler_test.go new file mode 100644 index 0000000000..2a19e0a231 --- /dev/null +++ b/pkg/http/handler_test.go @@ -0,0 +1,411 @@ +package http + +import ( + "context" + "log/slog" + "net/http" + "net/http/httptest" + "slices" + "sort" + "testing" + + ghcontext "github.com/github/github-mcp-server/pkg/context" + "github.com/github/github-mcp-server/pkg/github" + "github.com/github/github-mcp-server/pkg/http/headers" + "github.com/github/github-mcp-server/pkg/inventory" + "github.com/github/github-mcp-server/pkg/scopes" + "github.com/github/github-mcp-server/pkg/translations" + "github.com/github/github-mcp-server/pkg/utils" + "github.com/go-chi/chi/v5" + "github.com/modelcontextprotocol/go-sdk/mcp" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func mockTool(name, toolsetID string, readOnly bool) inventory.ServerTool { + return inventory.ServerTool{ + Tool: mcp.Tool{ + Name: name, + Annotations: &mcp.ToolAnnotations{ReadOnlyHint: readOnly}, + }, + Toolset: inventory.ToolsetMetadata{ + ID: inventory.ToolsetID(toolsetID), + Description: "Test: " + toolsetID, + }, + } +} + +type allScopesFetcher struct{} + +func (f allScopesFetcher) FetchTokenScopes(_ context.Context, _ string) ([]string, error) { + return []string{ + string(scopes.Repo), + string(scopes.WriteOrg), + string(scopes.User), + string(scopes.Gist), + string(scopes.Notifications), + }, nil +} + +var _ scopes.FetcherInterface = allScopesFetcher{} + +func mockToolWithFeatureFlag(name, toolsetID string, readOnly bool, enableFlag, disableFlag string) inventory.ServerTool { + tool := mockTool(name, toolsetID, 
readOnly) + tool.FeatureFlagEnable = enableFlag + tool.FeatureFlagDisable = disableFlag + return tool +} + +func TestInventoryFiltersForRequest(t *testing.T) { + tools := []inventory.ServerTool{ + mockTool("get_file_contents", "repos", true), + mockTool("create_repository", "repos", false), + mockTool("list_issues", "issues", true), + mockTool("issue_write", "issues", false), + } + + tests := []struct { + name string + contextSetup func(context.Context) context.Context + expectedTools []string + }{ + { + name: "no filters applies defaults", + contextSetup: func(ctx context.Context) context.Context { return ctx }, + expectedTools: []string{"get_file_contents", "create_repository", "list_issues", "issue_write"}, + }, + { + name: "readonly from context filters write tools", + contextSetup: func(ctx context.Context) context.Context { + return ghcontext.WithReadonly(ctx, true) + }, + expectedTools: []string{"get_file_contents", "list_issues"}, + }, + { + name: "toolset from context filters to toolset", + contextSetup: func(ctx context.Context) context.Context { + return ghcontext.WithToolsets(ctx, []string{"repos"}) + }, + expectedTools: []string{"get_file_contents", "create_repository"}, + }, + { + name: "tools alone clears default toolsets", + contextSetup: func(ctx context.Context) context.Context { + return ghcontext.WithTools(ctx, []string{"list_issues"}) + }, + expectedTools: []string{"list_issues"}, + }, + { + name: "tools are additive with toolsets", + contextSetup: func(ctx context.Context) context.Context { + ctx = ghcontext.WithToolsets(ctx, []string{"repos"}) + ctx = ghcontext.WithTools(ctx, []string{"list_issues"}) + return ctx + }, + expectedTools: []string{"get_file_contents", "create_repository", "list_issues"}, + }, + { + name: "excluded tools removes specific tools", + contextSetup: func(ctx context.Context) context.Context { + return ghcontext.WithExcludeTools(ctx, []string{"create_repository", "issue_write"}) + }, + expectedTools: 
[]string{"get_file_contents", "list_issues"}, + }, + { + name: "excluded tools overrides explicit tools", + contextSetup: func(ctx context.Context) context.Context { + ctx = ghcontext.WithTools(ctx, []string{"list_issues", "create_repository"}) + ctx = ghcontext.WithExcludeTools(ctx, []string{"create_repository"}) + return ctx + }, + expectedTools: []string{"list_issues"}, + }, + { + name: "excluded tools combines with readonly", + contextSetup: func(ctx context.Context) context.Context { + ctx = ghcontext.WithReadonly(ctx, true) + ctx = ghcontext.WithExcludeTools(ctx, []string{"list_issues"}) + return ctx + }, + expectedTools: []string{"get_file_contents"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + req := httptest.NewRequest(http.MethodGet, "/", nil) + req = req.WithContext(tt.contextSetup(req.Context())) + + builder := inventory.NewBuilder(). + SetTools(tools). + WithToolsets([]string{"all"}) + + builder = InventoryFiltersForRequest(req, builder) + inv, err := builder.Build() + require.NoError(t, err) + + available := inv.AvailableTools(context.Background()) + toolNames := make([]string, len(available)) + for i, tool := range available { + toolNames[i] = tool.Tool.Name + } + + assert.ElementsMatch(t, tt.expectedTools, toolNames) + }) + } +} + +// testTools returns a set of mock tools across different toolsets with mixed read-only/write capabilities +func testTools() []inventory.ServerTool { + return []inventory.ServerTool{ + mockTool("get_file_contents", "repos", true), + mockTool("create_repository", "repos", false), + mockTool("list_issues", "issues", true), + mockTool("create_issue", "issues", false), + mockTool("list_pull_requests", "pull_requests", true), + mockTool("create_pull_request", "pull_requests", false), + // Feature-flagged tools for testing X-MCP-Features header + mockToolWithFeatureFlag("needs_holdback", "repos", true, "mcp_holdback_consolidated_projects", ""), + mockToolWithFeatureFlag("hidden_by_holdback", 
"repos", true, "", "mcp_holdback_consolidated_projects"), + } +} + +// extractToolNames extracts tool names from an inventory +func extractToolNames(ctx context.Context, inv *inventory.Inventory) []string { + available := inv.AvailableTools(ctx) + names := make([]string, len(available)) + for i, tool := range available { + names[i] = tool.Tool.Name + } + sort.Strings(names) + return names +} + +func TestHTTPHandlerRoutes(t *testing.T) { + tools := testTools() + + tests := []struct { + name string + path string + headers map[string]string + expectedTools []string + }{ + { + name: "root path returns all tools", + path: "/", + expectedTools: []string{"get_file_contents", "create_repository", "list_issues", "create_issue", "list_pull_requests", "create_pull_request", "hidden_by_holdback"}, + }, + { + name: "readonly path filters write tools", + path: "/readonly", + expectedTools: []string{"get_file_contents", "list_issues", "list_pull_requests", "hidden_by_holdback"}, + }, + { + name: "toolset path filters to toolset", + path: "/x/repos", + expectedTools: []string{"get_file_contents", "create_repository", "hidden_by_holdback"}, + }, + { + name: "toolset path with issues", + path: "/x/issues", + expectedTools: []string{"list_issues", "create_issue"}, + }, + { + name: "toolset readonly path filters to readonly tools in toolset", + path: "/x/repos/readonly", + expectedTools: []string{"get_file_contents", "hidden_by_holdback"}, + }, + { + name: "toolset readonly path with issues", + path: "/x/issues/readonly", + expectedTools: []string{"list_issues"}, + }, + { + name: "X-MCP-Tools header filters to specific tools", + path: "/", + headers: map[string]string{ + headers.MCPToolsHeader: "list_issues", + }, + expectedTools: []string{"list_issues"}, + }, + { + name: "X-MCP-Tools header with multiple tools", + path: "/", + headers: map[string]string{ + headers.MCPToolsHeader: "list_issues,get_file_contents", + }, + expectedTools: []string{"list_issues", "get_file_contents"}, + }, 
+ { + name: "X-MCP-Tools header does not expose extra tools", + path: "/", + headers: map[string]string{ + headers.MCPToolsHeader: "list_issues", + }, + expectedTools: []string{"list_issues"}, + }, + { + name: "X-MCP-Readonly header filters write tools", + path: "/", + headers: map[string]string{ + headers.MCPReadOnlyHeader: "true", + }, + expectedTools: []string{"get_file_contents", "list_issues", "list_pull_requests", "hidden_by_holdback"}, + }, + { + name: "X-MCP-Toolsets header filters to toolset", + path: "/", + headers: map[string]string{ + headers.MCPToolsetsHeader: "repos", + }, + expectedTools: []string{"get_file_contents", "create_repository", "hidden_by_holdback"}, + }, + { + name: "URL toolset takes precedence over header toolset", + path: "/x/issues", + headers: map[string]string{ + headers.MCPToolsetsHeader: "repos", + }, + expectedTools: []string{"list_issues", "create_issue"}, + }, + { + name: "URL readonly takes precedence over header", + path: "/readonly", + headers: map[string]string{ + headers.MCPReadOnlyHeader: "false", + }, + expectedTools: []string{"get_file_contents", "list_issues", "list_pull_requests", "hidden_by_holdback"}, + }, + { + name: "X-MCP-Features header enables flagged tool", + path: "/", + headers: map[string]string{ + headers.MCPFeaturesHeader: "mcp_holdback_consolidated_projects", + }, + expectedTools: []string{"get_file_contents", "create_repository", "list_issues", "create_issue", "list_pull_requests", "create_pull_request", "needs_holdback"}, + }, + { + name: "X-MCP-Features header with unknown flag is ignored", + path: "/", + headers: map[string]string{ + headers.MCPFeaturesHeader: "unknown_flag", + }, + expectedTools: []string{"get_file_contents", "create_repository", "list_issues", "create_issue", "list_pull_requests", "create_pull_request", "hidden_by_holdback"}, + }, + { + name: "X-MCP-Exclude-Tools header removes specific tools", + path: "/", + headers: map[string]string{ + headers.MCPExcludeToolsHeader: 
"create_issue,create_pull_request", + }, + expectedTools: []string{"get_file_contents", "create_repository", "list_issues", "list_pull_requests", "hidden_by_holdback"}, + }, + { + name: "X-MCP-Exclude-Tools with toolset header", + path: "/", + headers: map[string]string{ + headers.MCPToolsetsHeader: "issues", + headers.MCPExcludeToolsHeader: "create_issue", + }, + expectedTools: []string{"list_issues"}, + }, + { + name: "X-MCP-Exclude-Tools overrides X-MCP-Tools", + path: "/", + headers: map[string]string{ + headers.MCPToolsHeader: "list_issues,create_issue", + headers.MCPExcludeToolsHeader: "create_issue", + }, + expectedTools: []string{"list_issues"}, + }, + { + name: "X-MCP-Exclude-Tools with readonly path", + path: "/readonly", + headers: map[string]string{ + headers.MCPExcludeToolsHeader: "list_issues", + }, + expectedTools: []string{"get_file_contents", "list_pull_requests", "hidden_by_holdback"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var capturedInventory *inventory.Inventory + var capturedCtx context.Context + + // Create feature checker that reads from context without whitelist validation + // (the whitelist is tested separately; here we test the filtering logic) + featureChecker := func(ctx context.Context, flag string) (bool, error) { + return slices.Contains(ghcontext.GetHeaderFeatures(ctx), flag), nil + } + + apiHost, err := utils.NewAPIHost("https://api.github.com") + require.NoError(t, err) + + // Create inventory factory that captures the built inventory + inventoryFactory := func(r *http.Request) (*inventory.Inventory, error) { + capturedCtx = r.Context() + builder := inventory.NewBuilder(). + SetTools(tools). + WithToolsets([]string{"all"}). 
+ WithFeatureChecker(featureChecker) + builder = InventoryFiltersForRequest(r, builder) + inv, err := builder.Build() + if err != nil { + return nil, err + } + capturedInventory = inv + return inv, nil + } + + // Create mock MCP server factory that just returns a minimal server + mcpServerFactory := func(_ *http.Request, _ github.ToolDependencies, _ *inventory.Inventory, _ *github.MCPServerConfig) (*mcp.Server, error) { + return mcp.NewServer(&mcp.Implementation{Name: "test", Version: "0.0.1"}, nil), nil + } + + allScopesFetcher := allScopesFetcher{} + + // Create handler with our factories + handler := NewHTTPMcpHandler( + context.Background(), + &ServerConfig{Version: "test"}, + nil, // deps not needed for this test + translations.NullTranslationHelper, + slog.Default(), + apiHost, + WithInventoryFactory(inventoryFactory), + WithGitHubMCPServerFactory(mcpServerFactory), + WithScopeFetcher(allScopesFetcher), + ) + + // Create router and register routes + r := chi.NewRouter() + handler.RegisterMiddleware(r) + handler.RegisterRoutes(r) + + // Create request + req := httptest.NewRequest(http.MethodPost, tt.path, nil) + + // Ensure we're setting Authorization header for token context + req.Header.Set(headers.AuthorizationHeader, "Bearer ghp_testtoken") + + for k, v := range tt.headers { + req.Header.Set(k, v) + } + + // Execute request + rr := httptest.NewRecorder() + r.ServeHTTP(rr, req) + + // Verify the inventory was captured and has the expected tools + require.NotNil(t, capturedInventory, "inventory should have been created") + + toolNames := extractToolNames(capturedCtx, capturedInventory) + expectedSorted := make([]string, len(tt.expectedTools)) + copy(expectedSorted, tt.expectedTools) + sort.Strings(expectedSorted) + + assert.Equal(t, expectedSorted, toolNames, "tools should match expected") + }) + } +} diff --git a/pkg/http/headers/headers.go b/pkg/http/headers/headers.go new file mode 100644 index 0000000000..e032a0ce93 --- /dev/null +++ 
b/pkg/http/headers/headers.go @@ -0,0 +1,56 @@ +package headers + +const ( + // AuthorizationHeader is a standard HTTP Header. + AuthorizationHeader = "Authorization" + // ContentTypeHeader is a standard HTTP Header. + ContentTypeHeader = "Content-Type" + // AcceptHeader is a standard HTTP Header. + AcceptHeader = "Accept" + // UserAgentHeader is a standard HTTP Header. + UserAgentHeader = "User-Agent" + + // ContentTypeJSON is the standard MIME type for JSON. + ContentTypeJSON = "application/json" + // ContentTypeEventStream is the standard MIME type for Event Streams. + ContentTypeEventStream = "text/event-stream" + + // ForwardedForHeader is a standard HTTP Header used to forward the originating IP address of a client. + ForwardedForHeader = "X-Forwarded-For" + + // RealIPHeader is a standard HTTP Header used to indicate the real IP address of the client. + RealIPHeader = "X-Real-IP" + + // ForwardedHostHeader is a standard HTTP Header for preserving the original Host header when proxying. + ForwardedHostHeader = "X-Forwarded-Host" + // ForwardedProtoHeader is a standard HTTP Header for preserving the original protocol when proxying. + ForwardedProtoHeader = "X-Forwarded-Proto" + + // RequestHmacHeader is used to authenticate requests to the Raw API. + RequestHmacHeader = "Request-Hmac" + + // MCP-specific headers. + + // MCPReadOnlyHeader indicates whether the MCP is in read-only mode. + MCPReadOnlyHeader = "X-MCP-Readonly" + // MCPToolsetsHeader is a comma-separated list of MCP toolsets that the request is for. + MCPToolsetsHeader = "X-MCP-Toolsets" + // MCPToolsHeader is a comma-separated list of MCP tools that the request is for. + MCPToolsHeader = "X-MCP-Tools" + // MCPLockdownHeader indicates whether lockdown mode is enabled. + MCPLockdownHeader = "X-MCP-Lockdown" + // MCPInsidersHeader indicates whether insiders mode is enabled for early access features. 
+ MCPInsidersHeader = "X-MCP-Insiders" + // MCPExcludeToolsHeader is a comma-separated list of MCP tools that should be + // disabled regardless of other settings or header values. + MCPExcludeToolsHeader = "X-MCP-Exclude-Tools" + // MCPFeaturesHeader is a comma-separated list of feature flags to enable. + MCPFeaturesHeader = "X-MCP-Features" + + // GitHub-specific headers. + + // GraphQLFeaturesHeader is a comma-separated list of GraphQL feature flags to enable for GraphQL requests. + GraphQLFeaturesHeader = "GraphQL-Features" + // GitHubAPIVersionHeader is the header used to specify the GitHub API version. + GitHubAPIVersionHeader = "X-GitHub-Api-Version" +) diff --git a/pkg/http/headers/parse.go b/pkg/http/headers/parse.go new file mode 100644 index 0000000000..2b5eddacdb --- /dev/null +++ b/pkg/http/headers/parse.go @@ -0,0 +1,21 @@ +package headers + +import "strings" + +// ParseCommaSeparated splits a header value by comma, trims whitespace, +// and filters out empty values +func ParseCommaSeparated(value string) []string { + if value == "" { + return []string{} + } + + parts := strings.Split(value, ",") + result := make([]string, 0, len(parts)) + for _, p := range parts { + trimmed := strings.TrimSpace(p) + if trimmed != "" { + result = append(result, trimmed) + } + } + return result +} diff --git a/pkg/http/headers/parse_test.go b/pkg/http/headers/parse_test.go new file mode 100644 index 0000000000..d8b55a696b --- /dev/null +++ b/pkg/http/headers/parse_test.go @@ -0,0 +1,58 @@ +package headers + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestParseCommaSeparated(t *testing.T) { + tests := []struct { + name string + input string + expected []string + }{ + { + name: "empty string", + input: "", + expected: []string{}, + }, + { + name: "single value", + input: "foo", + expected: []string{"foo"}, + }, + { + name: "multiple values", + input: "foo,bar,baz", + expected: []string{"foo", "bar", "baz"}, + }, + { + name: "whitespace 
trimmed", + input: " foo , bar , baz ", + expected: []string{"foo", "bar", "baz"}, + }, + { + name: "empty values filtered", + input: "foo,,bar,", + expected: []string{"foo", "bar"}, + }, + { + name: "only commas", + input: ",,,", + expected: []string{}, + }, + { + name: "whitespace only values filtered", + input: "foo, ,bar", + expected: []string{"foo", "bar"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := ParseCommaSeparated(tt.input) + assert.Equal(t, tt.expected, result) + }) + } +} diff --git a/pkg/http/mark/mark.go b/pkg/http/mark/mark.go new file mode 100644 index 0000000000..859a30923d --- /dev/null +++ b/pkg/http/mark/mark.go @@ -0,0 +1,65 @@ +// Package mark provides a mechanism for tagging errors with a well-known error value. +package mark + +import "errors" + +// This list of errors is not exhaustive, but is a good starting point for most +// applications. Feel free to add more as needed, but don't go overboard. +// Remember, the specific types of errors are only important so far as someone +// calling your code might want to write logic to handle each type of error +// differently. +// +// Do not add application-specific errors to this list. Instead, just define +// your own package with your own application-specific errors, and use this +// package to mark errors with them. The errors in this package are not special, +// they're just plain old errors. +// +// Not all errors need to be marked. An error that is not marked should be +// treated as an unexpected error that cannot be handled by calling code. This +// is often the case for network errors or logic errors. 
+var ( + ErrNotFound = errors.New("not found") + ErrAlreadyExists = errors.New("already exists") + ErrBadRequest = errors.New("bad request") + ErrUnauthorized = errors.New("unauthorized") + ErrCancelled = errors.New("request cancelled") + ErrUnavailable = errors.New("unavailable") + ErrTimedout = errors.New("request timed out") + ErrTooLarge = errors.New("request is too large") + ErrTooManyRequests = errors.New("too many requests") + ErrForbidden = errors.New("forbidden") +) + +// With wraps err with another error that will return true from errors.Is and +// errors.As for both err and markErr, and anything either may wrap. +func With(err, markErr error) error { + if err == nil { + return nil + } + return marked{wrapped: err, mark: markErr} +} + +type marked struct { + wrapped error + mark error +} + +func (f marked) Is(target error) bool { + // if this is false, errors.Is will call unwrap and retry on the wrapped + // error. + return errors.Is(f.mark, target) +} + +func (f marked) As(target any) bool { + // if this is false, errors.As will call unwrap and retry on the wrapped + // error. + return errors.As(f.mark, target) +} + +func (f marked) Unwrap() error { + return f.wrapped +} + +func (f marked) Error() string { + return f.mark.Error() + ": " + f.wrapped.Error() +} diff --git a/pkg/http/middleware/mcp_parse.go b/pkg/http/middleware/mcp_parse.go new file mode 100644 index 0000000000..c82616b270 --- /dev/null +++ b/pkg/http/middleware/mcp_parse.go @@ -0,0 +1,126 @@ +package middleware + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + + ghcontext "github.com/github/github-mcp-server/pkg/context" +) + +// mcpJSONRPCRequest represents the structure of an MCP JSON-RPC request. +// We only parse the fields needed for routing and optimization. 
+type mcpJSONRPCRequest struct { + JSONRPC string `json:"jsonrpc"` + Method string `json:"method"` + Params struct { + // For tools/call + Name string `json:"name,omitempty"` + Arguments json.RawMessage `json:"arguments,omitempty"` + // For prompts/get + // Name is shared with tools/call + // For resources/read + URI string `json:"uri,omitempty"` + } `json:"params"` +} + +// WithMCPParse creates a middleware that parses MCP JSON-RPC requests early in the +// request lifecycle and stores the parsed information in the request context. +// This enables: +// - Registry filtering via ForMCPRequest (only register needed tools/resources/prompts) +// - Avoiding duplicate JSON parsing in downstream middlewares +// - Access to owner/repo for secret-scanning middleware +// +// The middleware reads the request body, parses it, restores the body for downstream +// handlers, and stores the parsed MCPMethodInfo in the request context. +func WithMCPParse() func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + fn := func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + // Skip health check endpoints + if r.URL.Path == "/_ping" { + next.ServeHTTP(w, r) + return + } + + // Only parse POST requests (MCP uses JSON-RPC over POST) + if r.Method != http.MethodPost { + next.ServeHTTP(w, r) + return + } + + // Read the request body + body, err := io.ReadAll(r.Body) + if err != nil { + // Log but continue - don't block requests on parse errors + next.ServeHTTP(w, r) + return + } + + // Restore the body for downstream handlers + r.Body = io.NopCloser(bytes.NewReader(body)) + + // Skip empty bodies + if len(body) == 0 { + next.ServeHTTP(w, r) + return + } + + // Parse the JSON-RPC request + var mcpReq mcpJSONRPCRequest + err = json.Unmarshal(body, &mcpReq) + if err != nil { + // Log but continue - could be a non-MCP request or malformed JSON + next.ServeHTTP(w, r) + return + } + + // Skip if not a valid JSON-RPC 2.0 request + if mcpReq.JSONRPC != 
"2.0" || mcpReq.Method == "" { + next.ServeHTTP(w, r) + return + } + + // Build the MCPMethodInfo + methodInfo := &ghcontext.MCPMethodInfo{ + Method: mcpReq.Method, + } + + // Extract item name based on method type + + switch mcpReq.Method { + case "tools/call": + methodInfo.ItemName = mcpReq.Params.Name + // Parse arguments if present + if len(mcpReq.Params.Arguments) > 0 { + var args map[string]any + err := json.Unmarshal(mcpReq.Params.Arguments, &args) + if err == nil { + methodInfo.Arguments = args + // Extract owner and repo if present + if owner, ok := args["owner"].(string); ok { + methodInfo.Owner = owner + } + if repo, ok := args["repo"].(string); ok { + methodInfo.Repo = repo + } + } + } + case "prompts/get": + methodInfo.ItemName = mcpReq.Params.Name + case "resources/read": + methodInfo.ItemName = mcpReq.Params.URI + default: + // Whatever + } + + // Store the parsed info in context + ctx = ghcontext.WithMCPMethodInfo(ctx, methodInfo) + + next.ServeHTTP(w, r.WithContext(ctx)) + } + return http.HandlerFunc(fn) + } +} diff --git a/pkg/http/middleware/mcp_parse_test.go b/pkg/http/middleware/mcp_parse_test.go new file mode 100644 index 0000000000..5a28a30c3b --- /dev/null +++ b/pkg/http/middleware/mcp_parse_test.go @@ -0,0 +1,191 @@ +package middleware + +import ( + "io" + "net/http" + "net/http/httptest" + "strings" + "testing" + + ghcontext "github.com/github/github-mcp-server/pkg/context" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestWithMCPParse(t *testing.T) { + tests := []struct { + name string + method string + path string + body string + expectInfo bool + expectedMethod string + expectedItem string + expectedOwner string + expectedRepo string + expectedArgs map[string]any + }{ + { + name: "health check path is skipped", + method: http.MethodPost, + path: "/_ping", + body: `{"jsonrpc":"2.0","method":"tools/list"}`, + expectInfo: false, + }, + { + name: "GET request is skipped", + method: http.MethodGet, 
+ path: "/mcp", + body: `{"jsonrpc":"2.0","method":"tools/list"}`, + expectInfo: false, + }, + { + name: "empty body is skipped", + method: http.MethodPost, + path: "/mcp", + body: "", + expectInfo: false, + }, + { + name: "invalid JSON is skipped", + method: http.MethodPost, + path: "/mcp", + body: "not valid json", + expectInfo: false, + }, + { + name: "non-JSON-RPC 2.0 is skipped", + method: http.MethodPost, + path: "/mcp", + body: `{"jsonrpc":"1.0","method":"tools/list"}`, + expectInfo: false, + }, + { + name: "empty method is skipped", + method: http.MethodPost, + path: "/mcp", + body: `{"jsonrpc":"2.0","method":""}`, + expectInfo: false, + }, + { + name: "tools/list parses method only", + method: http.MethodPost, + path: "/mcp", + body: `{"jsonrpc":"2.0","method":"tools/list"}`, + expectInfo: true, + expectedMethod: "tools/list", + }, + { + name: "tools/call parses name", + method: http.MethodPost, + path: "/mcp", + body: `{"jsonrpc":"2.0","method":"tools/call","params":{"name":"get_file_contents"}}`, + expectInfo: true, + expectedMethod: "tools/call", + expectedItem: "get_file_contents", + }, + { + name: "tools/call parses owner and repo from arguments", + method: http.MethodPost, + path: "/mcp", + body: `{"jsonrpc":"2.0","method":"tools/call","params":{"name":"get_file_contents","arguments":{"owner":"github","repo":"github-mcp-server","path":"README.md"}}}`, + expectInfo: true, + expectedMethod: "tools/call", + expectedItem: "get_file_contents", + expectedOwner: "github", + expectedRepo: "github-mcp-server", + expectedArgs: map[string]any{"owner": "github", "repo": "github-mcp-server", "path": "README.md"}, + }, + { + name: "tools/call with invalid arguments JSON continues without args", + method: http.MethodPost, + path: "/mcp", + body: `{"jsonrpc":"2.0","method":"tools/call","params":{"name":"get_file_contents","arguments":"not an object"}}`, + expectInfo: true, + expectedMethod: "tools/call", + expectedItem: "get_file_contents", + }, + { + name: 
"prompts/get parses name", + method: http.MethodPost, + path: "/mcp", + body: `{"jsonrpc":"2.0","method":"prompts/get","params":{"name":"my_prompt"}}`, + expectInfo: true, + expectedMethod: "prompts/get", + expectedItem: "my_prompt", + }, + { + name: "resources/read parses URI as item name", + method: http.MethodPost, + path: "/mcp", + body: `{"jsonrpc":"2.0","method":"resources/read","params":{"uri":"repo://github/github-mcp-server"}}`, + expectInfo: true, + expectedMethod: "resources/read", + expectedItem: "repo://github/github-mcp-server", + }, + { + name: "initialize method parses correctly", + method: http.MethodPost, + path: "/mcp", + body: `{"jsonrpc":"2.0","method":"initialize","params":{"capabilities":{}}}`, + expectInfo: true, + expectedMethod: "initialize", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var capturedInfo *ghcontext.MCPMethodInfo + var infoCaptured bool + + // Create a handler that captures the MCPMethodInfo from context + nextHandler := http.HandlerFunc(func(_ http.ResponseWriter, r *http.Request) { + capturedInfo, infoCaptured = ghcontext.MCPMethod(r.Context()) + }) + + middleware := WithMCPParse() + handler := middleware(nextHandler) + + req := httptest.NewRequest(tt.method, tt.path, strings.NewReader(tt.body)) + rr := httptest.NewRecorder() + + handler.ServeHTTP(rr, req) + + if tt.expectInfo { + require.True(t, infoCaptured, "MCPMethodInfo should be present in context") + require.NotNil(t, capturedInfo) + assert.Equal(t, tt.expectedMethod, capturedInfo.Method) + assert.Equal(t, tt.expectedItem, capturedInfo.ItemName) + assert.Equal(t, tt.expectedOwner, capturedInfo.Owner) + assert.Equal(t, tt.expectedRepo, capturedInfo.Repo) + if tt.expectedArgs != nil { + assert.Equal(t, tt.expectedArgs, capturedInfo.Arguments) + } + } else { + assert.False(t, infoCaptured, "MCPMethodInfo should not be present in context") + } + }) + } +} + +func TestWithMCPParse_BodyRestoration(t *testing.T) { + originalBody := 
`{"jsonrpc":"2.0","method":"tools/call","params":{"name":"test_tool"}}` + + var capturedBody string + + nextHandler := http.HandlerFunc(func(_ http.ResponseWriter, r *http.Request) { + body, err := io.ReadAll(r.Body) + require.NoError(t, err) + capturedBody = string(body) + }) + + middleware := WithMCPParse() + handler := middleware(nextHandler) + + req := httptest.NewRequest(http.MethodPost, "/mcp", strings.NewReader(originalBody)) + rr := httptest.NewRecorder() + + handler.ServeHTTP(rr, req) + + assert.Equal(t, originalBody, capturedBody, "body should be restored for downstream handlers") +} diff --git a/pkg/http/middleware/pat_scope.go b/pkg/http/middleware/pat_scope.go new file mode 100644 index 0000000000..bb1efdc011 --- /dev/null +++ b/pkg/http/middleware/pat_scope.go @@ -0,0 +1,54 @@ +package middleware + +import ( + "log/slog" + "net/http" + + ghcontext "github.com/github/github-mcp-server/pkg/context" + "github.com/github/github-mcp-server/pkg/scopes" + "github.com/github/github-mcp-server/pkg/utils" +) + +// WithPATScopes is a middleware that fetches and stores scopes for classic Personal Access Tokens (PATs) in the request context. +func WithPATScopes(logger *slog.Logger, scopeFetcher scopes.FetcherInterface) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + fn := func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + tokenInfo, ok := ghcontext.GetTokenInfo(ctx) + if !ok || tokenInfo == nil { + logger.Warn("no token info found in context") + next.ServeHTTP(w, r) + return + } + + // Fetch token scopes for scope-based tool filtering (PAT tokens only) + // Only classic PATs (ghp_ prefix) return OAuth scopes via X-OAuth-Scopes header. + // Fine-grained PATs and other token types don't support this, so we skip filtering. 
+ if tokenInfo.TokenType == utils.TokenTypePersonalAccessToken { + existingScopes, ok := ghcontext.GetTokenScopes(ctx) + if ok { + logger.Debug("using existing scopes from context", "scopes", existingScopes) + next.ServeHTTP(w, r) + return + } + + scopesList, err := scopeFetcher.FetchTokenScopes(ctx, tokenInfo.Token) + if err != nil { + logger.Warn("failed to fetch PAT scopes", "error", err) + next.ServeHTTP(w, r) + return + } + + // Store fetched scopes in context for downstream use + ctx = ghcontext.WithTokenScopes(ctx, scopesList) + + next.ServeHTTP(w, r.WithContext(ctx)) + return + } + + next.ServeHTTP(w, r) + } + return http.HandlerFunc(fn) + } +} diff --git a/pkg/http/middleware/pat_scope_test.go b/pkg/http/middleware/pat_scope_test.go new file mode 100644 index 0000000000..0607b8cf2b --- /dev/null +++ b/pkg/http/middleware/pat_scope_test.go @@ -0,0 +1,190 @@ +package middleware + +import ( + "context" + "errors" + "log/slog" + "net/http" + "net/http/httptest" + "testing" + + ghcontext "github.com/github/github-mcp-server/pkg/context" + "github.com/github/github-mcp-server/pkg/utils" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// mockScopeFetcher is a mock implementation of scopes.FetcherInterface +type mockScopeFetcher struct { + scopes []string + err error +} + +func (m *mockScopeFetcher) FetchTokenScopes(_ context.Context, _ string) ([]string, error) { + return m.scopes, m.err +} + +func TestWithPATScopes(t *testing.T) { + logger := slog.Default() + + tests := []struct { + name string + tokenInfo *ghcontext.TokenInfo + fetcherScopes []string + fetcherErr error + expectScopesFetched bool + expectedScopes []string + expectNextHandlerCalled bool + }{ + { + name: "no token info in context calls next handler", + tokenInfo: nil, + expectScopesFetched: false, + expectedScopes: nil, + expectNextHandlerCalled: true, + }, + { + name: "non-PAT token type skips scope fetching", + tokenInfo: &ghcontext.TokenInfo{ + Token: 
"gho_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + TokenType: utils.TokenTypeOAuthAccessToken, + }, + expectScopesFetched: false, + expectedScopes: nil, + expectNextHandlerCalled: true, + }, + { + name: "fine-grained PAT skips scope fetching", + tokenInfo: &ghcontext.TokenInfo{ + Token: "github_pat_xxxxxxxxxxxxxxxxxxxxxxx", + TokenType: utils.TokenTypeFineGrainedPersonalAccessToken, + }, + expectScopesFetched: false, + expectedScopes: nil, + expectNextHandlerCalled: true, + }, + { + name: "classic PAT fetches and stores scopes", + tokenInfo: &ghcontext.TokenInfo{ + Token: "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + TokenType: utils.TokenTypePersonalAccessToken, + }, + fetcherScopes: []string{"repo", "user", "read:org"}, + expectScopesFetched: true, + expectedScopes: []string{"repo", "user", "read:org"}, + expectNextHandlerCalled: true, + }, + { + name: "classic PAT with empty scopes", + tokenInfo: &ghcontext.TokenInfo{ + Token: "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + TokenType: utils.TokenTypePersonalAccessToken, + }, + fetcherScopes: []string{}, + expectScopesFetched: true, + expectedScopes: []string{}, + expectNextHandlerCalled: true, + }, + { + name: "fetcher error calls next handler without scopes", + tokenInfo: &ghcontext.TokenInfo{ + Token: "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + TokenType: utils.TokenTypePersonalAccessToken, + }, + fetcherErr: errors.New("network error"), + expectScopesFetched: false, + expectedScopes: nil, + expectNextHandlerCalled: true, + }, + { + name: "old-style PAT (40 hex chars) fetches scopes", + tokenInfo: &ghcontext.TokenInfo{ + Token: "0123456789abcdef0123456789abcdef01234567", + TokenType: utils.TokenTypePersonalAccessToken, + }, + fetcherScopes: []string{"repo"}, + expectScopesFetched: true, + expectedScopes: []string{"repo"}, + expectNextHandlerCalled: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var capturedScopes []string + var scopesFound bool + var nextHandlerCalled bool + + 
nextHandler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + nextHandlerCalled = true + capturedScopes, scopesFound = ghcontext.GetTokenScopes(r.Context()) + w.WriteHeader(http.StatusOK) + }) + + fetcher := &mockScopeFetcher{ + scopes: tt.fetcherScopes, + err: tt.fetcherErr, + } + + middleware := WithPATScopes(logger, fetcher) + handler := middleware(nextHandler) + + req := httptest.NewRequest(http.MethodGet, "/test", nil) + + // Set up context with token info if provided + if tt.tokenInfo != nil { + ctx := ghcontext.WithTokenInfo(req.Context(), tt.tokenInfo) + req = req.WithContext(ctx) + } + + rr := httptest.NewRecorder() + handler.ServeHTTP(rr, req) + + assert.Equal(t, tt.expectNextHandlerCalled, nextHandlerCalled, "next handler called mismatch") + + if tt.expectNextHandlerCalled { + assert.Equal(t, tt.expectScopesFetched, scopesFound, "scopes found mismatch") + assert.Equal(t, tt.expectedScopes, capturedScopes) + } + }) + } +} + +func TestWithPATScopes_PreservesExistingTokenInfo(t *testing.T) { + logger := slog.Default() + + var capturedTokenInfo *ghcontext.TokenInfo + var capturedScopes []string + var scopesFound bool + + nextHandler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + capturedTokenInfo, _ = ghcontext.GetTokenInfo(r.Context()) + capturedScopes, scopesFound = ghcontext.GetTokenScopes(r.Context()) + w.WriteHeader(http.StatusOK) + }) + + fetcher := &mockScopeFetcher{ + scopes: []string{"repo", "user"}, + } + + originalTokenInfo := &ghcontext.TokenInfo{ + Token: "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + TokenType: utils.TokenTypePersonalAccessToken, + } + + middleware := WithPATScopes(logger, fetcher) + handler := middleware(nextHandler) + + req := httptest.NewRequest(http.MethodGet, "/test", nil) + ctx := ghcontext.WithTokenInfo(req.Context(), originalTokenInfo) + req = req.WithContext(ctx) + + rr := httptest.NewRecorder() + handler.ServeHTTP(rr, req) + + require.NotNil(t, capturedTokenInfo) + 
assert.Equal(t, originalTokenInfo.Token, capturedTokenInfo.Token) + assert.Equal(t, originalTokenInfo.TokenType, capturedTokenInfo.TokenType) + assert.True(t, scopesFound) + assert.Equal(t, []string{"repo", "user"}, capturedScopes) +} diff --git a/pkg/http/middleware/request_config.go b/pkg/http/middleware/request_config.go new file mode 100644 index 0000000000..a7311334d3 --- /dev/null +++ b/pkg/http/middleware/request_config.go @@ -0,0 +1,64 @@ +package middleware + +import ( + "net/http" + "slices" + "strings" + + ghcontext "github.com/github/github-mcp-server/pkg/context" + "github.com/github/github-mcp-server/pkg/http/headers" +) + +// WithRequestConfig is a middleware that extracts MCP-related headers and sets them in the request context. +// This includes readonly mode, toolsets, tools, lockdown mode, insiders mode, and feature flags. +func WithRequestConfig(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + // Readonly mode + if relaxedParseBool(r.Header.Get(headers.MCPReadOnlyHeader)) { + ctx = ghcontext.WithReadonly(ctx, true) + } + + // Toolsets + if toolsets := headers.ParseCommaSeparated(r.Header.Get(headers.MCPToolsetsHeader)); len(toolsets) > 0 { + ctx = ghcontext.WithToolsets(ctx, toolsets) + } + + // Tools + if tools := headers.ParseCommaSeparated(r.Header.Get(headers.MCPToolsHeader)); len(tools) > 0 { + ctx = ghcontext.WithTools(ctx, tools) + } + + // Lockdown mode + if relaxedParseBool(r.Header.Get(headers.MCPLockdownHeader)) { + ctx = ghcontext.WithLockdownMode(ctx, true) + } + + // Excluded tools + if excludeTools := headers.ParseCommaSeparated(r.Header.Get(headers.MCPExcludeToolsHeader)); len(excludeTools) > 0 { + ctx = ghcontext.WithExcludeTools(ctx, excludeTools) + } + + // Insiders mode + if relaxedParseBool(r.Header.Get(headers.MCPInsidersHeader)) { + ctx = ghcontext.WithInsidersMode(ctx, true) + } + + // Feature flags + if features := 
headers.ParseCommaSeparated(r.Header.Get(headers.MCPFeaturesHeader)); len(features) > 0 { + ctx = ghcontext.WithHeaderFeatures(ctx, features) + } + + next.ServeHTTP(w, r.WithContext(ctx)) + }) +} + +// relaxedParseBool parses a string into a boolean value, treating various +// common false values or empty strings as false, and everything else as true. +// It is case-insensitive and trims whitespace. +func relaxedParseBool(s string) bool { + s = strings.TrimSpace(strings.ToLower(s)) + falseValues := []string{"", "false", "0", "no", "off", "n", "f"} + return !slices.Contains(falseValues, s) +} diff --git a/pkg/http/middleware/scope_challenge.go b/pkg/http/middleware/scope_challenge.go new file mode 100644 index 0000000000..1a86bf93ce --- /dev/null +++ b/pkg/http/middleware/scope_challenge.go @@ -0,0 +1,145 @@ +package middleware + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + + ghcontext "github.com/github/github-mcp-server/pkg/context" + "github.com/github/github-mcp-server/pkg/http/oauth" + "github.com/github/github-mcp-server/pkg/scopes" + "github.com/github/github-mcp-server/pkg/utils" +) + +// WithScopeChallenge creates a new middleware that determines if an OAuth request contains sufficient scopes to +// complete the request and returns a scope challenge if not. 
+func WithScopeChallenge(oauthCfg *oauth.Config, scopeFetcher scopes.FetcherInterface) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + fn := func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + // Skip health check endpoints + if r.URL.Path == "/_ping" { + next.ServeHTTP(w, r) + return + } + + // Get user from context + tokenInfo, ok := ghcontext.GetTokenInfo(ctx) + if !ok { + next.ServeHTTP(w, r) + return + } + + // Only check OAuth tokens - scope challenge allows OAuth apps to request additional scopes + if tokenInfo.TokenType != utils.TokenTypeOAuthAccessToken { + next.ServeHTTP(w, r) + return + } + + // Try to use pre-parsed MCP method info first (performance optimization) + // This avoids re-parsing the JSON body if WithMCPParse middleware ran earlier + var toolName string + if methodInfo, ok := ghcontext.MCPMethod(ctx); ok && methodInfo != nil { + // Only check tools/call requests + if methodInfo.Method != "tools/call" { + next.ServeHTTP(w, r) + return + } + toolName = methodInfo.ItemName + } else { + // Fallback: parse the request body directly + body, err := io.ReadAll(r.Body) + if err != nil { + next.ServeHTTP(w, r) + return + } + r.Body = io.NopCloser(bytes.NewReader(body)) + + var mcpRequest struct { + JSONRPC string `json:"jsonrpc"` + Method string `json:"method"` + Params struct { + Name string `json:"name,omitempty"` + Arguments map[string]any `json:"arguments,omitempty"` + } `json:"params"` + } + + err = json.Unmarshal(body, &mcpRequest) + if err != nil { + next.ServeHTTP(w, r) + return + } + + // Only check tools/call requests + if mcpRequest.Method != "tools/call" { + next.ServeHTTP(w, r) + return + } + + toolName = mcpRequest.Params.Name + } + toolScopeInfo, err := scopes.GetToolScopeInfo(toolName) + if err != nil { + next.ServeHTTP(w, r) + return + } + + // If tool not found in scope map, allow the request + if toolScopeInfo == nil { + next.ServeHTTP(w, r) + return + } + + // Get OAuth scopes 
for Token. First check if scopes are already in context, then fetch from GitHub if not present. + // This allows Remote Server to pass scope info to avoid redundant GitHub API calls. + activeScopes, ok := ghcontext.GetTokenScopes(ctx) + if !ok || (len(activeScopes) == 0 && tokenInfo.Token != "") { + activeScopes, err = scopeFetcher.FetchTokenScopes(ctx, tokenInfo.Token) + if err != nil { + next.ServeHTTP(w, r) + return + } + } + + // Store active scopes in context for downstream use + ctx = ghcontext.WithTokenScopes(ctx, activeScopes) + r = r.WithContext(ctx) + + // Check if user has the required scopes + if toolScopeInfo.HasAcceptedScope(activeScopes...) { + next.ServeHTTP(w, r) + return + } + + // User lacks required scopes - get the scopes they need + requiredScopes := toolScopeInfo.GetRequiredScopesSlice() + + // Build the resource metadata URL using the shared utility + // GetEffectiveResourcePath returns the original path (e.g., /mcp or /mcp/x/all) + // which is used to construct the well-known OAuth protected resource URL + resourcePath := oauth.ResolveResourcePath(r, oauthCfg) + resourceMetadataURL := oauth.BuildResourceMetadataURL(r, oauthCfg, resourcePath) + + // Build recommended scopes: existing scopes + required scopes + recommendedScopes := make([]string, 0, len(activeScopes)+len(requiredScopes)) + recommendedScopes = append(recommendedScopes, activeScopes...) + recommendedScopes = append(recommendedScopes, requiredScopes...) 
+ + // Build the WWW-Authenticate header value + wwwAuthenticateHeader := fmt.Sprintf(`Bearer error="insufficient_scope", scope=%q, resource_metadata=%q, error_description=%q`, + strings.Join(recommendedScopes, " "), + resourceMetadataURL, + "Additional scopes required: "+strings.Join(requiredScopes, ", "), + ) + + // Send scope challenge response with the superset of existing and required scopes + w.Header().Set("WWW-Authenticate", wwwAuthenticateHeader) + http.Error(w, "Forbidden: insufficient scopes", http.StatusForbidden) + } + return http.HandlerFunc(fn) + } +} diff --git a/pkg/http/middleware/token.go b/pkg/http/middleware/token.go new file mode 100644 index 0000000000..012bbabef2 --- /dev/null +++ b/pkg/http/middleware/token.go @@ -0,0 +1,56 @@ +package middleware + +import ( + "errors" + "fmt" + "net/http" + + ghcontext "github.com/github/github-mcp-server/pkg/context" + "github.com/github/github-mcp-server/pkg/http/oauth" + "github.com/github/github-mcp-server/pkg/utils" +) + +func ExtractUserToken(oauthCfg *oauth.Config) func(next http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + // Check if token info already exists in context, if it does, skip extraction. + // In remote setup, we may have already extracted token info earlier. 
+ if _, ok := ghcontext.GetTokenInfo(ctx); ok { + // Token info already exists in context, skip extraction + next.ServeHTTP(w, r) + return + } + + tokenType, token, err := utils.ParseAuthorizationHeader(r) + if err != nil { + // For missing Authorization header, return 401 with WWW-Authenticate header per MCP spec + if errors.Is(err, utils.ErrMissingAuthorizationHeader) { + sendAuthChallenge(w, r, oauthCfg) + return + } + // For other auth errors (bad format, unsupported), return 400 + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + ctx = ghcontext.WithTokenInfo(ctx, &ghcontext.TokenInfo{ + Token: token, + TokenType: tokenType, + }) + r = r.WithContext(ctx) + + next.ServeHTTP(w, r) + }) + } +} + +// sendAuthChallenge sends a 401 Unauthorized response with WWW-Authenticate header +// containing the OAuth protected resource metadata URL as per RFC 6750 and MCP spec. +func sendAuthChallenge(w http.ResponseWriter, r *http.Request, oauthCfg *oauth.Config) { + resourcePath := oauth.ResolveResourcePath(r, oauthCfg) + resourceMetadataURL := oauth.BuildResourceMetadataURL(r, oauthCfg, resourcePath) + w.Header().Set("WWW-Authenticate", fmt.Sprintf(`Bearer resource_metadata=%q`, resourceMetadataURL)) + http.Error(w, "Unauthorized", http.StatusUnauthorized) +} diff --git a/pkg/http/middleware/token_test.go b/pkg/http/middleware/token_test.go new file mode 100644 index 0000000000..fa8f0ee98e --- /dev/null +++ b/pkg/http/middleware/token_test.go @@ -0,0 +1,321 @@ +package middleware + +import ( + "net/http" + "net/http/httptest" + "testing" + + ghcontext "github.com/github/github-mcp-server/pkg/context" + "github.com/github/github-mcp-server/pkg/http/headers" + "github.com/github/github-mcp-server/pkg/http/oauth" + "github.com/github/github-mcp-server/pkg/utils" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestExtractUserToken(t *testing.T) { + oauthCfg := &oauth.Config{ + BaseURL: "https://example.com", + 
AuthorizationServer: "https://github.com/login/oauth", + } + + tests := []struct { + name string + authHeader string + expectedStatusCode int + expectedTokenType utils.TokenType + expectedToken string + expectTokenInfo bool + expectWWWAuth bool + }{ + // Missing authorization header + { + name: "missing Authorization header returns 401 with WWW-Authenticate", + authHeader: "", + expectedStatusCode: http.StatusUnauthorized, + expectTokenInfo: false, + expectWWWAuth: true, + }, + // Personal Access Token (classic) - ghp_ prefix + { + name: "personal access token (classic) with Bearer prefix", + authHeader: "Bearer ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectedStatusCode: http.StatusOK, + expectedTokenType: utils.TokenTypePersonalAccessToken, + expectedToken: "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectTokenInfo: true, + }, + { + name: "personal access token (classic) with bearer lowercase", + authHeader: "bearer ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectedStatusCode: http.StatusOK, + expectedTokenType: utils.TokenTypePersonalAccessToken, + expectedToken: "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectTokenInfo: true, + }, + { + name: "personal access token (classic) without Bearer prefix", + authHeader: "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectedStatusCode: http.StatusOK, + expectedTokenType: utils.TokenTypePersonalAccessToken, + expectedToken: "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectTokenInfo: true, + }, + // Fine-grained Personal Access Token - github_pat_ prefix + { + name: "fine-grained personal access token with Bearer prefix", + authHeader: "Bearer github_pat_xxxxxxxxxxxxxxxxxxxxxxx", + expectedStatusCode: http.StatusOK, + expectedTokenType: utils.TokenTypeFineGrainedPersonalAccessToken, + expectedToken: "github_pat_xxxxxxxxxxxxxxxxxxxxxxx", + expectTokenInfo: true, + }, + { + name: "fine-grained personal access token without Bearer prefix", + authHeader: "github_pat_xxxxxxxxxxxxxxxxxxxxxxx", + 
expectedStatusCode: http.StatusOK, + expectedTokenType: utils.TokenTypeFineGrainedPersonalAccessToken, + expectedToken: "github_pat_xxxxxxxxxxxxxxxxxxxxxxx", + expectTokenInfo: true, + }, + // OAuth Access Token - gho_ prefix + { + name: "OAuth access token with Bearer prefix", + authHeader: "Bearer gho_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectedStatusCode: http.StatusOK, + expectedTokenType: utils.TokenTypeOAuthAccessToken, + expectedToken: "gho_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectTokenInfo: true, + }, + { + name: "OAuth access token without Bearer prefix", + authHeader: "gho_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectedStatusCode: http.StatusOK, + expectedTokenType: utils.TokenTypeOAuthAccessToken, + expectedToken: "gho_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectTokenInfo: true, + }, + // User-to-Server GitHub App Token - ghu_ prefix + { + name: "user-to-server GitHub App token with Bearer prefix", + authHeader: "Bearer ghu_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectedStatusCode: http.StatusOK, + expectedTokenType: utils.TokenTypeUserToServerGitHubAppToken, + expectedToken: "ghu_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectTokenInfo: true, + }, + { + name: "user-to-server GitHub App token without Bearer prefix", + authHeader: "ghu_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectedStatusCode: http.StatusOK, + expectedTokenType: utils.TokenTypeUserToServerGitHubAppToken, + expectedToken: "ghu_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectTokenInfo: true, + }, + // Server-to-Server GitHub App Token (installation token) - ghs_ prefix + { + name: "server-to-server GitHub App token with Bearer prefix", + authHeader: "Bearer ghs_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectedStatusCode: http.StatusOK, + expectedTokenType: utils.TokenTypeServerToServerGitHubAppToken, + expectedToken: "ghs_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectTokenInfo: true, + }, + { + name: "server-to-server GitHub App token without Bearer prefix", + 
authHeader: "ghs_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectedStatusCode: http.StatusOK, + expectedTokenType: utils.TokenTypeServerToServerGitHubAppToken, + expectedToken: "ghs_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + expectTokenInfo: true, + }, + // Old-style Personal Access Token (40 hex characters, pre-2021) + { + name: "old-style personal access token (40 hex chars) with Bearer prefix", + authHeader: "Bearer 0123456789abcdef0123456789abcdef01234567", + expectedStatusCode: http.StatusOK, + expectedTokenType: utils.TokenTypePersonalAccessToken, + expectedToken: "0123456789abcdef0123456789abcdef01234567", + expectTokenInfo: true, + }, + { + name: "old-style personal access token (40 hex chars) without Bearer prefix", + authHeader: "0123456789abcdef0123456789abcdef01234567", + expectedStatusCode: http.StatusOK, + expectedTokenType: utils.TokenTypePersonalAccessToken, + expectedToken: "0123456789abcdef0123456789abcdef01234567", + expectTokenInfo: true, + }, + // Error cases + { + name: "unsupported GitHub-Bearer header returns 400", + authHeader: "GitHub-Bearer some_encrypted_token", + expectedStatusCode: http.StatusBadRequest, + expectTokenInfo: false, + }, + { + name: "invalid token format returns 400", + authHeader: "Bearer invalid_token_format", + expectedStatusCode: http.StatusBadRequest, + expectTokenInfo: false, + }, + { + name: "unrecognized prefix returns 400", + authHeader: "Bearer xyz_notavalidprefix", + expectedStatusCode: http.StatusBadRequest, + expectTokenInfo: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var capturedTokenInfo *ghcontext.TokenInfo + var tokenInfoCaptured bool + + nextHandler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + capturedTokenInfo, tokenInfoCaptured = ghcontext.GetTokenInfo(r.Context()) + w.WriteHeader(http.StatusOK) + }) + + middleware := ExtractUserToken(oauthCfg) + handler := middleware(nextHandler) + + req := httptest.NewRequest(http.MethodGet, "/test", 
nil) + if tt.authHeader != "" { + req.Header.Set(headers.AuthorizationHeader, tt.authHeader) + } + rr := httptest.NewRecorder() + + handler.ServeHTTP(rr, req) + + assert.Equal(t, tt.expectedStatusCode, rr.Code) + + if tt.expectWWWAuth { + wwwAuth := rr.Header().Get("WWW-Authenticate") + assert.NotEmpty(t, wwwAuth, "expected WWW-Authenticate header") + assert.Contains(t, wwwAuth, "Bearer resource_metadata=") + } + + if tt.expectTokenInfo { + require.True(t, tokenInfoCaptured, "expected TokenInfo to be present in context") + require.NotNil(t, capturedTokenInfo) + assert.Equal(t, tt.expectedTokenType, capturedTokenInfo.TokenType) + assert.Equal(t, tt.expectedToken, capturedTokenInfo.Token) + } else { + assert.False(t, tokenInfoCaptured, "expected no TokenInfo in context") + } + }) + } +} + +func TestExtractUserToken_NilOAuthConfig(t *testing.T) { + var capturedTokenInfo *ghcontext.TokenInfo + var tokenInfoCaptured bool + + nextHandler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + capturedTokenInfo, tokenInfoCaptured = ghcontext.GetTokenInfo(r.Context()) + w.WriteHeader(http.StatusOK) + }) + + middleware := ExtractUserToken(nil) + handler := middleware(nextHandler) + + req := httptest.NewRequest(http.MethodGet, "/test", nil) + req.Header.Set(headers.AuthorizationHeader, "Bearer ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx") + rr := httptest.NewRecorder() + + handler.ServeHTTP(rr, req) + + assert.Equal(t, http.StatusOK, rr.Code) + require.True(t, tokenInfoCaptured) + require.NotNil(t, capturedTokenInfo) + assert.Equal(t, utils.TokenTypePersonalAccessToken, capturedTokenInfo.TokenType) +} + +func TestExtractUserToken_MissingAuthHeader_WWWAuthenticateFormat(t *testing.T) { + oauthCfg := &oauth.Config{ + BaseURL: "https://api.example.com", + AuthorizationServer: "https://github.com/login/oauth", + ResourcePath: "/mcp", + } + + nextHandler := http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusOK) + }) + + middleware 
:= ExtractUserToken(oauthCfg) + handler := middleware(nextHandler) + + req := httptest.NewRequest(http.MethodGet, "/test", nil) + // No Authorization header + rr := httptest.NewRecorder() + + handler.ServeHTTP(rr, req) + + assert.Equal(t, http.StatusUnauthorized, rr.Code) + wwwAuth := rr.Header().Get("WWW-Authenticate") + assert.NotEmpty(t, wwwAuth) + assert.Contains(t, wwwAuth, "Bearer") + assert.Contains(t, wwwAuth, "resource_metadata=") + assert.Contains(t, wwwAuth, "/.well-known/oauth-protected-resource") +} + +func TestSendAuthChallenge(t *testing.T) { + tests := []struct { + name string + oauthCfg *oauth.Config + requestPath string + expectedContains []string + }{ + { + name: "with base URL configured", + oauthCfg: &oauth.Config{ + BaseURL: "https://mcp.example.com", + }, + requestPath: "/api/test", + expectedContains: []string{ + "Bearer", + "resource_metadata=", + "https://mcp.example.com/.well-known/oauth-protected-resource", + }, + }, + { + name: "with nil config uses request host", + oauthCfg: nil, + requestPath: "/api/test", + expectedContains: []string{ + "Bearer", + "resource_metadata=", + "/.well-known/oauth-protected-resource", + }, + }, + { + name: "with resource path configured", + oauthCfg: &oauth.Config{ + BaseURL: "https://mcp.example.com", + ResourcePath: "/mcp", + }, + requestPath: "/api/test", + expectedContains: []string{ + "Bearer", + "resource_metadata=", + "/mcp", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + rr := httptest.NewRecorder() + req := httptest.NewRequest(http.MethodGet, tt.requestPath, nil) + + sendAuthChallenge(rr, req, tt.oauthCfg) + + assert.Equal(t, http.StatusUnauthorized, rr.Code) + wwwAuth := rr.Header().Get("WWW-Authenticate") + for _, expected := range tt.expectedContains { + assert.Contains(t, wwwAuth, expected) + } + }) + } +} diff --git a/pkg/http/oauth/oauth.go b/pkg/http/oauth/oauth.go new file mode 100644 index 0000000000..5da2535661 --- /dev/null +++ 
b/pkg/http/oauth/oauth.go @@ -0,0 +1,243 @@ +// Package oauth provides OAuth 2.0 Protected Resource Metadata (RFC 9728) support +// for the GitHub MCP Server HTTP mode. +package oauth + +import ( + "fmt" + "net/http" + "strings" + + "github.com/github/github-mcp-server/pkg/http/headers" + "github.com/go-chi/chi/v5" + "github.com/modelcontextprotocol/go-sdk/auth" + "github.com/modelcontextprotocol/go-sdk/oauthex" +) + +const ( + // OAuthProtectedResourcePrefix is the well-known path prefix for OAuth protected resource metadata. + OAuthProtectedResourcePrefix = "/.well-known/oauth-protected-resource" + + // DefaultAuthorizationServer is GitHub's OAuth authorization server. + DefaultAuthorizationServer = "https://github.com/login/oauth" +) + +// SupportedScopes lists all OAuth scopes that may be required by MCP tools. +var SupportedScopes = []string{ + "repo", + "read:org", + "read:user", + "user:email", + "read:packages", + "write:packages", + "read:project", + "project", + "gist", + "notifications", + "workflow", + "codespace", +} + +// Config holds the OAuth configuration for the MCP server. +type Config struct { + // BaseURL is the publicly accessible URL where this server is hosted. + // This is used to construct the OAuth resource URL. + BaseURL string + + // AuthorizationServer is the OAuth authorization server URL. + // Defaults to GitHub's OAuth server if not specified. + AuthorizationServer string + + // ResourcePath is the externally visible base path for the MCP server (e.g., "/mcp"). + // This is used to restore the original path when a proxy strips a base path before forwarding. + // If empty, requests are treated as already using the external path. + ResourcePath string +} + +// AuthHandler handles OAuth-related HTTP endpoints. +type AuthHandler struct { + cfg *Config +} + +// NewAuthHandler creates a new OAuth auth handler. 
+func NewAuthHandler(cfg *Config) (*AuthHandler, error) { + if cfg == nil { + cfg = &Config{} + } + + // Default authorization server to GitHub + if cfg.AuthorizationServer == "" { + cfg.AuthorizationServer = DefaultAuthorizationServer + } + + return &AuthHandler{ + cfg: cfg, + }, nil +} + +// routePatterns defines the route patterns for OAuth protected resource metadata. +var routePatterns = []string{ + "", // Root: /.well-known/oauth-protected-resource + "/readonly", // Read-only mode + "/insiders", // Insiders mode + "/x/{toolset}", + "/x/{toolset}/readonly", +} + +// RegisterRoutes registers the OAuth protected resource metadata routes. +func (h *AuthHandler) RegisterRoutes(r chi.Router) { + for _, pattern := range routePatterns { + for _, route := range h.routesForPattern(pattern) { + path := OAuthProtectedResourcePrefix + route + r.Handle(path, h.metadataHandler()) + } + } +} + +func (h *AuthHandler) metadataHandler() http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + resourcePath := resolveResourcePath( + strings.TrimPrefix(r.URL.Path, OAuthProtectedResourcePrefix), + h.cfg.ResourcePath, + ) + resourceURL := h.buildResourceURL(r, resourcePath) + + metadata := &oauthex.ProtectedResourceMetadata{ + Resource: resourceURL, + AuthorizationServers: []string{h.cfg.AuthorizationServer}, + ResourceName: "GitHub MCP Server", + ScopesSupported: SupportedScopes, + BearerMethodsSupported: []string{"header"}, + } + + auth.ProtectedResourceMetadataHandler(metadata).ServeHTTP(w, r) + }) +} + +// routesForPattern generates route variants for a given pattern. 
+// GitHub strips the /mcp prefix before forwarding, so we register both variants: +// - With /mcp prefix: for direct access or when GitHub doesn't strip +// - Without /mcp prefix: for when GitHub has stripped the prefix +func (h *AuthHandler) routesForPattern(pattern string) []string { + basePaths := []string{""} + if basePath := normalizeBasePath(h.cfg.ResourcePath); basePath != "" { + basePaths = append(basePaths, basePath) + } else { + basePaths = append(basePaths, "/mcp") + } + + routes := make([]string, 0, len(basePaths)*2) + for _, basePath := range basePaths { + routes = append(routes, joinRoute(basePath, pattern)) + routes = append(routes, joinRoute(basePath, pattern)+"/") + } + + return routes +} + +// resolveResourcePath returns the externally visible resource path, +// restoring the configured base path when proxies strip it before forwarding. +func resolveResourcePath(path, basePath string) string { + if path == "" { + path = "/" + } + base := normalizeBasePath(basePath) + if base == "" { + return path + } + if path == "/" { + return base + } + if path == base || strings.HasPrefix(path, base+"/") { + return path + } + return base + path +} + +// ResolveResourcePath returns the externally visible resource path for a request. +// Exported for use by middleware. +func ResolveResourcePath(r *http.Request, cfg *Config) string { + basePath := "" + if cfg != nil { + basePath = cfg.ResourcePath + } + return resolveResourcePath(r.URL.Path, basePath) +} + +// buildResourceURL constructs the full resource URL for OAuth metadata. 
+func (h *AuthHandler) buildResourceURL(r *http.Request, resourcePath string) string { + host, scheme := GetEffectiveHostAndScheme(r, h.cfg) + baseURL := fmt.Sprintf("%s://%s", scheme, host) + if h.cfg.BaseURL != "" { + baseURL = strings.TrimSuffix(h.cfg.BaseURL, "/") + } + if resourcePath == "" { + resourcePath = "/" + } + if !strings.HasPrefix(resourcePath, "/") { + resourcePath = "/" + resourcePath + } + return baseURL + resourcePath +} + +// GetEffectiveHostAndScheme returns the effective host and scheme for a request. +func GetEffectiveHostAndScheme(r *http.Request, cfg *Config) (host, scheme string) { //nolint:revive + if fh := r.Header.Get(headers.ForwardedHostHeader); fh != "" { + host = fh + } else { + host = r.Host + } + if host == "" { + host = "localhost" + } + if fp := r.Header.Get(headers.ForwardedProtoHeader); fp != "" { + scheme = strings.ToLower(fp) + } else { + if r.TLS != nil { + scheme = "https" + } else { + scheme = "http" + } + } + return +} + +// BuildResourceMetadataURL constructs the full URL to the OAuth protected resource metadata endpoint. 
+func BuildResourceMetadataURL(r *http.Request, cfg *Config, resourcePath string) string { + host, scheme := GetEffectiveHostAndScheme(r, cfg) + suffix := "" + if resourcePath != "" && resourcePath != "/" { + if !strings.HasPrefix(resourcePath, "/") { + suffix = "/" + resourcePath + } else { + suffix = resourcePath + } + } + if cfg != nil && cfg.BaseURL != "" { + return strings.TrimSuffix(cfg.BaseURL, "/") + OAuthProtectedResourcePrefix + suffix + } + return fmt.Sprintf("%s://%s%s%s", scheme, host, OAuthProtectedResourcePrefix, suffix) +} + +func normalizeBasePath(path string) string { + trimmed := strings.TrimSpace(path) + if trimmed == "" || trimmed == "/" { + return "" + } + if !strings.HasPrefix(trimmed, "/") { + trimmed = "/" + trimmed + } + return strings.TrimSuffix(trimmed, "/") +} + +func joinRoute(basePath, pattern string) string { + if basePath == "" { + return pattern + } + if pattern == "" { + return basePath + } + if before, ok := strings.CutSuffix(basePath, "/"); ok { + return before + pattern + } + return basePath + pattern +} diff --git a/pkg/http/oauth/oauth_test.go b/pkg/http/oauth/oauth_test.go new file mode 100644 index 0000000000..9133e8331a --- /dev/null +++ b/pkg/http/oauth/oauth_test.go @@ -0,0 +1,615 @@ +package oauth + +import ( + "crypto/tls" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/github/github-mcp-server/pkg/http/headers" + "github.com/go-chi/chi/v5" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestNewAuthHandler(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + cfg *Config + expectedAuthServer string + expectedResourcePath string + }{ + { + name: "nil config uses defaults", + cfg: nil, + expectedAuthServer: DefaultAuthorizationServer, + expectedResourcePath: "", + }, + { + name: "empty config uses defaults", + cfg: &Config{}, + expectedAuthServer: DefaultAuthorizationServer, + expectedResourcePath: "", + }, + { + name: "custom 
authorization server", + cfg: &Config{ + AuthorizationServer: "https://custom.example.com/oauth", + }, + expectedAuthServer: "https://custom.example.com/oauth", + expectedResourcePath: "", + }, + { + name: "custom base URL and resource path", + cfg: &Config{ + BaseURL: "https://example.com", + ResourcePath: "/mcp", + }, + expectedAuthServer: DefaultAuthorizationServer, + expectedResourcePath: "/mcp", + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + handler, err := NewAuthHandler(tc.cfg) + require.NoError(t, err) + require.NotNil(t, handler) + + assert.Equal(t, tc.expectedAuthServer, handler.cfg.AuthorizationServer) + }) + } +} + +func TestGetEffectiveHostAndScheme(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + setupRequest func() *http.Request + cfg *Config + expectedHost string + expectedScheme string + }{ + { + name: "basic request without forwarding headers", + setupRequest: func() *http.Request { + req := httptest.NewRequest(http.MethodGet, "/test", nil) + req.Host = "example.com" + return req + }, + cfg: &Config{}, + expectedHost: "example.com", + expectedScheme: "http", // defaults to http + }, + { + name: "request with X-Forwarded-Host header", + setupRequest: func() *http.Request { + req := httptest.NewRequest(http.MethodGet, "/test", nil) + req.Host = "internal.example.com" + req.Header.Set(headers.ForwardedHostHeader, "public.example.com") + return req + }, + cfg: &Config{}, + expectedHost: "public.example.com", + expectedScheme: "http", + }, + { + name: "request with X-Forwarded-Proto header", + setupRequest: func() *http.Request { + req := httptest.NewRequest(http.MethodGet, "/test", nil) + req.Host = "example.com" + req.Header.Set(headers.ForwardedProtoHeader, "http") + return req + }, + cfg: &Config{}, + expectedHost: "example.com", + expectedScheme: "http", + }, + { + name: "request with both forwarding headers", + setupRequest: func() *http.Request { + req := 
httptest.NewRequest(http.MethodGet, "/test", nil) + req.Host = "internal.example.com" + req.Header.Set(headers.ForwardedHostHeader, "public.example.com") + req.Header.Set(headers.ForwardedProtoHeader, "https") + return req + }, + cfg: &Config{}, + expectedHost: "public.example.com", + expectedScheme: "https", + }, + { + name: "request with TLS", + setupRequest: func() *http.Request { + req := httptest.NewRequest(http.MethodGet, "/test", nil) + req.Host = "example.com" + req.TLS = &tls.ConnectionState{} + return req + }, + cfg: &Config{}, + expectedHost: "example.com", + expectedScheme: "https", + }, + { + name: "X-Forwarded-Proto takes precedence over TLS", + setupRequest: func() *http.Request { + req := httptest.NewRequest(http.MethodGet, "/test", nil) + req.Host = "example.com" + req.TLS = &tls.ConnectionState{} + req.Header.Set(headers.ForwardedProtoHeader, "http") + return req + }, + cfg: &Config{}, + expectedHost: "example.com", + expectedScheme: "http", + }, + { + name: "scheme is lowercased", + setupRequest: func() *http.Request { + req := httptest.NewRequest(http.MethodGet, "/test", nil) + req.Host = "example.com" + req.Header.Set(headers.ForwardedProtoHeader, "HTTPS") + return req + }, + cfg: &Config{}, + expectedHost: "example.com", + expectedScheme: "https", + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + req := tc.setupRequest() + host, scheme := GetEffectiveHostAndScheme(req, tc.cfg) + + assert.Equal(t, tc.expectedHost, host) + assert.Equal(t, tc.expectedScheme, scheme) + }) + } +} + +func TestResolveResourcePath(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + cfg *Config + setupRequest func() *http.Request + expectedPath string + }{ + { + name: "no base path uses request path", + cfg: &Config{}, + setupRequest: func() *http.Request { + return httptest.NewRequest(http.MethodGet, "/x/repos", nil) + }, + expectedPath: "/x/repos", + }, + { + name: "base path restored for root", + cfg: 
&Config{ + ResourcePath: "/mcp", + }, + setupRequest: func() *http.Request { + return httptest.NewRequest(http.MethodGet, "/", nil) + }, + expectedPath: "/mcp", + }, + { + name: "base path restored for nested", + cfg: &Config{ + ResourcePath: "/mcp", + }, + setupRequest: func() *http.Request { + return httptest.NewRequest(http.MethodGet, "/readonly", nil) + }, + expectedPath: "/mcp/readonly", + }, + { + name: "base path preserved when already present", + cfg: &Config{ + ResourcePath: "/mcp", + }, + setupRequest: func() *http.Request { + return httptest.NewRequest(http.MethodGet, "/mcp/readonly/", nil) + }, + expectedPath: "/mcp/readonly/", + }, + { + name: "custom base path restored", + cfg: &Config{ + ResourcePath: "/api", + }, + setupRequest: func() *http.Request { + return httptest.NewRequest(http.MethodGet, "/x/repos", nil) + }, + expectedPath: "/api/x/repos", + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + req := tc.setupRequest() + path := ResolveResourcePath(req, tc.cfg) + + assert.Equal(t, tc.expectedPath, path) + }) + } +} + +func TestBuildResourceMetadataURL(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + cfg *Config + setupRequest func() *http.Request + resourcePath string + expectedURL string + }{ + { + name: "root path", + cfg: &Config{}, + setupRequest: func() *http.Request { + req := httptest.NewRequest(http.MethodGet, "/", nil) + req.Host = "api.example.com" + return req + }, + resourcePath: "/", + expectedURL: "http://api.example.com/.well-known/oauth-protected-resource", + }, + { + name: "resource path preserves trailing slash", + cfg: &Config{}, + setupRequest: func() *http.Request { + req := httptest.NewRequest(http.MethodGet, "/mcp/", nil) + req.Host = "api.example.com" + return req + }, + resourcePath: "/mcp/", + expectedURL: "http://api.example.com/.well-known/oauth-protected-resource/mcp/", + }, + { + name: "with custom resource path", + cfg: &Config{}, + setupRequest: 
func() *http.Request { + req := httptest.NewRequest(http.MethodGet, "/mcp", nil) + req.Host = "api.example.com" + return req + }, + resourcePath: "/mcp", + expectedURL: "http://api.example.com/.well-known/oauth-protected-resource/mcp", + }, + { + name: "with base URL config", + cfg: &Config{ + BaseURL: "https://custom.example.com", + }, + setupRequest: func() *http.Request { + req := httptest.NewRequest(http.MethodGet, "/mcp", nil) + req.Host = "api.example.com" + return req + }, + resourcePath: "/mcp", + expectedURL: "https://custom.example.com/.well-known/oauth-protected-resource/mcp", + }, + { + name: "with forwarded headers", + cfg: &Config{}, + setupRequest: func() *http.Request { + req := httptest.NewRequest(http.MethodGet, "/mcp", nil) + req.Host = "internal.example.com" + req.Header.Set(headers.ForwardedHostHeader, "public.example.com") + req.Header.Set(headers.ForwardedProtoHeader, "https") + return req + }, + resourcePath: "/mcp", + expectedURL: "https://public.example.com/.well-known/oauth-protected-resource/mcp", + }, + { + name: "nil config uses request host", + cfg: nil, + setupRequest: func() *http.Request { + req := httptest.NewRequest(http.MethodGet, "/", nil) + req.Host = "api.example.com" + return req + }, + resourcePath: "", + expectedURL: "http://api.example.com/.well-known/oauth-protected-resource", + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + req := tc.setupRequest() + url := BuildResourceMetadataURL(req, tc.cfg, tc.resourcePath) + + assert.Equal(t, tc.expectedURL, url) + }) + } +} + +func TestHandleProtectedResource(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + cfg *Config + path string + host string + method string + expectedStatusCode int + expectedScopes []string + validateResponse func(t *testing.T, body map[string]any) + }{ + { + name: "GET request returns protected resource metadata", + cfg: &Config{ + BaseURL: "https://api.example.com", + }, + path: 
OAuthProtectedResourcePrefix, + host: "api.example.com", + method: http.MethodGet, + expectedStatusCode: http.StatusOK, + expectedScopes: SupportedScopes, + validateResponse: func(t *testing.T, body map[string]any) { + t.Helper() + assert.Equal(t, "GitHub MCP Server", body["resource_name"]) + assert.Equal(t, "https://api.example.com/", body["resource"]) + + authServers, ok := body["authorization_servers"].([]any) + require.True(t, ok) + require.Len(t, authServers, 1) + assert.Equal(t, DefaultAuthorizationServer, authServers[0]) + }, + }, + { + name: "OPTIONS request for CORS preflight", + cfg: &Config{ + BaseURL: "https://api.example.com", + }, + path: OAuthProtectedResourcePrefix, + host: "api.example.com", + method: http.MethodOptions, + expectedStatusCode: http.StatusNoContent, + }, + { + name: "path with /mcp suffix", + cfg: &Config{ + BaseURL: "https://api.example.com", + }, + path: OAuthProtectedResourcePrefix + "/mcp", + host: "api.example.com", + method: http.MethodGet, + expectedStatusCode: http.StatusOK, + validateResponse: func(t *testing.T, body map[string]any) { + t.Helper() + assert.Equal(t, "https://api.example.com/mcp", body["resource"]) + }, + }, + { + name: "path with /readonly suffix", + cfg: &Config{ + BaseURL: "https://api.example.com", + }, + path: OAuthProtectedResourcePrefix + "/readonly", + host: "api.example.com", + method: http.MethodGet, + expectedStatusCode: http.StatusOK, + validateResponse: func(t *testing.T, body map[string]any) { + t.Helper() + assert.Equal(t, "https://api.example.com/readonly", body["resource"]) + }, + }, + { + name: "path with trailing slash", + cfg: &Config{ + BaseURL: "https://api.example.com", + }, + path: OAuthProtectedResourcePrefix + "/mcp/", + host: "api.example.com", + method: http.MethodGet, + expectedStatusCode: http.StatusOK, + validateResponse: func(t *testing.T, body map[string]any) { + t.Helper() + assert.Equal(t, "https://api.example.com/mcp/", body["resource"]) + }, + }, + { + name: "custom 
authorization server in response", + cfg: &Config{ + BaseURL: "https://api.example.com", + AuthorizationServer: "https://custom.auth.example.com/oauth", + }, + path: OAuthProtectedResourcePrefix, + host: "api.example.com", + method: http.MethodGet, + expectedStatusCode: http.StatusOK, + validateResponse: func(t *testing.T, body map[string]any) { + t.Helper() + authServers, ok := body["authorization_servers"].([]any) + require.True(t, ok) + require.Len(t, authServers, 1) + assert.Equal(t, "https://custom.auth.example.com/oauth", authServers[0]) + }, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + handler, err := NewAuthHandler(tc.cfg) + require.NoError(t, err) + + router := chi.NewRouter() + handler.RegisterRoutes(router) + + req := httptest.NewRequest(tc.method, tc.path, nil) + req.Host = tc.host + + rec := httptest.NewRecorder() + router.ServeHTTP(rec, req) + + assert.Equal(t, tc.expectedStatusCode, rec.Code) + + // Check CORS headers + assert.Equal(t, "*", rec.Header().Get("Access-Control-Allow-Origin")) + assert.Contains(t, rec.Header().Get("Access-Control-Allow-Methods"), "GET") + assert.Contains(t, rec.Header().Get("Access-Control-Allow-Methods"), "OPTIONS") + + if tc.method == http.MethodGet && tc.validateResponse != nil { + assert.Equal(t, "application/json", rec.Header().Get("Content-Type")) + + var body map[string]any + err := json.Unmarshal(rec.Body.Bytes(), &body) + require.NoError(t, err) + + tc.validateResponse(t, body) + + // Verify scopes if expected + if tc.expectedScopes != nil { + scopes, ok := body["scopes_supported"].([]any) + require.True(t, ok) + assert.Len(t, scopes, len(tc.expectedScopes)) + } + } + }) + } +} + +func TestRegisterRoutes(t *testing.T) { + t.Parallel() + + handler, err := NewAuthHandler(&Config{ + BaseURL: "https://api.example.com", + }) + require.NoError(t, err) + + router := chi.NewRouter() + handler.RegisterRoutes(router) + + // List of expected routes that should be 
registered + expectedRoutes := []string{ + OAuthProtectedResourcePrefix, + OAuthProtectedResourcePrefix + "/", + OAuthProtectedResourcePrefix + "/mcp", + OAuthProtectedResourcePrefix + "/mcp/", + OAuthProtectedResourcePrefix + "/readonly", + OAuthProtectedResourcePrefix + "/readonly/", + OAuthProtectedResourcePrefix + "/mcp/readonly", + OAuthProtectedResourcePrefix + "/mcp/readonly/", + OAuthProtectedResourcePrefix + "/x/repos", + OAuthProtectedResourcePrefix + "/mcp/x/repos", + } + + for _, route := range expectedRoutes { + t.Run("route:"+route, func(t *testing.T) { + // Test GET + req := httptest.NewRequest(http.MethodGet, route, nil) + req.Host = "api.example.com" + rec := httptest.NewRecorder() + router.ServeHTTP(rec, req) + assert.Equal(t, http.StatusOK, rec.Code, "GET %s should return 200", route) + + // Test OPTIONS (CORS preflight) + req = httptest.NewRequest(http.MethodOptions, route, nil) + req.Host = "api.example.com" + rec = httptest.NewRecorder() + router.ServeHTTP(rec, req) + assert.Equal(t, http.StatusNoContent, rec.Code, "OPTIONS %s should return 204", route) + }) + } +} + +func TestSupportedScopes(t *testing.T) { + t.Parallel() + + // Verify all expected scopes are present + expectedScopes := []string{ + "repo", + "read:org", + "read:user", + "user:email", + "read:packages", + "write:packages", + "read:project", + "project", + "gist", + "notifications", + "workflow", + "codespace", + } + + assert.Equal(t, expectedScopes, SupportedScopes) +} + +func TestProtectedResourceResponseFormat(t *testing.T) { + t.Parallel() + + handler, err := NewAuthHandler(&Config{ + BaseURL: "https://api.example.com", + }) + require.NoError(t, err) + + router := chi.NewRouter() + handler.RegisterRoutes(router) + + req := httptest.NewRequest(http.MethodGet, OAuthProtectedResourcePrefix, nil) + req.Host = "api.example.com" + + rec := httptest.NewRecorder() + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + + var response map[string]any + err = 
json.Unmarshal(rec.Body.Bytes(), &response) + require.NoError(t, err) + + // Verify all required RFC 9728 fields are present + assert.Contains(t, response, "resource") + assert.Contains(t, response, "authorization_servers") + assert.Contains(t, response, "bearer_methods_supported") + assert.Contains(t, response, "scopes_supported") + + // Verify resource name (optional but we include it) + assert.Contains(t, response, "resource_name") + assert.Equal(t, "GitHub MCP Server", response["resource_name"]) + + // Verify bearer_methods_supported contains "header" + bearerMethods, ok := response["bearer_methods_supported"].([]any) + require.True(t, ok) + assert.Contains(t, bearerMethods, "header") + + // Verify authorization_servers is an array with GitHub OAuth + authServers, ok := response["authorization_servers"].([]any) + require.True(t, ok) + assert.Len(t, authServers, 1) + assert.Equal(t, DefaultAuthorizationServer, authServers[0]) +} + +func TestOAuthProtectedResourcePrefix(t *testing.T) { + t.Parallel() + + // RFC 9728 specifies this well-known path + assert.Equal(t, "/.well-known/oauth-protected-resource", OAuthProtectedResourcePrefix) +} + +func TestDefaultAuthorizationServer(t *testing.T) { + t.Parallel() + + assert.Equal(t, "https://github.com/login/oauth", DefaultAuthorizationServer) +} diff --git a/pkg/http/server.go b/pkg/http/server.go new file mode 100644 index 0000000000..7397e54a88 --- /dev/null +++ b/pkg/http/server.go @@ -0,0 +1,221 @@ +package http + +import ( + "context" + "fmt" + "io" + "log/slog" + "net/http" + "os" + "os/signal" + "slices" + "syscall" + "time" + + ghcontext "github.com/github/github-mcp-server/pkg/context" + "github.com/github/github-mcp-server/pkg/github" + "github.com/github/github-mcp-server/pkg/http/oauth" + "github.com/github/github-mcp-server/pkg/inventory" + "github.com/github/github-mcp-server/pkg/lockdown" + "github.com/github/github-mcp-server/pkg/scopes" + "github.com/github/github-mcp-server/pkg/translations" + 
"github.com/github/github-mcp-server/pkg/utils" + "github.com/go-chi/chi/v5" +) + +// knownFeatureFlags are the feature flags that can be enabled via X-MCP-Features header. +// Only these flags are accepted from headers. +var knownFeatureFlags = []string{} + +type ServerConfig struct { + // Version of the server + Version string + + // GitHub Host to target for API requests (e.g. github.com or github.enterprise.com) + Host string + + // Port to listen on (default: 8082) + Port int + + // BaseURL is the publicly accessible URL of this server for OAuth resource metadata. + // If not set, the server will derive the URL from incoming request headers. + BaseURL string + + // ResourcePath is the externally visible base path for this server (e.g., "/mcp"). + // This is used to restore the original path when a proxy strips a base path before forwarding. + ResourcePath string + + // ExportTranslations indicates if we should export translations + // See: https://github.com/github/github-mcp-server?tab=readme-ov-file#i18n--overriding-descriptions + ExportTranslations bool + + // EnableCommandLogging indicates if we should log commands + EnableCommandLogging bool + + // Path to the log file if not stderr + LogFilePath string + + // Content window size + ContentWindowSize int + + // LockdownMode indicates if we should enable lockdown mode + LockdownMode bool + + // RepoAccessCacheTTL overrides the default TTL for repository access cache entries. + RepoAccessCacheTTL *time.Duration + + // ScopeChallenge indicates if we should return OAuth scope challenges, and if we should perform + // tool filtering based on token scopes. 
+ ScopeChallenge bool +} + +func RunHTTPServer(cfg ServerConfig) error { + // Create app context + ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM) + defer stop() + + t, dumpTranslations := translations.TranslationHelper() + + var slogHandler slog.Handler + var logOutput io.Writer + if cfg.LogFilePath != "" { + file, err := os.OpenFile(cfg.LogFilePath, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0600) + if err != nil { + return fmt.Errorf("failed to open log file: %w", err) + } + logOutput = file + slogHandler = slog.NewTextHandler(logOutput, &slog.HandlerOptions{Level: slog.LevelDebug}) + } else { + logOutput = os.Stderr + slogHandler = slog.NewTextHandler(logOutput, &slog.HandlerOptions{Level: slog.LevelInfo}) + } + logger := slog.New(slogHandler) + logger.Info("starting server", "version", cfg.Version, "host", cfg.Host, "lockdownEnabled", cfg.LockdownMode) + + apiHost, err := utils.NewAPIHost(cfg.Host) + if err != nil { + return fmt.Errorf("failed to parse API host: %w", err) + } + + repoAccessOpts := []lockdown.RepoAccessOption{ + lockdown.WithLogger(logger.With("component", "lockdown")), + } + if cfg.RepoAccessCacheTTL != nil { + repoAccessOpts = append(repoAccessOpts, lockdown.WithTTL(*cfg.RepoAccessCacheTTL)) + } + + featureChecker := createHTTPFeatureChecker() + + deps := github.NewRequestDeps( + apiHost, + cfg.Version, + cfg.LockdownMode, + repoAccessOpts, + t, + cfg.ContentWindowSize, + featureChecker, + ) + + // Initialize the global tool scope map + err = initGlobalToolScopeMap(t) + if err != nil { + return fmt.Errorf("failed to initialize tool scope map: %w", err) + } + + // Register OAuth protected resource metadata endpoints + oauthCfg := &oauth.Config{ + BaseURL: cfg.BaseURL, + ResourcePath: cfg.ResourcePath, + } + + serverOptions := []HandlerOption{} + if cfg.ScopeChallenge { + scopeFetcher := scopes.NewFetcher(apiHost, scopes.FetcherOptions{}) + serverOptions = append(serverOptions, WithScopeFetcher(scopeFetcher)) + 
} + + r := chi.NewRouter() + handler := NewHTTPMcpHandler(ctx, &cfg, deps, t, logger, apiHost, append(serverOptions, WithFeatureChecker(featureChecker), WithOAuthConfig(oauthCfg))...) + oauthHandler, err := oauth.NewAuthHandler(oauthCfg) + if err != nil { + return fmt.Errorf("failed to create OAuth handler: %w", err) + } + + r.Group(func(r chi.Router) { + // Register Middleware First, needs to be before route registration + handler.RegisterMiddleware(r) + + // Register MCP server routes + handler.RegisterRoutes(r) + }) + logger.Info("MCP endpoints registered", "baseURL", cfg.BaseURL) + + r.Group(func(r chi.Router) { + // Register OAuth protected resource metadata endpoints + oauthHandler.RegisterRoutes(r) + }) + logger.Info("OAuth protected resource endpoints registered", "baseURL", cfg.BaseURL) + + addr := fmt.Sprintf(":%d", cfg.Port) + httpSvr := http.Server{ + Addr: addr, + Handler: r, + ReadHeaderTimeout: 60 * time.Second, + } + + go func() { + <-ctx.Done() + shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + logger.Info("shutting down server") + if err := httpSvr.Shutdown(shutdownCtx); err != nil { + logger.Error("error during server shutdown", "error", err) + } + }() + + if cfg.ExportTranslations { + // Once server is initialized, all translations are loaded + dumpTranslations() + } + + logger.Info("HTTP server listening", "addr", addr) + if err := httpSvr.ListenAndServe(); err != nil && err != http.ErrServerClosed { + return fmt.Errorf("HTTP server error: %w", err) + } + + logger.Info("server stopped gracefully") + return nil +} + +func initGlobalToolScopeMap(t translations.TranslationHelperFunc) error { + // Build inventory with all tools to extract scope information + inv, err := inventory.NewBuilder(). + SetTools(github.AllTools(t)). 
+ Build() + + if err != nil { + return fmt.Errorf("failed to build inventory for tool scope map: %w", err) + } + + // Initialize the global scope map + scopes.SetToolScopeMapFromInventory(inv) + + return nil +} + +// createHTTPFeatureChecker creates a feature checker that reads header features from context +// and validates them against the knownFeatureFlags whitelist +func createHTTPFeatureChecker() inventory.FeatureFlagChecker { + // Pre-compute whitelist as set for O(1) lookup + knownSet := make(map[string]bool, len(knownFeatureFlags)) + for _, f := range knownFeatureFlags { + knownSet[f] = true + } + + return func(ctx context.Context, flag string) (bool, error) { + if knownSet[flag] && slices.Contains(ghcontext.GetHeaderFeatures(ctx), flag) { + return true, nil + } + return false, nil + } +} diff --git a/pkg/http/transport/bearer.go b/pkg/http/transport/bearer.go new file mode 100644 index 0000000000..66922bbdaa --- /dev/null +++ b/pkg/http/transport/bearer.go @@ -0,0 +1,26 @@ +package transport + +import ( + "net/http" + "strings" + + ghcontext "github.com/github/github-mcp-server/pkg/context" + headers "github.com/github/github-mcp-server/pkg/http/headers" +) + +type BearerAuthTransport struct { + Transport http.RoundTripper + Token string +} + +func (t *BearerAuthTransport) RoundTrip(req *http.Request) (*http.Response, error) { + req = req.Clone(req.Context()) + req.Header.Set(headers.AuthorizationHeader, "Bearer "+t.Token) + + // Check for GraphQL-Features in context and add header if present + if features := ghcontext.GetGraphQLFeatures(req.Context()); len(features) > 0 { + req.Header.Set(headers.GraphQLFeaturesHeader, strings.Join(features, ", ")) + } + + return t.Transport.RoundTrip(req) +} diff --git a/pkg/github/transport.go b/pkg/http/transport/graphql_features.go similarity index 69% rename from pkg/github/transport.go rename to pkg/http/transport/graphql_features.go index 0a4372b235..7fe9182fcb 100644 --- a/pkg/github/transport.go +++ 
b/pkg/http/transport/graphql_features.go @@ -1,8 +1,11 @@ -package github +package transport import ( "net/http" "strings" + + ghcontext "github.com/github/github-mcp-server/pkg/context" + "github.com/github/github-mcp-server/pkg/http/headers" ) // GraphQLFeaturesTransport is an http.RoundTripper that adds GraphQL-Features @@ -15,14 +18,16 @@ import ( // // Usage: // +// import "github.com/github/github-mcp-server/pkg/http/transport" +// // httpClient := &http.Client{ -// Transport: &github.GraphQLFeaturesTransport{ +// Transport: &transport.GraphQLFeaturesTransport{ // Transport: http.DefaultTransport, // }, // } // gqlClient := githubv4.NewClient(httpClient) // -// Then use withGraphQLFeatures(ctx, "feature_name") when calling GraphQL operations. +// Then use ghcontext.WithGraphQLFeatures(ctx, "feature_name") when calling GraphQL operations. type GraphQLFeaturesTransport struct { // Transport is the underlying HTTP transport. If nil, http.DefaultTransport is used. Transport http.RoundTripper @@ -39,8 +44,8 @@ func (t *GraphQLFeaturesTransport) RoundTrip(req *http.Request) (*http.Response, req = req.Clone(req.Context()) // Check for GraphQL-Features in context and add header if present - if features := GetGraphQLFeatures(req.Context()); len(features) > 0 { - req.Header.Set("GraphQL-Features", strings.Join(features, ", ")) + if features := ghcontext.GetGraphQLFeatures(req.Context()); len(features) > 0 { + req.Header.Set(headers.GraphQLFeaturesHeader, strings.Join(features, ", ")) } return transport.RoundTrip(req) diff --git a/pkg/github/transport_test.go b/pkg/http/transport/graphql_features_test.go similarity index 83% rename from pkg/github/transport_test.go rename to pkg/http/transport/graphql_features_test.go index c981082555..1a0dc4214f 100644 --- a/pkg/github/transport_test.go +++ b/pkg/http/transport/graphql_features_test.go @@ -1,4 +1,4 @@ -package github +package transport import ( "context" @@ -6,6 +6,9 @@ import ( "net/http/httptest" "testing" + 
ghcontext "github.com/github/github-mcp-server/pkg/context" + "github.com/github/github-mcp-server/pkg/http/headers" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -54,8 +57,8 @@ func TestGraphQLFeaturesTransport(t *testing.T) { // Create a test server that captures the request header server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - capturedHeader = r.Header.Get("GraphQL-Features") - headerExists = r.Header.Get("GraphQL-Features") != "" + capturedHeader = r.Header.Get(headers.GraphQLFeaturesHeader) + headerExists = r.Header.Get(headers.GraphQLFeaturesHeader) != "" w.WriteHeader(http.StatusOK) })) defer server.Close() @@ -68,7 +71,7 @@ func TestGraphQLFeaturesTransport(t *testing.T) { // Create a request ctx := context.Background() if tc.features != nil { - ctx = withGraphQLFeatures(ctx, tc.features...) + ctx = ghcontext.WithGraphQLFeatures(ctx, tc.features...) } req, err := http.NewRequestWithContext(ctx, http.MethodPost, server.URL, nil) @@ -95,7 +98,7 @@ func TestGraphQLFeaturesTransport_NilTransport(t *testing.T) { // Create a test server server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - capturedHeader = r.Header.Get("GraphQL-Features") + capturedHeader = r.Header.Get(headers.GraphQLFeaturesHeader) w.WriteHeader(http.StatusOK) })) defer server.Close() @@ -106,7 +109,7 @@ func TestGraphQLFeaturesTransport_NilTransport(t *testing.T) { } // Create a request with features - ctx := withGraphQLFeatures(context.Background(), "test_feature") + ctx := ghcontext.WithGraphQLFeatures(context.Background(), "test_feature") req, err := http.NewRequestWithContext(ctx, http.MethodPost, server.URL, nil) require.NoError(t, err) @@ -134,12 +137,12 @@ func TestGraphQLFeaturesTransport_DoesNotMutateOriginalRequest(t *testing.T) { } // Create a request with features - ctx := withGraphQLFeatures(context.Background(), "test_feature") + ctx := 
ghcontext.WithGraphQLFeatures(context.Background(), "test_feature") req, err := http.NewRequestWithContext(ctx, http.MethodPost, server.URL, nil) require.NoError(t, err) // Store the original header value - originalHeader := req.Header.Get("GraphQL-Features") + originalHeader := req.Header.Get(headers.GraphQLFeaturesHeader) // Execute the request resp, err := transport.RoundTrip(req) @@ -147,5 +150,5 @@ func TestGraphQLFeaturesTransport_DoesNotMutateOriginalRequest(t *testing.T) { defer resp.Body.Close() // Verify the original request was not mutated - assert.Equal(t, originalHeader, req.Header.Get("GraphQL-Features")) + assert.Equal(t, originalHeader, req.Header.Get(headers.GraphQLFeaturesHeader)) } diff --git a/pkg/http/transport/user_agent.go b/pkg/http/transport/user_agent.go new file mode 100644 index 0000000000..a489941cce --- /dev/null +++ b/pkg/http/transport/user_agent.go @@ -0,0 +1,18 @@ +package transport + +import ( + "net/http" + + "github.com/github/github-mcp-server/pkg/http/headers" +) + +type UserAgentTransport struct { + Transport http.RoundTripper + Agent string +} + +func (t *UserAgentTransport) RoundTrip(req *http.Request) (*http.Response, error) { + req = req.Clone(req.Context()) + req.Header.Set(headers.UserAgentHeader, t.Agent) + return t.Transport.RoundTrip(req) +} diff --git a/pkg/inventory/builder.go b/pkg/inventory/builder.go index ff2d06d5da..d492e69b5a 100644 --- a/pkg/inventory/builder.go +++ b/pkg/inventory/builder.go @@ -2,11 +2,18 @@ package inventory import ( "context" + "errors" "fmt" - "sort" + "maps" + "slices" "strings" ) +var ( + // ErrUnknownTools is returned when tools specified via WithTools() are not recognized. + ErrUnknownTools = errors.New("unknown tools specified in WithTools") +) + // ToolFilter is a function that determines if a tool should be included. // Returns true if the tool should be included, false to exclude it. 
type ToolFilter func(ctx context.Context, tool *ServerTool) (bool, error) @@ -41,6 +48,7 @@ type Builder struct { featureChecker FeatureFlagChecker filters []ToolFilter // filters to apply to all tools generateInstructions bool + insidersMode bool } // NewBuilder creates a new Builder. @@ -72,9 +80,7 @@ func (b *Builder) SetPrompts(prompts []ServerPrompt) *Builder { // WithDeprecatedAliases adds deprecated tool name aliases that map to canonical names. // Returns self for chaining. func (b *Builder) WithDeprecatedAliases(aliases map[string]string) *Builder { - for oldName, newName := range aliases { - b.deprecatedAliases[oldName] = newName - } + maps.Copy(b.deprecatedAliases, aliases) return b } @@ -135,6 +141,42 @@ func (b *Builder) WithFilter(filter ToolFilter) *Builder { return b } +// WithExcludeTools specifies tools that should be disabled regardless of other settings. +// These tools will be excluded even if their toolset is enabled or they are in the +// additional tools list. This takes precedence over all other tool enablement settings. +// Input is cleaned (trimmed, deduplicated) before applying. +// Returns self for chaining. +func (b *Builder) WithExcludeTools(toolNames []string) *Builder { + cleaned := cleanTools(toolNames) + if len(cleaned) > 0 { + b.filters = append(b.filters, CreateExcludeToolsFilter(cleaned)) + } + return b +} + +// WithInsidersMode enables or disables insiders mode features. +// When insiders mode is disabled (default), UI metadata is removed from tools +// so clients won't attempt to load UI resources. +// Returns self for chaining. +func (b *Builder) WithInsidersMode(enabled bool) *Builder { + b.insidersMode = enabled + return b +} + +// CreateExcludeToolsFilter creates a ToolFilter that excludes tools by name. +// Any tool whose name appears in the excluded list will be filtered out. +// The input slice should already be cleaned (trimmed, deduplicated). 
+func CreateExcludeToolsFilter(excluded []string) ToolFilter { + set := make(map[string]struct{}, len(excluded)) + for _, name := range excluded { + set[name] = struct{}{} + } + return func(_ context.Context, tool *ServerTool) (bool, error) { + _, blocked := set[tool.Tool.Name] + return !blocked, nil + } +} + // cleanTools trims whitespace and removes duplicates from tool names. // Empty strings after trimming are excluded. func cleanTools(tools []string) []string { @@ -162,8 +204,14 @@ func cleanTools(tools []string) []string { // (i.e., they don't exist in the tool set and are not deprecated aliases). // This ensures invalid tool configurations fail fast at build time. func (b *Builder) Build() (*Inventory, error) { + // When insiders mode is disabled, strip insiders-only features from tools + tools := b.tools + if !b.insidersMode { + tools = stripInsidersFeatures(b.tools) + } + r := &Inventory{ - tools: b.tools, + tools: tools, resourceTemplates: b.resourceTemplates, prompts: b.prompts, deprecatedAliases: b.deprecatedAliases, @@ -176,9 +224,9 @@ func (b *Builder) Build() (*Inventory, error) { r.enabledToolsets, r.unrecognizedToolsets, r.toolsetIDs, r.toolsetIDSet, r.defaultToolsetIDs, r.toolsetDescriptions = b.processToolsets() // Build set of valid tool names for validation - validToolNames := make(map[string]bool, len(b.tools)) - for i := range b.tools { - validToolNames[b.tools[i].Tool.Name] = true + validToolNames := make(map[string]bool, len(tools)) + for i := range tools { + validToolNames[tools[i].Tool.Name] = true } // Process additional tools (clean, resolve aliases, and track unrecognized) @@ -204,7 +252,7 @@ func (b *Builder) Build() (*Inventory, error) { // Error out if there are unrecognized tools if len(unrecognizedTools) > 0 { - return nil, fmt.Errorf("unrecognized tools: %s", strings.Join(unrecognizedTools, ", ")) + return nil, fmt.Errorf("%w: %s", ErrUnknownTools, strings.Join(unrecognizedTools, ", ")) } } @@ -264,13 +312,13 @@ func (b *Builder) 
processToolsets() (map[ToolsetID]bool, []string, []ToolsetID, for id := range validIDs { allToolsetIDs = append(allToolsetIDs, id) } - sort.Slice(allToolsetIDs, func(i, j int) bool { return allToolsetIDs[i] < allToolsetIDs[j] }) + slices.Sort(allToolsetIDs) defaultToolsetIDList := make([]ToolsetID, 0, len(defaultIDs)) for id := range defaultIDs { defaultToolsetIDList = append(defaultToolsetIDList, id) } - sort.Slice(defaultToolsetIDList, func(i, j int) bool { return defaultToolsetIDList[i] < defaultToolsetIDList[j] }) + slices.Sort(defaultToolsetIDList) toolsetIDs := b.toolsetIDs @@ -326,3 +374,65 @@ func (b *Builder) processToolsets() (map[ToolsetID]bool, []string, []ToolsetID, } return enabledToolsets, unrecognized, allToolsetIDs, validIDs, defaultToolsetIDList, descriptions } + +// insidersOnlyMetaKeys lists the Meta keys that are only available in insiders mode. +// Add new experimental feature keys here to have them automatically stripped +// when insiders mode is disabled. +var insidersOnlyMetaKeys = []string{ + "ui", // MCP Apps UI metadata +} + +// stripInsidersFeatures removes insiders-only features from tools. +// This includes removing tools marked with InsidersOnly and stripping +// Meta keys listed in insidersOnlyMetaKeys from remaining tools. +func stripInsidersFeatures(tools []ServerTool) []ServerTool { + result := make([]ServerTool, 0, len(tools)) + for _, tool := range tools { + // Skip tools marked as insiders-only + if tool.InsidersOnly { + continue + } + if stripped := stripInsidersMetaFromTool(tool); stripped != nil { + result = append(result, *stripped) + } else { + result = append(result, tool) + } + } + return result +} + +// stripInsidersMetaFromTool removes insiders-only Meta keys from a single tool. +// Returns a modified copy if changes were made, nil otherwise. 
+func stripInsidersMetaFromTool(tool ServerTool) *ServerTool { + if tool.Tool.Meta == nil { + return nil + } + + // Check if any insiders-only keys exist + hasInsidersKeys := false + for _, key := range insidersOnlyMetaKeys { + if tool.Tool.Meta[key] != nil { + hasInsidersKeys = true + break + } + } + if !hasInsidersKeys { + return nil + } + + // Make a shallow copy and remove insiders-only keys + toolCopy := tool + newMeta := make(map[string]any, len(tool.Tool.Meta)) + for k, v := range tool.Tool.Meta { + if !slices.Contains(insidersOnlyMetaKeys, k) { + newMeta[k] = v + } + } + + if len(newMeta) == 0 { + toolCopy.Tool.Meta = nil + } else { + toolCopy.Tool.Meta = newMeta + } + return &toolCopy +} diff --git a/pkg/inventory/filters.go b/pkg/inventory/filters.go index 533bba552d..707457853c 100644 --- a/pkg/inventory/filters.go +++ b/pkg/inventory/filters.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "os" + "slices" "sort" ) @@ -266,7 +267,7 @@ func (r *Inventory) EnabledToolsetIDs() []ToolsetID { ids = append(ids, id) } } - sort.Slice(ids, func(i, j int) bool { return ids[i] < ids[j] }) + slices.Sort(ids) return ids } diff --git a/pkg/inventory/registry_test.go b/pkg/inventory/registry_test.go index bb3337af02..207e65dba8 100644 --- a/pkg/inventory/registry_test.go +++ b/pkg/inventory/registry_test.go @@ -1832,3 +1832,448 @@ func TestWithTools_DeprecatedAliasAndFeatureFlag(t *testing.T) { t.Errorf("Flag ON: Expected new_tool (via alias), got %s", availableOn[0].Tool.Name) } } + +// mockToolWithMeta creates a ServerTool with Meta for testing insiders mode +func mockToolWithMeta(name string, toolsetID string, meta map[string]any) ServerTool { + return NewServerToolFromHandler( + mcp.Tool{ + Name: name, + Annotations: &mcp.ToolAnnotations{ + ReadOnlyHint: true, + }, + InputSchema: json.RawMessage(`{"type":"object","properties":{}}`), + Meta: meta, + }, + testToolsetMetadata(toolsetID), + func(_ any) mcp.ToolHandler { + return func(_ context.Context, _ 
*mcp.CallToolRequest) (*mcp.CallToolResult, error) { + return nil, nil + } + }, + ) +} + +func TestWithInsidersMode_DisabledStripsUIMetadata(t *testing.T) { + toolWithUI := mockToolWithMeta("tool_with_ui", "toolset1", map[string]any{ + "ui": map[string]any{"html": "
hello
"}, + "description": "kept", + }) + + // Default: insiders mode is disabled - UI meta should be stripped + reg := mustBuild(t, NewBuilder().SetTools([]ServerTool{toolWithUI}).WithToolsets([]string{"all"})) + available := reg.AvailableTools(context.Background()) + + require.Len(t, available, 1) + // UI metadata should be stripped + if available[0].Tool.Meta["ui"] != nil { + t.Errorf("Expected 'ui' meta to be stripped, but it was present") + } + // Other metadata should be preserved + if available[0].Tool.Meta["description"] != "kept" { + t.Errorf("Expected 'description' meta to be preserved, got %v", available[0].Tool.Meta["description"]) + } +} + +func TestWithInsidersMode_EnabledPreservesUIMetadata(t *testing.T) { + uiData := map[string]any{"html": "
hello
"} + toolWithUI := mockToolWithMeta("tool_with_ui", "toolset1", map[string]any{ + "ui": uiData, + "description": "kept", + }) + + // Insiders mode enabled - UI meta should be preserved + reg := mustBuild(t, NewBuilder(). + SetTools([]ServerTool{toolWithUI}). + WithToolsets([]string{"all"}). + WithInsidersMode(true)) + available := reg.AvailableTools(context.Background()) + + require.Len(t, available, 1) + // UI metadata should be preserved + if available[0].Tool.Meta["ui"] == nil { + t.Errorf("Expected 'ui' meta to be preserved in insiders mode") + } + // Other metadata should also be preserved + if available[0].Tool.Meta["description"] != "kept" { + t.Errorf("Expected 'description' meta to be preserved, got %v", available[0].Tool.Meta["description"]) + } +} + +func TestWithInsidersMode_EnabledPreservesInsidersOnlyTools(t *testing.T) { + normalTool := mockTool("normal", "toolset1", true) + insidersTool := mockTool("insiders_only", "toolset1", true) + insidersTool.InsidersOnly = true + + // With insiders mode enabled, both tools should be available + reg := mustBuild(t, NewBuilder(). + SetTools([]ServerTool{normalTool, insidersTool}). + WithToolsets([]string{"all"}). + WithInsidersMode(true)) + available := reg.AvailableTools(context.Background()) + + require.Len(t, available, 2) + names := []string{available[0].Tool.Name, available[1].Tool.Name} + require.Contains(t, names, "normal") + require.Contains(t, names, "insiders_only") +} + +func TestWithInsidersMode_DisabledRemovesInsidersOnlyTools(t *testing.T) { + normalTool := mockTool("normal", "toolset1", true) + insidersTool := mockTool("insiders_only", "toolset1", true) + insidersTool.InsidersOnly = true + + // With insiders mode disabled, insiders-only tool should be removed + reg := mustBuild(t, NewBuilder(). + SetTools([]ServerTool{normalTool, insidersTool}). + WithToolsets([]string{"all"}). 
+ WithInsidersMode(false)) + available := reg.AvailableTools(context.Background()) + + require.Len(t, available, 1) + require.Equal(t, "normal", available[0].Tool.Name) +} + +func TestWithInsidersMode_ToolsWithoutUIMetaUnaffected(t *testing.T) { + toolNoUI := mockToolWithMeta("tool_no_ui", "toolset1", map[string]any{ + "description": "kept", + "version": "1.0", + }) + toolNilMeta := mockTool("tool_nil_meta", "toolset1", true) + + // Test with insiders disabled + reg := mustBuild(t, NewBuilder(). + SetTools([]ServerTool{toolNoUI, toolNilMeta}). + WithToolsets([]string{"all"})) + available := reg.AvailableTools(context.Background()) + + require.Len(t, available, 2) + + // Find toolNoUI + var foundNoUI, foundNilMeta *ServerTool + for i := range available { + switch available[i].Tool.Name { + case "tool_no_ui": + foundNoUI = &available[i] + case "tool_nil_meta": + foundNilMeta = &available[i] + } + } + + require.NotNil(t, foundNoUI) + require.NotNil(t, foundNilMeta) + + // toolNoUI should have its metadata preserved + if foundNoUI.Tool.Meta["description"] != "kept" || foundNoUI.Tool.Meta["version"] != "1.0" { + t.Errorf("Expected toolNoUI meta to be unchanged, got %v", foundNoUI.Tool.Meta) + } + + // toolNilMeta should still have nil meta + if foundNilMeta.Tool.Meta != nil { + t.Errorf("Expected toolNilMeta to have nil meta, got %v", foundNilMeta.Tool.Meta) + } +} + +func TestWithInsidersMode_UIOnlyMetaBecomesNil(t *testing.T) { + // Tool with ONLY ui metadata - should become nil after stripping + toolUIOnly := mockToolWithMeta("tool_ui_only", "toolset1", map[string]any{ + "ui": map[string]any{"html": "
hello
"}, + }) + + reg := mustBuild(t, NewBuilder(). + SetTools([]ServerTool{toolUIOnly}). + WithToolsets([]string{"all"})) + available := reg.AvailableTools(context.Background()) + + require.Len(t, available, 1) + // Meta should be nil since ui was the only key + if available[0].Tool.Meta != nil { + t.Errorf("Expected Meta to be nil after stripping only key, got %v", available[0].Tool.Meta) + } +} + +func TestStripInsidersMetaFromTool(t *testing.T) { + tests := []struct { + name string + meta map[string]any + expectChange bool + expectedMeta map[string]any // nil means Meta should be nil + }{ + { + name: "nil meta - no change", + meta: nil, + expectChange: false, + }, + { + name: "no insiders keys - no change", + meta: map[string]any{"description": "test", "version": "1.0"}, + expectChange: false, + }, + { + name: "ui key only - becomes nil", + meta: map[string]any{"ui": "data"}, + expectChange: true, + expectedMeta: nil, + }, + { + name: "ui key with other keys - ui stripped", + meta: map[string]any{"ui": "data", "description": "kept"}, + expectChange: true, + expectedMeta: map[string]any{"description": "kept"}, + }, + { + name: "ui is nil value - no change (nil value means key not present)", + meta: map[string]any{"ui": nil, "description": "kept"}, + expectChange: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tool := mockToolWithMeta("test", "toolset1", tt.meta) + result := stripInsidersMetaFromTool(tool) + + if tt.expectChange { + require.NotNil(t, result, "expected change but got nil") + if tt.expectedMeta == nil { + require.Nil(t, result.Tool.Meta, "expected Meta to be nil") + } else { + // Compare values by key since types may differ (map[string]any vs mcp.Meta) + for k, v := range tt.expectedMeta { + require.Equal(t, v, result.Tool.Meta[k], "key %s should match", k) + } + require.Len(t, result.Tool.Meta, len(tt.expectedMeta)) + } + } else { + require.Nil(t, result, "expected no change but got result") + } + }) + } +} + 
+func TestStripInsidersFeatures(t *testing.T) { + tools := []ServerTool{ + mockToolWithMeta("tool1", "toolset1", map[string]any{"ui": "data"}), + mockToolWithMeta("tool2", "toolset1", map[string]any{"description": "kept"}), + mockTool("tool3", "toolset1", true), // nil meta + } + + result := stripInsidersFeatures(tools) + + require.Len(t, result, 3) + + // tool1: ui should be stripped, meta becomes nil + require.Nil(t, result[0].Tool.Meta, "tool1 meta should be nil after stripping ui") + + // tool2: unchanged (compare by key since types differ) + require.Equal(t, "kept", result[1].Tool.Meta["description"]) + require.Len(t, result[1].Tool.Meta, 1) + + // tool3: unchanged (nil) + require.Nil(t, result[2].Tool.Meta) +} + +func TestStripInsidersFeatures_RemovesInsidersOnlyTools(t *testing.T) { + // Create tools: one normal, one insiders-only, one normal + normalTool1 := mockTool("normal1", "toolset1", true) + insidersTool := mockTool("insiders_only", "toolset1", true) + insidersTool.InsidersOnly = true + normalTool2 := mockTool("normal2", "toolset1", true) + + tools := []ServerTool{normalTool1, insidersTool, normalTool2} + + result := stripInsidersFeatures(tools) + + // Should only have 2 tools (insiders-only tool filtered out) + require.Len(t, result, 2) + require.Equal(t, "normal1", result[0].Tool.Name) + require.Equal(t, "normal2", result[1].Tool.Name) +} + +func TestInsidersOnlyMetaKeys_FutureAdditions(t *testing.T) { + // This test verifies the mechanism works for multiple keys + // If we add new experimental keys to insidersOnlyMetaKeys, they should be stripped + + // Save original and restore after test + originalKeys := insidersOnlyMetaKeys + defer func() { insidersOnlyMetaKeys = originalKeys }() + + // Add a hypothetical future experimental key + insidersOnlyMetaKeys = []string{"ui", "experimental_feature", "beta"} + + tool := mockToolWithMeta("test", "toolset1", map[string]any{ + "ui": "ui data", + "experimental_feature": "exp data", + "beta": "beta data", + 
"description": "kept", + }) + + result := stripInsidersMetaFromTool(tool) + + require.NotNil(t, result) + require.NotNil(t, result.Tool.Meta) + require.Nil(t, result.Tool.Meta["ui"], "ui should be stripped") + require.Nil(t, result.Tool.Meta["experimental_feature"], "experimental_feature should be stripped") + require.Nil(t, result.Tool.Meta["beta"], "beta should be stripped") + require.Equal(t, "kept", result.Tool.Meta["description"], "description should be preserved") +} + +func TestWithInsidersMode_DoesNotMutateOriginalTools(t *testing.T) { + originalMeta := map[string]any{"ui": "data", "description": "kept"} + tool := mockToolWithMeta("test", "toolset1", originalMeta) + tools := []ServerTool{tool} + + // Build with insiders disabled - should strip ui + _ = mustBuild(t, NewBuilder().SetTools(tools).WithToolsets([]string{"all"})) + + // Original tool should be unchanged + require.Equal(t, "data", tools[0].Tool.Meta["ui"], "original tool should not be mutated") + require.Equal(t, "kept", tools[0].Tool.Meta["description"], "original tool should not be mutated") +} + +func TestWithExcludeTools(t *testing.T) { + tools := []ServerTool{ + mockTool("tool1", "toolset1", true), + mockTool("tool2", "toolset1", true), + mockTool("tool3", "toolset2", true), + } + + tests := []struct { + name string + excluded []string + toolsets []string + expectedNames []string + unexpectedNames []string + }{ + { + name: "single tool excluded", + excluded: []string{"tool2"}, + toolsets: []string{"all"}, + expectedNames: []string{"tool1", "tool3"}, + unexpectedNames: []string{"tool2"}, + }, + { + name: "multiple tools excluded", + excluded: []string{"tool1", "tool3"}, + toolsets: []string{"all"}, + expectedNames: []string{"tool2"}, + unexpectedNames: []string{"tool1", "tool3"}, + }, + { + name: "empty excluded list is a no-op", + excluded: []string{}, + toolsets: []string{"all"}, + expectedNames: []string{"tool1", "tool2", "tool3"}, + unexpectedNames: nil, + }, + { + name: "nil excluded list 
is a no-op", + excluded: nil, + toolsets: []string{"all"}, + expectedNames: []string{"tool1", "tool2", "tool3"}, + unexpectedNames: nil, + }, + { + name: "excluding non-existent tool is a no-op", + excluded: []string{"nonexistent"}, + toolsets: []string{"all"}, + expectedNames: []string{"tool1", "tool2", "tool3"}, + unexpectedNames: nil, + }, + { + name: "exclude all tools", + excluded: []string{"tool1", "tool2", "tool3"}, + toolsets: []string{"all"}, + expectedNames: nil, + unexpectedNames: []string{"tool1", "tool2", "tool3"}, + }, + { + name: "whitespace is trimmed", + excluded: []string{" tool2 ", " tool3 "}, + toolsets: []string{"all"}, + expectedNames: []string{"tool1"}, + unexpectedNames: []string{"tool2", "tool3"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + reg := mustBuild(t, NewBuilder(). + SetTools(tools). + WithToolsets(tt.toolsets). + WithExcludeTools(tt.excluded)) + + available := reg.AvailableTools(context.Background()) + names := make(map[string]bool) + for _, tool := range available { + names[tool.Tool.Name] = true + } + + for _, expected := range tt.expectedNames { + require.True(t, names[expected], "tool %q should be available", expected) + } + for _, unexpected := range tt.unexpectedNames { + require.False(t, names[unexpected], "tool %q should be excluded", unexpected) + } + }) + } +} + +func TestWithExcludeTools_OverridesAdditionalTools(t *testing.T) { + tools := []ServerTool{ + mockTool("tool1", "toolset1", true), + mockTool("tool2", "toolset1", true), + mockTool("tool3", "toolset2", true), + } + + // tool3 is explicitly enabled via WithTools, but also excluded + // excluded should win because builder filters run before additional tools check + reg := mustBuild(t, NewBuilder(). + SetTools(tools). + WithToolsets([]string{"toolset1"}). + WithTools([]string{"tool3"}). 
+ WithExcludeTools([]string{"tool3"})) + + available := reg.AvailableTools(context.Background()) + names := make(map[string]bool) + for _, tool := range available { + names[tool.Tool.Name] = true + } + + require.True(t, names["tool1"], "tool1 should be available") + require.True(t, names["tool2"], "tool2 should be available") + require.False(t, names["tool3"], "tool3 should be excluded even though explicitly added via WithTools") +} + +func TestWithExcludeTools_CombinesWithReadOnly(t *testing.T) { + tools := []ServerTool{ + mockTool("read_tool", "toolset1", true), + mockTool("write_tool", "toolset1", false), + mockTool("another_read", "toolset1", true), + } + + // read-only excludes write_tool, exclude-tools excludes read_tool + reg := mustBuild(t, NewBuilder(). + SetTools(tools). + WithToolsets([]string{"all"}). + WithReadOnly(true). + WithExcludeTools([]string{"read_tool"})) + + available := reg.AvailableTools(context.Background()) + require.Len(t, available, 1) + require.Equal(t, "another_read", available[0].Tool.Name) +} + +func TestCreateExcludeToolsFilter(t *testing.T) { + filter := CreateExcludeToolsFilter([]string{"blocked_tool"}) + + blockedTool := mockTool("blocked_tool", "toolset1", true) + allowedTool := mockTool("allowed_tool", "toolset1", true) + + allowed, err := filter(context.Background(), &blockedTool) + require.NoError(t, err) + require.False(t, allowed, "blocked_tool should be excluded") + + allowed, err = filter(context.Background(), &allowedTool) + require.NoError(t, err) + require.True(t, allowed, "allowed_tool should be included") +} diff --git a/pkg/inventory/server_tool.go b/pkg/inventory/server_tool.go index 752a4c2bd0..b08ae1f014 100644 --- a/pkg/inventory/server_tool.go +++ b/pkg/inventory/server_tool.go @@ -82,6 +82,10 @@ type ServerTool struct { // This includes the required scopes plus any higher-level scopes that provide // the necessary permissions due to scope hierarchy. 
AcceptedScopes []string + + // InsidersOnly marks this tool as only available when insiders mode is enabled. + // When insiders mode is disabled, tools with this flag set are completely omitted. + InsidersOnly bool } // IsReadOnly returns true if this tool is marked as read-only via annotations. diff --git a/pkg/lockdown/lockdown.go b/pkg/lockdown/lockdown.go index 80eca07f87..2dceac8aa6 100644 --- a/pkg/lockdown/lockdown.go +++ b/pkg/lockdown/lockdown.go @@ -220,7 +220,7 @@ func (c *RepoAccessCache) queryRepoAccessInfo(ctx context.Context, username, own } `graphql:"repository(owner: $owner, name: $name)"` } - variables := map[string]interface{}{ + variables := map[string]any{ "owner": githubv4.String(owner), "name": githubv4.String(repo), "username": githubv4.String(username), diff --git a/pkg/raw/raw.go b/pkg/raw/raw.go index 10bade5eb2..df9cd0ad11 100644 --- a/pkg/raw/raw.go +++ b/pkg/raw/raw.go @@ -6,7 +6,7 @@ import ( "net/http" "net/url" - gogithub "github.com/google/go-github/v79/github" + gogithub "github.com/google/go-github/v82/github" ) // GetRawClientFn is a function type that returns a RawClient instance. @@ -25,7 +25,7 @@ func NewClient(client *gogithub.Client, rawURL *url.URL) *Client { return &Client{client: client, url: rawURL} } -func (c *Client) newRequest(ctx context.Context, method string, urlStr string, body interface{}, opts ...gogithub.RequestOption) (*http.Request, error) { +func (c *Client) newRequest(ctx context.Context, method string, urlStr string, body any, opts ...gogithub.RequestOption) (*http.Request, error) { req, err := c.client.NewRequest(method, urlStr, body, opts...) 
if err != nil { return nil, err diff --git a/pkg/raw/raw_test.go b/pkg/raw/raw_test.go index 4c4aa33b4a..6897f492f6 100644 --- a/pkg/raw/raw_test.go +++ b/pkg/raw/raw_test.go @@ -9,7 +9,7 @@ import ( "strings" "testing" - "github.com/google/go-github/v79/github" + "github.com/google/go-github/v82/github" "github.com/stretchr/testify/require" ) diff --git a/pkg/scopes/fetcher.go b/pkg/scopes/fetcher.go index 48e0001796..b372455031 100644 --- a/pkg/scopes/fetcher.go +++ b/pkg/scopes/fetcher.go @@ -7,6 +7,9 @@ import ( "net/url" "strings" "time" + + "github.com/github/github-mcp-server/pkg/http/headers" + "github.com/github/github-mcp-server/pkg/utils" ) // OAuthScopesHeader is the HTTP response header containing the token's OAuth scopes. @@ -23,28 +26,27 @@ type FetcherOptions struct { // APIHost is the GitHub API host (e.g., "https://api.github.com"). // Defaults to "https://api.github.com" if empty. - APIHost string + APIHost utils.APIHostResolver +} + +type FetcherInterface interface { + FetchTokenScopes(ctx context.Context, token string) ([]string, error) } // Fetcher retrieves token scopes from GitHub's API. // It uses an HTTP HEAD request to minimize bandwidth since we only need headers. type Fetcher struct { client *http.Client - apiHost string + apiHost utils.APIHostResolver } // NewFetcher creates a new scope fetcher with the given options. -func NewFetcher(opts FetcherOptions) *Fetcher { +func NewFetcher(apiHost utils.APIHostResolver, opts FetcherOptions) *Fetcher { client := opts.HTTPClient if client == nil { client = &http.Client{Timeout: DefaultFetchTimeout} } - apiHost := opts.APIHost - if apiHost == "" { - apiHost = "https://api.github.com" - } - return &Fetcher{ client: client, apiHost: apiHost, @@ -61,8 +63,13 @@ func NewFetcher(opts FetcherOptions) *Fetcher { // Note: Fine-grained PATs don't return the X-OAuth-Scopes header, so an empty // slice is returned for those tokens. 
func (f *Fetcher) FetchTokenScopes(ctx context.Context, token string) ([]string, error) { + apiHostURL, err := f.apiHost.BaseRESTURL(ctx) + if err != nil { + return nil, fmt.Errorf("failed to get API host URL: %w", err) + } + // Use a lightweight endpoint that requires authentication - endpoint, err := url.JoinPath(f.apiHost, "/") + endpoint, err := url.JoinPath(apiHostURL.String(), "/") if err != nil { return nil, fmt.Errorf("failed to construct API URL: %w", err) } @@ -72,9 +79,9 @@ func (f *Fetcher) FetchTokenScopes(ctx context.Context, token string) ([]string, return nil, fmt.Errorf("failed to create request: %w", err) } - req.Header.Set("Authorization", "Bearer "+token) - req.Header.Set("Accept", "application/vnd.github+json") - req.Header.Set("X-GitHub-Api-Version", "2022-11-28") + req.Header.Set(headers.AuthorizationHeader, "Bearer "+token) + req.Header.Set(headers.AcceptHeader, "application/vnd.github+json") + req.Header.Set(headers.GitHubAPIVersionHeader, "2022-11-28") resp, err := f.client.Do(req) if err != nil { @@ -115,11 +122,16 @@ func ParseScopeHeader(header string) []string { // FetchTokenScopes is a convenience function that creates a default fetcher // and fetches the token scopes. func FetchTokenScopes(ctx context.Context, token string) ([]string, error) { - return NewFetcher(FetcherOptions{}).FetchTokenScopes(ctx, token) + apiHost, err := utils.NewAPIHost("https://api.github.com/") + if err != nil { + return nil, fmt.Errorf("failed to create default API host: %w", err) + } + + return NewFetcher(apiHost, FetcherOptions{}).FetchTokenScopes(ctx, token) } // FetchTokenScopesWithHost is a convenience function that creates a fetcher // for a specific API host and fetches the token scopes. 
-func FetchTokenScopesWithHost(ctx context.Context, token, apiHost string) ([]string, error) { - return NewFetcher(FetcherOptions{APIHost: apiHost}).FetchTokenScopes(ctx, token) +func FetchTokenScopesWithHost(ctx context.Context, token string, apiHost utils.APIHostResolver) ([]string, error) { + return NewFetcher(apiHost, FetcherOptions{}).FetchTokenScopes(ctx, token) } diff --git a/pkg/scopes/fetcher_test.go b/pkg/scopes/fetcher_test.go index 13feab5b0f..2d887d7a8e 100644 --- a/pkg/scopes/fetcher_test.go +++ b/pkg/scopes/fetcher_test.go @@ -4,6 +4,7 @@ import ( "context" "net/http" "net/http/httptest" + "net/url" "testing" "time" @@ -11,6 +12,23 @@ import ( "github.com/stretchr/testify/require" ) +type testAPIHostResolver struct { + baseURL string +} + +func (t testAPIHostResolver) BaseRESTURL(_ context.Context) (*url.URL, error) { + return url.Parse(t.baseURL) +} +func (t testAPIHostResolver) GraphqlURL(_ context.Context) (*url.URL, error) { + return nil, nil +} +func (t testAPIHostResolver) UploadURL(_ context.Context) (*url.URL, error) { + return nil, nil +} +func (t testAPIHostResolver) RawURL(_ context.Context) (*url.URL, error) { + return nil, nil +} + func TestParseScopeHeader(t *testing.T) { tests := []struct { name string @@ -146,10 +164,8 @@ func TestFetcher_FetchTokenScopes(t *testing.T) { t.Run(tt.name, func(t *testing.T) { server := httptest.NewServer(tt.handler) defer server.Close() - - fetcher := NewFetcher(FetcherOptions{ - APIHost: server.URL, - }) + apiHost := testAPIHostResolver{baseURL: server.URL} + fetcher := NewFetcher(apiHost, FetcherOptions{}) scopes, err := fetcher.FetchTokenScopes(context.Background(), "test-token") @@ -167,10 +183,13 @@ func TestFetcher_FetchTokenScopes(t *testing.T) { } func TestFetcher_DefaultOptions(t *testing.T) { - fetcher := NewFetcher(FetcherOptions{}) + apiHost := testAPIHostResolver{baseURL: "https://api.github.com"} + fetcher := NewFetcher(apiHost, FetcherOptions{}) // Verify default API host is set - 
assert.Equal(t, "https://api.github.com", fetcher.apiHost) + apiURL, err := fetcher.apiHost.BaseRESTURL(context.Background()) + require.NoError(t, err) + assert.Equal(t, "https://api.github.com", apiURL.String()) // Verify default HTTP client is set with timeout assert.NotNil(t, fetcher.client) @@ -180,7 +199,8 @@ func TestFetcher_DefaultOptions(t *testing.T) { func TestFetcher_CustomHTTPClient(t *testing.T) { customClient := &http.Client{Timeout: 5 * time.Second} - fetcher := NewFetcher(FetcherOptions{ + apiHost := testAPIHostResolver{baseURL: "https://api.github.com"} + fetcher := NewFetcher(apiHost, FetcherOptions{ HTTPClient: customClient, }) @@ -188,11 +208,12 @@ func TestFetcher_CustomHTTPClient(t *testing.T) { } func TestFetcher_CustomAPIHost(t *testing.T) { - fetcher := NewFetcher(FetcherOptions{ - APIHost: "https://api.github.enterprise.com", - }) + apiHost := testAPIHostResolver{baseURL: "https://api.github.enterprise.com"} + fetcher := NewFetcher(apiHost, FetcherOptions{}) - assert.Equal(t, "https://api.github.enterprise.com", fetcher.apiHost) + apiURL, err := fetcher.apiHost.BaseRESTURL(context.Background()) + require.NoError(t, err) + assert.Equal(t, "https://api.github.enterprise.com", apiURL.String()) } func TestFetcher_ContextCancellation(t *testing.T) { @@ -202,9 +223,8 @@ func TestFetcher_ContextCancellation(t *testing.T) { })) defer server.Close() - fetcher := NewFetcher(FetcherOptions{ - APIHost: server.URL, - }) + apiHost := testAPIHostResolver{baseURL: server.URL} + fetcher := NewFetcher(apiHost, FetcherOptions{}) ctx, cancel := context.WithCancel(context.Background()) cancel() // Cancel immediately diff --git a/pkg/scopes/map.go b/pkg/scopes/map.go new file mode 100644 index 0000000000..3c98338347 --- /dev/null +++ b/pkg/scopes/map.go @@ -0,0 +1,129 @@ +package scopes + +import "github.com/github/github-mcp-server/pkg/inventory" + +// ToolScopeMap maps tool names to their scope requirements. 
+type ToolScopeMap map[string]*ToolScopeInfo + +// ToolScopeInfo contains scope information for a single tool. +type ToolScopeInfo struct { + // RequiredScopes contains the scopes that are directly required by this tool. + RequiredScopes []string + + // AcceptedScopes contains all scopes that satisfy the requirements (including parent scopes). + AcceptedScopes []string +} + +// globalToolScopeMap is populated from inventory when SetToolScopeMapFromInventory is called +var globalToolScopeMap ToolScopeMap + +// SetToolScopeMapFromInventory builds and stores a tool scope map from an inventory. +// This should be called after building the inventory to make scopes available for middleware. +func SetToolScopeMapFromInventory(inv *inventory.Inventory) { + globalToolScopeMap = GetToolScopeMapFromInventory(inv) +} + +// SetGlobalToolScopeMap sets the global tool scope map directly. +// This is useful for testing when you don't have a full inventory. +func SetGlobalToolScopeMap(m ToolScopeMap) { + globalToolScopeMap = m +} + +// GetToolScopeMap returns the global tool scope map. +// Returns an empty map if SetToolScopeMapFromInventory hasn't been called yet. +func GetToolScopeMap() (ToolScopeMap, error) { + if globalToolScopeMap == nil { + return make(ToolScopeMap), nil + } + return globalToolScopeMap, nil +} + +// GetToolScopeInfo returns scope information for a specific tool from the global scope map. +func GetToolScopeInfo(toolName string) (*ToolScopeInfo, error) { + m, err := GetToolScopeMap() + if err != nil { + return nil, err + } + return m[toolName], nil +} + +// GetToolScopeMapFromInventory builds a tool scope map from an inventory. +// This extracts scope information from ServerTool.RequiredScopes and ServerTool.AcceptedScopes. 
+func GetToolScopeMapFromInventory(inv *inventory.Inventory) ToolScopeMap { + result := make(ToolScopeMap) + + // Get all tools from the inventory (both enabled and disabled) + // We need all tools for scope checking purposes + allTools := inv.AllTools() + for i := range allTools { + tool := &allTools[i] + if len(tool.RequiredScopes) > 0 || len(tool.AcceptedScopes) > 0 { + result[tool.Tool.Name] = &ToolScopeInfo{ + RequiredScopes: tool.RequiredScopes, + AcceptedScopes: tool.AcceptedScopes, + } + } + } + + return result +} + +// HasAcceptedScope checks if any of the provided user scopes satisfy the tool's requirements. +func (t *ToolScopeInfo) HasAcceptedScope(userScopes ...string) bool { + if t == nil || len(t.AcceptedScopes) == 0 { + return true // No scopes required + } + + userScopeSet := make(map[string]bool) + for _, scope := range userScopes { + userScopeSet[scope] = true + } + + for _, scope := range t.AcceptedScopes { + if userScopeSet[scope] { + return true + } + } + return false +} + +// MissingScopes returns the required scopes that are not present in the user's scopes. +func (t *ToolScopeInfo) MissingScopes(userScopes ...string) []string { + if t == nil || len(t.RequiredScopes) == 0 { + return nil + } + + // Create a set of user scopes for O(1) lookup + userScopeSet := make(map[string]bool, len(userScopes)) + for _, s := range userScopes { + userScopeSet[s] = true + } + + // Check if any accepted scope is present + hasAccepted := false + for _, scope := range t.AcceptedScopes { + if userScopeSet[scope] { + hasAccepted = true + break + } + } + + if hasAccepted { + return nil // User has sufficient scopes + } + + // Return required scopes as the minimum needed + missing := make([]string, len(t.RequiredScopes)) + copy(missing, t.RequiredScopes) + return missing +} + +// GetRequiredScopesSlice returns the required scopes as a slice of strings. 
+func (t *ToolScopeInfo) GetRequiredScopesSlice() []string { + if t == nil { + return nil + } + scopes := make([]string, len(t.RequiredScopes)) + copy(scopes, t.RequiredScopes) + return scopes +} diff --git a/pkg/scopes/map_test.go b/pkg/scopes/map_test.go new file mode 100644 index 0000000000..5f33cdda2b --- /dev/null +++ b/pkg/scopes/map_test.go @@ -0,0 +1,194 @@ +package scopes + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGetToolScopeMap(t *testing.T) { + // Reset and set up a test map + SetGlobalToolScopeMap(ToolScopeMap{ + "test_tool": &ToolScopeInfo{ + RequiredScopes: []string{"read:org"}, + AcceptedScopes: []string{"read:org", "write:org", "admin:org"}, + }, + }) + + m, err := GetToolScopeMap() + require.NoError(t, err) + require.NotNil(t, m) + require.Greater(t, len(m), 0, "expected at least one tool in the scope map") + + testTool, ok := m["test_tool"] + require.True(t, ok, "expected test_tool to be in the scope map") + assert.Contains(t, testTool.RequiredScopes, "read:org") + assert.Contains(t, testTool.AcceptedScopes, "read:org") + assert.Contains(t, testTool.AcceptedScopes, "admin:org") +} + +func TestGetToolScopeInfo(t *testing.T) { + // Set up test scope map + SetGlobalToolScopeMap(ToolScopeMap{ + "search_orgs": &ToolScopeInfo{ + RequiredScopes: []string{"read:org"}, + AcceptedScopes: []string{"read:org", "write:org", "admin:org"}, + }, + }) + + info, err := GetToolScopeInfo("search_orgs") + require.NoError(t, err) + require.NotNil(t, info) + + // Non-existent tool should return nil + info, err = GetToolScopeInfo("nonexistent_tool") + require.NoError(t, err) + assert.Nil(t, info) +} + +func TestToolScopeInfo_HasAcceptedScope(t *testing.T) { + testCases := []struct { + name string + scopeInfo *ToolScopeInfo + userScopes []string + expected bool + }{ + { + name: "has exact required scope", + scopeInfo: &ToolScopeInfo{ + RequiredScopes: []string{"read:org"}, + AcceptedScopes: 
[]string{"read:org", "write:org", "admin:org"}, + }, + userScopes: []string{"read:org"}, + expected: true, + }, + { + name: "has parent scope (admin:org grants read:org)", + scopeInfo: &ToolScopeInfo{ + RequiredScopes: []string{"read:org"}, + AcceptedScopes: []string{"read:org", "write:org", "admin:org"}, + }, + userScopes: []string{"admin:org"}, + expected: true, + }, + { + name: "has parent scope (write:org grants read:org)", + scopeInfo: &ToolScopeInfo{ + RequiredScopes: []string{"read:org"}, + AcceptedScopes: []string{"read:org", "write:org", "admin:org"}, + }, + userScopes: []string{"write:org"}, + expected: true, + }, + { + name: "missing required scope", + scopeInfo: &ToolScopeInfo{ + RequiredScopes: []string{"read:org"}, + AcceptedScopes: []string{"read:org", "write:org", "admin:org"}, + }, + userScopes: []string{"repo"}, + expected: false, + }, + { + name: "no scope required", + scopeInfo: &ToolScopeInfo{ + RequiredScopes: []string{}, + AcceptedScopes: []string{}, + }, + userScopes: []string{}, + expected: true, + }, + { + name: "nil scope info", + scopeInfo: nil, + userScopes: []string{}, + expected: true, + }, + { + name: "repo scope for tool requiring repo", + scopeInfo: &ToolScopeInfo{ + RequiredScopes: []string{"repo"}, + AcceptedScopes: []string{"repo"}, + }, + userScopes: []string{"repo"}, + expected: true, + }, + { + name: "missing repo scope", + scopeInfo: &ToolScopeInfo{ + RequiredScopes: []string{"repo"}, + AcceptedScopes: []string{"repo"}, + }, + userScopes: []string{"public_repo"}, + expected: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + result := tc.scopeInfo.HasAcceptedScope(tc.userScopes...) 
+ assert.Equal(t, tc.expected, result) + }) + } +} + +func TestToolScopeInfo_MissingScopes(t *testing.T) { + testCases := []struct { + name string + scopeInfo *ToolScopeInfo + userScopes []string + expectedLen int + expectedScopes []string + }{ + { + name: "has required scope - no missing", + scopeInfo: &ToolScopeInfo{ + RequiredScopes: []string{"read:org"}, + AcceptedScopes: []string{"read:org", "write:org", "admin:org"}, + }, + userScopes: []string{"read:org"}, + expectedLen: 0, + expectedScopes: nil, + }, + { + name: "missing scope", + scopeInfo: &ToolScopeInfo{ + RequiredScopes: []string{"read:org"}, + AcceptedScopes: []string{"read:org", "write:org", "admin:org"}, + }, + userScopes: []string{"repo"}, + expectedLen: 1, + expectedScopes: []string{"read:org"}, + }, + { + name: "no scope required - no missing", + scopeInfo: &ToolScopeInfo{ + RequiredScopes: []string{}, + AcceptedScopes: []string{}, + }, + userScopes: []string{}, + expectedLen: 0, + expectedScopes: nil, + }, + { + name: "nil scope info - no missing", + scopeInfo: nil, + userScopes: []string{}, + expectedLen: 0, + expectedScopes: nil, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + missing := tc.scopeInfo.MissingScopes(tc.userScopes...) + assert.Len(t, missing, tc.expectedLen) + if tc.expectedScopes != nil { + for _, expected := range tc.expectedScopes { + assert.Contains(t, missing, expected) + } + } + }) + } +} diff --git a/pkg/scopes/scopes.go b/pkg/scopes/scopes.go index a9b06e9880..cb1b7681a7 100644 --- a/pkg/scopes/scopes.go +++ b/pkg/scopes/scopes.go @@ -1,6 +1,9 @@ package scopes -import "sort" +import ( + "slices" + "sort" +) // Scope represents a GitHub OAuth scope. // These constants define all OAuth scopes used by the GitHub MCP server tools. 
@@ -88,9 +91,7 @@ func (s ScopeSet) ToSlice() []Scope { scopes = append(scopes, scope) } // Sort for deterministic output - sort.Slice(scopes, func(i, j int) bool { - return scopes[i] < scopes[j] - }) + slices.Sort(scopes) return scopes } diff --git a/pkg/tooldiscovery/search.go b/pkg/tooldiscovery/search.go index e7adc029b2..e46b028504 100644 --- a/pkg/tooldiscovery/search.go +++ b/pkg/tooldiscovery/search.go @@ -291,10 +291,7 @@ func normalizedSimilarity(a, b string) float64 { } distance := fuzzy.LevenshteinDistance(a, b) - maxLen := len(a) - if len(b) > maxLen { - maxLen = len(b) - } + maxLen := max(len(b), len(a)) similarity := 1 - (float64(distance) / float64(maxLen)) if similarity < 0 { diff --git a/pkg/utils/api.go b/pkg/utils/api.go new file mode 100644 index 0000000000..a523917de5 --- /dev/null +++ b/pkg/utils/api.go @@ -0,0 +1,222 @@ +package utils //nolint:revive //TODO: figure out a better name for this package + +import ( + "context" + "fmt" + "net/http" + "net/url" + "strings" + "time" +) + +type APIHostResolver interface { + BaseRESTURL(ctx context.Context) (*url.URL, error) + GraphqlURL(ctx context.Context) (*url.URL, error) + UploadURL(ctx context.Context) (*url.URL, error) + RawURL(ctx context.Context) (*url.URL, error) +} + +type APIHost struct { + restURL *url.URL + gqlURL *url.URL + uploadURL *url.URL + rawURL *url.URL +} + +var _ APIHostResolver = APIHost{} + +func NewAPIHost(s string) (APIHostResolver, error) { + a, err := parseAPIHost(s) + + if err != nil { + return nil, err + } + + return a, nil +} + +// APIHostResolver implementation +func (a APIHost) BaseRESTURL(_ context.Context) (*url.URL, error) { + return a.restURL, nil +} + +func (a APIHost) GraphqlURL(_ context.Context) (*url.URL, error) { + return a.gqlURL, nil +} + +func (a APIHost) UploadURL(_ context.Context) (*url.URL, error) { + return a.uploadURL, nil +} + +func (a APIHost) RawURL(_ context.Context) (*url.URL, error) { + return a.rawURL, nil +} + +func newDotcomHost() 
(APIHost, error) { + baseRestURL, err := url.Parse("https://api.github.com/") + if err != nil { + return APIHost{}, fmt.Errorf("failed to parse dotcom REST URL: %w", err) + } + + gqlURL, err := url.Parse("https://api.github.com/graphql") + if err != nil { + return APIHost{}, fmt.Errorf("failed to parse dotcom GraphQL URL: %w", err) + } + + uploadURL, err := url.Parse("https://uploads.github.com") + if err != nil { + return APIHost{}, fmt.Errorf("failed to parse dotcom Upload URL: %w", err) + } + + rawURL, err := url.Parse("https://raw.githubusercontent.com/") + if err != nil { + return APIHost{}, fmt.Errorf("failed to parse dotcom Raw URL: %w", err) + } + + return APIHost{ + restURL: baseRestURL, + gqlURL: gqlURL, + uploadURL: uploadURL, + rawURL: rawURL, + }, nil +} + +func newGHECHost(hostname string) (APIHost, error) { + u, err := url.Parse(hostname) + if err != nil { + return APIHost{}, fmt.Errorf("failed to parse GHEC URL: %w", err) + } + + // Unsecured GHEC would be an error + if u.Scheme == "http" { + return APIHost{}, fmt.Errorf("GHEC URL must be HTTPS") + } + + restURL, err := url.Parse(fmt.Sprintf("https://api.%s/", u.Hostname())) + if err != nil { + return APIHost{}, fmt.Errorf("failed to parse GHEC REST URL: %w", err) + } + + gqlURL, err := url.Parse(fmt.Sprintf("https://api.%s/graphql", u.Hostname())) + if err != nil { + return APIHost{}, fmt.Errorf("failed to parse GHEC GraphQL URL: %w", err) + } + + uploadURL, err := url.Parse(fmt.Sprintf("https://uploads.%s/", u.Hostname())) + if err != nil { + return APIHost{}, fmt.Errorf("failed to parse GHEC Upload URL: %w", err) + } + + rawURL, err := url.Parse(fmt.Sprintf("https://raw.%s/", u.Hostname())) + if err != nil { + return APIHost{}, fmt.Errorf("failed to parse GHEC Raw URL: %w", err) + } + + return APIHost{ + restURL: restURL, + gqlURL: gqlURL, + uploadURL: uploadURL, + rawURL: rawURL, + }, nil +} + +func newGHESHost(hostname string) (APIHost, error) { + u, err := url.Parse(hostname) + if err != nil { 
+ return APIHost{}, fmt.Errorf("failed to parse GHES URL: %w", err) + } + + restURL, err := url.Parse(fmt.Sprintf("%s://%s/api/v3/", u.Scheme, u.Hostname())) + if err != nil { + return APIHost{}, fmt.Errorf("failed to parse GHES REST URL: %w", err) + } + + gqlURL, err := url.Parse(fmt.Sprintf("%s://%s/api/graphql", u.Scheme, u.Hostname())) + if err != nil { + return APIHost{}, fmt.Errorf("failed to parse GHES GraphQL URL: %w", err) + } + + // Check if subdomain isolation is enabled + // See https://docs.github.com/en/enterprise-server@3.17/admin/configuring-settings/hardening-security-for-your-enterprise/enabling-subdomain-isolation#about-subdomain-isolation + hasSubdomainIsolation := checkSubdomainIsolation(u.Scheme, u.Hostname()) + + var uploadURL *url.URL + if hasSubdomainIsolation { + // With subdomain isolation: https://uploads.hostname/ + uploadURL, err = url.Parse(fmt.Sprintf("%s://uploads.%s/", u.Scheme, u.Hostname())) + } else { + // Without subdomain isolation: https://hostname/api/uploads/ + uploadURL, err = url.Parse(fmt.Sprintf("%s://%s/api/uploads/", u.Scheme, u.Hostname())) + } + if err != nil { + return APIHost{}, fmt.Errorf("failed to parse GHES Upload URL: %w", err) + } + + var rawURL *url.URL + if hasSubdomainIsolation { + // With subdomain isolation: https://raw.hostname/ + rawURL, err = url.Parse(fmt.Sprintf("%s://raw.%s/", u.Scheme, u.Hostname())) + } else { + // Without subdomain isolation: https://hostname/raw/ + rawURL, err = url.Parse(fmt.Sprintf("%s://%s/raw/", u.Scheme, u.Hostname())) + } + if err != nil { + return APIHost{}, fmt.Errorf("failed to parse GHES Raw URL: %w", err) + } + + return APIHost{ + restURL: restURL, + gqlURL: gqlURL, + uploadURL: uploadURL, + rawURL: rawURL, + }, nil +} + +// checkSubdomainIsolation detects if GitHub Enterprise Server has subdomain isolation enabled +// by attempting to ping the raw./_ping endpoint on the subdomain. The raw subdomain must always exist for subdomain isolation. 
+func checkSubdomainIsolation(scheme, hostname string) bool { + subdomainURL := fmt.Sprintf("%s://raw.%s/_ping", scheme, hostname) + + client := &http.Client{ + Timeout: 5 * time.Second, + // Don't follow redirects - we just want to check if the endpoint exists + //nolint:revive // parameters are required by http.Client.CheckRedirect signature + CheckRedirect: func(req *http.Request, via []*http.Request) error { + return http.ErrUseLastResponse + }, + } + + resp, err := client.Get(subdomainURL) + if err != nil { + return false + } + defer resp.Body.Close() + + return resp.StatusCode == http.StatusOK +} + +// Note that this does not handle ports yet, so development environments are out. +func parseAPIHost(s string) (APIHost, error) { + if s == "" { + return newDotcomHost() + } + + u, err := url.Parse(s) + if err != nil { + return APIHost{}, fmt.Errorf("could not parse host as URL: %s", s) + } + + if u.Scheme == "" { + return APIHost{}, fmt.Errorf("host must have a scheme (http or https): %s", s) + } + + if strings.HasSuffix(u.Hostname(), "github.com") { + return newDotcomHost() + } + + if strings.HasSuffix(u.Hostname(), "ghe.com") { + return newGHECHost(s) + } + + return newGHESHost(s) +} diff --git a/pkg/utils/result.go b/pkg/utils/result.go index 533fe0573d..1bfd800e28 100644 --- a/pkg/utils/result.go +++ b/pkg/utils/result.go @@ -47,3 +47,15 @@ func NewToolResultResource(message string, contents *mcp.ResourceContents) *mcp. 
IsError: false, } } + +func NewToolResultResourceLink(message string, link *mcp.ResourceLink) *mcp.CallToolResult { + return &mcp.CallToolResult{ + Content: []mcp.Content{ + &mcp.TextContent{ + Text: message, + }, + link, + }, + IsError: false, + } +} diff --git a/pkg/utils/token.go b/pkg/utils/token.go new file mode 100644 index 0000000000..8933fb0bda --- /dev/null +++ b/pkg/utils/token.go @@ -0,0 +1,75 @@ +package utils //nolint:revive //TODO: figure out a better name for this package + +import ( + "fmt" + "net/http" + "regexp" + "strings" + + httpheaders "github.com/github/github-mcp-server/pkg/http/headers" + "github.com/github/github-mcp-server/pkg/http/mark" +) + +type TokenType int + +const ( + TokenTypeUnknown TokenType = iota + TokenTypePersonalAccessToken + TokenTypeFineGrainedPersonalAccessToken + TokenTypeOAuthAccessToken + TokenTypeUserToServerGitHubAppToken + TokenTypeServerToServerGitHubAppToken +) + +var supportedGitHubPrefixes = map[string]TokenType{ + "ghp_": TokenTypePersonalAccessToken, // Personal access token (classic) + "github_pat_": TokenTypeFineGrainedPersonalAccessToken, // Fine-grained personal access token + "gho_": TokenTypeOAuthAccessToken, // OAuth access token + "ghu_": TokenTypeUserToServerGitHubAppToken, // User access token for a GitHub App + "ghs_": TokenTypeServerToServerGitHubAppToken, // Installation access token for a GitHub App (a.k.a. server-to-server token) +} + +var ( + ErrMissingAuthorizationHeader = fmt.Errorf("%w: missing required Authorization header", mark.ErrBadRequest) + ErrBadAuthorizationHeader = fmt.Errorf("%w: Authorization header is badly formatted", mark.ErrBadRequest) + ErrUnsupportedAuthorizationHeader = fmt.Errorf("%w: unsupported Authorization header", mark.ErrBadRequest) +) + +// oldPatternRegexp is the regular expression for the old pattern of the token. +// Until 2021, GitHub API tokens did not have an identifiable prefix. They +// were 40 characters long and only contained the characters a-f and 0-9. 
+var oldPatternRegexp = regexp.MustCompile(`\A[a-f0-9]{40}\z`) + +// ParseAuthorizationHeader parses the Authorization header from the HTTP request +func ParseAuthorizationHeader(req *http.Request) (tokenType TokenType, token string, _ error) { + authHeader := req.Header.Get(httpheaders.AuthorizationHeader) + if authHeader == "" { + return 0, "", ErrMissingAuthorizationHeader + } + + switch { + // decrypt dotcom token and set it as token + case strings.HasPrefix(authHeader, "GitHub-Bearer "): + return 0, "", ErrUnsupportedAuthorizationHeader + default: + // support both "Bearer" and "bearer" to conform to api.github.com + if len(authHeader) > 7 && strings.EqualFold(authHeader[:7], "Bearer ") { + token = authHeader[7:] + } else { + token = authHeader + } + } + + for prefix, tokenType := range supportedGitHubPrefixes { + if strings.HasPrefix(token, prefix) { + return tokenType, token, nil + } + } + + matchesOldTokenPattern := oldPatternRegexp.MatchString(token) + if matchesOldTokenPattern { + return TokenTypePersonalAccessToken, token, nil + } + + return 0, "", ErrBadAuthorizationHeader +} diff --git a/script/build-ui b/script/build-ui new file mode 100755 index 0000000000..a68f6764ec --- /dev/null +++ b/script/build-ui @@ -0,0 +1,17 @@ +#!/bin/bash +# Build the MCP App UIs +set -e + +cd "$(dirname "$0")/../ui" + +# Install dependencies if needed +if [ ! -d "node_modules" ]; then + echo "Installing UI dependencies..." + npm install +fi + +echo "Building UI..." +npm run build + +echo "UI build complete. Output:" +ls -la ../pkg/github/ui_dist/*.html diff --git a/script/lint b/script/lint index 47dd537eaf..5b69cbe2ff 100755 --- a/script/lint +++ b/script/lint @@ -5,10 +5,11 @@ gofmt -s -w . BINDIR="$(git rev-parse --show-toplevel)"/bin BINARY=$BINDIR/golangci-lint -GOLANGCI_LINT_VERSION=v2.5.0 +# sync with .github/workflows/lint.yml +GOLANGCI_LINT_VERSION=v2.9.0 if [ ! 
-f "$BINARY" ]; then - curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s "$GOLANGCI_LINT_VERSION" + curl -sSfL https://golangci-lint.run/install.sh | sh -s -- -b "$BINDIR" "$GOLANGCI_LINT_VERSION" fi $BINARY run \ No newline at end of file diff --git a/third-party-licenses.darwin.md b/third-party-licenses.darwin.md index 8217c7707b..de0981d756 100644 --- a/third-party-licenses.darwin.md +++ b/third-party-licenses.darwin.md @@ -15,19 +15,20 @@ The following packages are included for the amd64, arm64 architectures. - [github.com/aymerick/douceur](https://pkg.go.dev/github.com/aymerick/douceur) ([MIT](https://github.com/aymerick/douceur/blob/v0.2.0/LICENSE)) - [github.com/fsnotify/fsnotify](https://pkg.go.dev/github.com/fsnotify/fsnotify) ([BSD-3-Clause](https://github.com/fsnotify/fsnotify/blob/v1.9.0/LICENSE)) - [github.com/github/github-mcp-server](https://pkg.go.dev/github.com/github/github-mcp-server) ([MIT](https://github.com/github/github-mcp-server/blob/HEAD/LICENSE)) - - [github.com/go-openapi/jsonpointer](https://pkg.go.dev/github.com/go-openapi/jsonpointer) ([Apache-2.0](https://github.com/go-openapi/jsonpointer/blob/v0.19.5/LICENSE)) - - [github.com/go-openapi/swag](https://pkg.go.dev/github.com/go-openapi/swag) ([Apache-2.0](https://github.com/go-openapi/swag/blob/v0.21.1/LICENSE)) + - [github.com/go-chi/chi/v5](https://pkg.go.dev/github.com/go-chi/chi/v5) ([MIT](https://github.com/go-chi/chi/blob/v5.2.5/LICENSE)) + - [github.com/go-openapi/jsonpointer](https://pkg.go.dev/github.com/go-openapi/jsonpointer) ([Apache-2.0](https://github.com/go-openapi/jsonpointer/blob/v0.21.0/LICENSE)) + - [github.com/go-openapi/swag](https://pkg.go.dev/github.com/go-openapi/swag) ([Apache-2.0](https://github.com/go-openapi/swag/blob/v0.23.0/LICENSE)) - [github.com/go-viper/mapstructure/v2](https://pkg.go.dev/github.com/go-viper/mapstructure/v2) ([MIT](https://github.com/go-viper/mapstructure/blob/v2.5.0/LICENSE)) - - 
[github.com/google/go-github/v79/github](https://pkg.go.dev/github.com/google/go-github/v79/github) ([BSD-3-Clause](https://github.com/google/go-github/blob/v79.0.0/LICENSE)) - - [github.com/google/go-querystring/query](https://pkg.go.dev/github.com/google/go-querystring/query) ([BSD-3-Clause](https://github.com/google/go-querystring/blob/v1.1.0/LICENSE)) + - [github.com/google/go-github/v82/github](https://pkg.go.dev/github.com/google/go-github/v82/github) ([BSD-3-Clause](https://github.com/google/go-github/blob/v82.0.0/LICENSE)) + - [github.com/google/go-querystring/query](https://pkg.go.dev/github.com/google/go-querystring/query) ([BSD-3-Clause](https://github.com/google/go-querystring/blob/v1.2.0/LICENSE)) - [github.com/google/jsonschema-go/jsonschema](https://pkg.go.dev/github.com/google/jsonschema-go/jsonschema) ([MIT](https://github.com/google/jsonschema-go/blob/v0.4.2/LICENSE)) - [github.com/gorilla/css/scanner](https://pkg.go.dev/github.com/gorilla/css/scanner) ([BSD-3-Clause](https://github.com/gorilla/css/blob/v1.0.1/LICENSE)) - - [github.com/josephburnett/jd/v2](https://pkg.go.dev/github.com/josephburnett/jd/v2) ([MIT](https://github.com/josephburnett/jd/blob/v1.9.2/LICENSE)) + - [github.com/josephburnett/jd/v2](https://pkg.go.dev/github.com/josephburnett/jd/v2) ([MIT](https://github.com/josephburnett/jd/blob/v2.4.0/v2/LICENSE)) - [github.com/josharian/intern](https://pkg.go.dev/github.com/josharian/intern) ([MIT](https://github.com/josharian/intern/blob/v1.0.0/license.md)) - [github.com/lithammer/fuzzysearch/fuzzy](https://pkg.go.dev/github.com/lithammer/fuzzysearch/fuzzy) ([MIT](https://github.com/lithammer/fuzzysearch/blob/v1.1.8/LICENSE)) - [github.com/mailru/easyjson](https://pkg.go.dev/github.com/mailru/easyjson) ([MIT](https://github.com/mailru/easyjson/blob/v0.7.7/LICENSE)) - [github.com/microcosm-cc/bluemonday](https://pkg.go.dev/github.com/microcosm-cc/bluemonday) 
([BSD-3-Clause](https://github.com/microcosm-cc/bluemonday/blob/v1.0.27/LICENSE.md)) - - [github.com/modelcontextprotocol/go-sdk](https://pkg.go.dev/github.com/modelcontextprotocol/go-sdk) ([MIT](https://github.com/modelcontextprotocol/go-sdk/blob/v1.2.0/LICENSE)) + - [github.com/modelcontextprotocol/go-sdk](https://pkg.go.dev/github.com/modelcontextprotocol/go-sdk) ([MIT](https://github.com/modelcontextprotocol/go-sdk/blob/v1.3.0/LICENSE)) - [github.com/muesli/cache2go](https://pkg.go.dev/github.com/muesli/cache2go) ([BSD-3-Clause](https://github.com/muesli/cache2go/blob/518229cd8021/LICENSE.txt)) - [github.com/pelletier/go-toml/v2](https://pkg.go.dev/github.com/pelletier/go-toml/v2) ([MIT](https://github.com/pelletier/go-toml/blob/v2.2.4/LICENSE)) - [github.com/sagikazarmark/locafero](https://pkg.go.dev/github.com/sagikazarmark/locafero) ([MIT](https://github.com/sagikazarmark/locafero/blob/v0.11.0/LICENSE)) @@ -41,12 +42,11 @@ The following packages are included for the amd64, arm64 architectures. 
- [github.com/spf13/viper](https://pkg.go.dev/github.com/spf13/viper) ([MIT](https://github.com/spf13/viper/blob/v1.21.0/LICENSE)) - [github.com/subosito/gotenv](https://pkg.go.dev/github.com/subosito/gotenv) ([MIT](https://github.com/subosito/gotenv/blob/v1.6.0/LICENSE)) - [github.com/yosida95/uritemplate/v3](https://pkg.go.dev/github.com/yosida95/uritemplate/v3) ([BSD-3-Clause](https://github.com/yosida95/uritemplate/blob/v3.0.2/LICENSE)) - - [github.com/yudai/golcs](https://pkg.go.dev/github.com/yudai/golcs) ([MIT](https://github.com/yudai/golcs/blob/ecda9a501e82/LICENSE)) - [go.yaml.in/yaml/v3](https://pkg.go.dev/go.yaml.in/yaml/v3) ([MIT](https://github.com/yaml/go-yaml/blob/v3.0.4/LICENSE)) - - [golang.org/x/exp](https://pkg.go.dev/golang.org/x/exp) ([BSD-3-Clause](https://cs.opensource.google/go/x/exp/+/8a7402ab:LICENSE)) + - [golang.org/x/exp/slices](https://pkg.go.dev/golang.org/x/exp/slices) ([BSD-3-Clause](https://cs.opensource.google/go/x/exp/+/054e65f0:LICENSE)) - [golang.org/x/net/html](https://pkg.go.dev/golang.org/x/net/html) ([BSD-3-Clause](https://cs.opensource.google/go/x/net/+/v0.38.0:LICENSE)) - [golang.org/x/sys/unix](https://pkg.go.dev/golang.org/x/sys/unix) ([BSD-3-Clause](https://cs.opensource.google/go/x/sys/+/v0.31.0:LICENSE)) - [golang.org/x/text](https://pkg.go.dev/golang.org/x/text) ([BSD-3-Clause](https://cs.opensource.google/go/x/text/+/v0.28.0:LICENSE)) - - [gopkg.in/yaml.v2](https://pkg.go.dev/gopkg.in/yaml.v2) ([Apache-2.0](https://github.com/go-yaml/yaml/blob/v2.4.0/LICENSE)) + - [gopkg.in/yaml.v3](https://pkg.go.dev/gopkg.in/yaml.v3) ([MIT](https://github.com/go-yaml/yaml/blob/v3.0.1/LICENSE)) [github/github-mcp-server]: https://github.com/github/github-mcp-server diff --git a/third-party-licenses.linux.md b/third-party-licenses.linux.md index 981e388e52..48c632c6cf 100644 --- a/third-party-licenses.linux.md +++ b/third-party-licenses.linux.md @@ -15,19 +15,20 @@ The following packages are included for the 386, amd64, arm64 
architectures. - [github.com/aymerick/douceur](https://pkg.go.dev/github.com/aymerick/douceur) ([MIT](https://github.com/aymerick/douceur/blob/v0.2.0/LICENSE)) - [github.com/fsnotify/fsnotify](https://pkg.go.dev/github.com/fsnotify/fsnotify) ([BSD-3-Clause](https://github.com/fsnotify/fsnotify/blob/v1.9.0/LICENSE)) - [github.com/github/github-mcp-server](https://pkg.go.dev/github.com/github/github-mcp-server) ([MIT](https://github.com/github/github-mcp-server/blob/HEAD/LICENSE)) - - [github.com/go-openapi/jsonpointer](https://pkg.go.dev/github.com/go-openapi/jsonpointer) ([Apache-2.0](https://github.com/go-openapi/jsonpointer/blob/v0.19.5/LICENSE)) - - [github.com/go-openapi/swag](https://pkg.go.dev/github.com/go-openapi/swag) ([Apache-2.0](https://github.com/go-openapi/swag/blob/v0.21.1/LICENSE)) + - [github.com/go-chi/chi/v5](https://pkg.go.dev/github.com/go-chi/chi/v5) ([MIT](https://github.com/go-chi/chi/blob/v5.2.5/LICENSE)) + - [github.com/go-openapi/jsonpointer](https://pkg.go.dev/github.com/go-openapi/jsonpointer) ([Apache-2.0](https://github.com/go-openapi/jsonpointer/blob/v0.21.0/LICENSE)) + - [github.com/go-openapi/swag](https://pkg.go.dev/github.com/go-openapi/swag) ([Apache-2.0](https://github.com/go-openapi/swag/blob/v0.23.0/LICENSE)) - [github.com/go-viper/mapstructure/v2](https://pkg.go.dev/github.com/go-viper/mapstructure/v2) ([MIT](https://github.com/go-viper/mapstructure/blob/v2.5.0/LICENSE)) - - [github.com/google/go-github/v79/github](https://pkg.go.dev/github.com/google/go-github/v79/github) ([BSD-3-Clause](https://github.com/google/go-github/blob/v79.0.0/LICENSE)) - - [github.com/google/go-querystring/query](https://pkg.go.dev/github.com/google/go-querystring/query) ([BSD-3-Clause](https://github.com/google/go-querystring/blob/v1.1.0/LICENSE)) + - [github.com/google/go-github/v82/github](https://pkg.go.dev/github.com/google/go-github/v82/github) ([BSD-3-Clause](https://github.com/google/go-github/blob/v82.0.0/LICENSE)) + - 
[github.com/google/go-querystring/query](https://pkg.go.dev/github.com/google/go-querystring/query) ([BSD-3-Clause](https://github.com/google/go-querystring/blob/v1.2.0/LICENSE)) - [github.com/google/jsonschema-go/jsonschema](https://pkg.go.dev/github.com/google/jsonschema-go/jsonschema) ([MIT](https://github.com/google/jsonschema-go/blob/v0.4.2/LICENSE)) - [github.com/gorilla/css/scanner](https://pkg.go.dev/github.com/gorilla/css/scanner) ([BSD-3-Clause](https://github.com/gorilla/css/blob/v1.0.1/LICENSE)) - - [github.com/josephburnett/jd/v2](https://pkg.go.dev/github.com/josephburnett/jd/v2) ([MIT](https://github.com/josephburnett/jd/blob/v1.9.2/LICENSE)) + - [github.com/josephburnett/jd/v2](https://pkg.go.dev/github.com/josephburnett/jd/v2) ([MIT](https://github.com/josephburnett/jd/blob/v2.4.0/v2/LICENSE)) - [github.com/josharian/intern](https://pkg.go.dev/github.com/josharian/intern) ([MIT](https://github.com/josharian/intern/blob/v1.0.0/license.md)) - [github.com/lithammer/fuzzysearch/fuzzy](https://pkg.go.dev/github.com/lithammer/fuzzysearch/fuzzy) ([MIT](https://github.com/lithammer/fuzzysearch/blob/v1.1.8/LICENSE)) - [github.com/mailru/easyjson](https://pkg.go.dev/github.com/mailru/easyjson) ([MIT](https://github.com/mailru/easyjson/blob/v0.7.7/LICENSE)) - [github.com/microcosm-cc/bluemonday](https://pkg.go.dev/github.com/microcosm-cc/bluemonday) ([BSD-3-Clause](https://github.com/microcosm-cc/bluemonday/blob/v1.0.27/LICENSE.md)) - - [github.com/modelcontextprotocol/go-sdk](https://pkg.go.dev/github.com/modelcontextprotocol/go-sdk) ([MIT](https://github.com/modelcontextprotocol/go-sdk/blob/v1.2.0/LICENSE)) + - [github.com/modelcontextprotocol/go-sdk](https://pkg.go.dev/github.com/modelcontextprotocol/go-sdk) ([MIT](https://github.com/modelcontextprotocol/go-sdk/blob/v1.3.0/LICENSE)) - [github.com/muesli/cache2go](https://pkg.go.dev/github.com/muesli/cache2go) ([BSD-3-Clause](https://github.com/muesli/cache2go/blob/518229cd8021/LICENSE.txt)) - 
[github.com/pelletier/go-toml/v2](https://pkg.go.dev/github.com/pelletier/go-toml/v2) ([MIT](https://github.com/pelletier/go-toml/blob/v2.2.4/LICENSE)) - [github.com/sagikazarmark/locafero](https://pkg.go.dev/github.com/sagikazarmark/locafero) ([MIT](https://github.com/sagikazarmark/locafero/blob/v0.11.0/LICENSE)) @@ -41,12 +42,11 @@ The following packages are included for the 386, amd64, arm64 architectures. - [github.com/spf13/viper](https://pkg.go.dev/github.com/spf13/viper) ([MIT](https://github.com/spf13/viper/blob/v1.21.0/LICENSE)) - [github.com/subosito/gotenv](https://pkg.go.dev/github.com/subosito/gotenv) ([MIT](https://github.com/subosito/gotenv/blob/v1.6.0/LICENSE)) - [github.com/yosida95/uritemplate/v3](https://pkg.go.dev/github.com/yosida95/uritemplate/v3) ([BSD-3-Clause](https://github.com/yosida95/uritemplate/blob/v3.0.2/LICENSE)) - - [github.com/yudai/golcs](https://pkg.go.dev/github.com/yudai/golcs) ([MIT](https://github.com/yudai/golcs/blob/ecda9a501e82/LICENSE)) - [go.yaml.in/yaml/v3](https://pkg.go.dev/go.yaml.in/yaml/v3) ([MIT](https://github.com/yaml/go-yaml/blob/v3.0.4/LICENSE)) - - [golang.org/x/exp](https://pkg.go.dev/golang.org/x/exp) ([BSD-3-Clause](https://cs.opensource.google/go/x/exp/+/8a7402ab:LICENSE)) + - [golang.org/x/exp/slices](https://pkg.go.dev/golang.org/x/exp/slices) ([BSD-3-Clause](https://cs.opensource.google/go/x/exp/+/054e65f0:LICENSE)) - [golang.org/x/net/html](https://pkg.go.dev/golang.org/x/net/html) ([BSD-3-Clause](https://cs.opensource.google/go/x/net/+/v0.38.0:LICENSE)) - [golang.org/x/sys/unix](https://pkg.go.dev/golang.org/x/sys/unix) ([BSD-3-Clause](https://cs.opensource.google/go/x/sys/+/v0.31.0:LICENSE)) - [golang.org/x/text](https://pkg.go.dev/golang.org/x/text) ([BSD-3-Clause](https://cs.opensource.google/go/x/text/+/v0.28.0:LICENSE)) - - [gopkg.in/yaml.v2](https://pkg.go.dev/gopkg.in/yaml.v2) ([Apache-2.0](https://github.com/go-yaml/yaml/blob/v2.4.0/LICENSE)) + - 
[gopkg.in/yaml.v3](https://pkg.go.dev/gopkg.in/yaml.v3) ([MIT](https://github.com/go-yaml/yaml/blob/v3.0.1/LICENSE)) [github/github-mcp-server]: https://github.com/github/github-mcp-server diff --git a/third-party-licenses.windows.md b/third-party-licenses.windows.md index ae0e2389ef..8845d59aab 100644 --- a/third-party-licenses.windows.md +++ b/third-party-licenses.windows.md @@ -15,20 +15,21 @@ The following packages are included for the 386, amd64, arm64 architectures. - [github.com/aymerick/douceur](https://pkg.go.dev/github.com/aymerick/douceur) ([MIT](https://github.com/aymerick/douceur/blob/v0.2.0/LICENSE)) - [github.com/fsnotify/fsnotify](https://pkg.go.dev/github.com/fsnotify/fsnotify) ([BSD-3-Clause](https://github.com/fsnotify/fsnotify/blob/v1.9.0/LICENSE)) - [github.com/github/github-mcp-server](https://pkg.go.dev/github.com/github/github-mcp-server) ([MIT](https://github.com/github/github-mcp-server/blob/HEAD/LICENSE)) - - [github.com/go-openapi/jsonpointer](https://pkg.go.dev/github.com/go-openapi/jsonpointer) ([Apache-2.0](https://github.com/go-openapi/jsonpointer/blob/v0.19.5/LICENSE)) - - [github.com/go-openapi/swag](https://pkg.go.dev/github.com/go-openapi/swag) ([Apache-2.0](https://github.com/go-openapi/swag/blob/v0.21.1/LICENSE)) + - [github.com/go-chi/chi/v5](https://pkg.go.dev/github.com/go-chi/chi/v5) ([MIT](https://github.com/go-chi/chi/blob/v5.2.5/LICENSE)) + - [github.com/go-openapi/jsonpointer](https://pkg.go.dev/github.com/go-openapi/jsonpointer) ([Apache-2.0](https://github.com/go-openapi/jsonpointer/blob/v0.21.0/LICENSE)) + - [github.com/go-openapi/swag](https://pkg.go.dev/github.com/go-openapi/swag) ([Apache-2.0](https://github.com/go-openapi/swag/blob/v0.23.0/LICENSE)) - [github.com/go-viper/mapstructure/v2](https://pkg.go.dev/github.com/go-viper/mapstructure/v2) ([MIT](https://github.com/go-viper/mapstructure/blob/v2.5.0/LICENSE)) - - 
[github.com/google/go-github/v79/github](https://pkg.go.dev/github.com/google/go-github/v79/github) ([BSD-3-Clause](https://github.com/google/go-github/blob/v79.0.0/LICENSE)) - - [github.com/google/go-querystring/query](https://pkg.go.dev/github.com/google/go-querystring/query) ([BSD-3-Clause](https://github.com/google/go-querystring/blob/v1.1.0/LICENSE)) + - [github.com/google/go-github/v82/github](https://pkg.go.dev/github.com/google/go-github/v82/github) ([BSD-3-Clause](https://github.com/google/go-github/blob/v82.0.0/LICENSE)) + - [github.com/google/go-querystring/query](https://pkg.go.dev/github.com/google/go-querystring/query) ([BSD-3-Clause](https://github.com/google/go-querystring/blob/v1.2.0/LICENSE)) - [github.com/google/jsonschema-go/jsonschema](https://pkg.go.dev/github.com/google/jsonschema-go/jsonschema) ([MIT](https://github.com/google/jsonschema-go/blob/v0.4.2/LICENSE)) - [github.com/gorilla/css/scanner](https://pkg.go.dev/github.com/gorilla/css/scanner) ([BSD-3-Clause](https://github.com/gorilla/css/blob/v1.0.1/LICENSE)) - [github.com/inconshreveable/mousetrap](https://pkg.go.dev/github.com/inconshreveable/mousetrap) ([Apache-2.0](https://github.com/inconshreveable/mousetrap/blob/v1.1.0/LICENSE)) - - [github.com/josephburnett/jd/v2](https://pkg.go.dev/github.com/josephburnett/jd/v2) ([MIT](https://github.com/josephburnett/jd/blob/v1.9.2/LICENSE)) + - [github.com/josephburnett/jd/v2](https://pkg.go.dev/github.com/josephburnett/jd/v2) ([MIT](https://github.com/josephburnett/jd/blob/v2.4.0/v2/LICENSE)) - [github.com/josharian/intern](https://pkg.go.dev/github.com/josharian/intern) ([MIT](https://github.com/josharian/intern/blob/v1.0.0/license.md)) - [github.com/lithammer/fuzzysearch/fuzzy](https://pkg.go.dev/github.com/lithammer/fuzzysearch/fuzzy) ([MIT](https://github.com/lithammer/fuzzysearch/blob/v1.1.8/LICENSE)) - [github.com/mailru/easyjson](https://pkg.go.dev/github.com/mailru/easyjson) 
([MIT](https://github.com/mailru/easyjson/blob/v0.7.7/LICENSE)) - [github.com/microcosm-cc/bluemonday](https://pkg.go.dev/github.com/microcosm-cc/bluemonday) ([BSD-3-Clause](https://github.com/microcosm-cc/bluemonday/blob/v1.0.27/LICENSE.md)) - - [github.com/modelcontextprotocol/go-sdk](https://pkg.go.dev/github.com/modelcontextprotocol/go-sdk) ([MIT](https://github.com/modelcontextprotocol/go-sdk/blob/v1.2.0/LICENSE)) + - [github.com/modelcontextprotocol/go-sdk](https://pkg.go.dev/github.com/modelcontextprotocol/go-sdk) ([MIT](https://github.com/modelcontextprotocol/go-sdk/blob/v1.3.0/LICENSE)) - [github.com/muesli/cache2go](https://pkg.go.dev/github.com/muesli/cache2go) ([BSD-3-Clause](https://github.com/muesli/cache2go/blob/518229cd8021/LICENSE.txt)) - [github.com/pelletier/go-toml/v2](https://pkg.go.dev/github.com/pelletier/go-toml/v2) ([MIT](https://github.com/pelletier/go-toml/blob/v2.2.4/LICENSE)) - [github.com/sagikazarmark/locafero](https://pkg.go.dev/github.com/sagikazarmark/locafero) ([MIT](https://github.com/sagikazarmark/locafero/blob/v0.11.0/LICENSE)) @@ -42,12 +43,11 @@ The following packages are included for the 386, amd64, arm64 architectures. 
- [github.com/spf13/viper](https://pkg.go.dev/github.com/spf13/viper) ([MIT](https://github.com/spf13/viper/blob/v1.21.0/LICENSE)) - [github.com/subosito/gotenv](https://pkg.go.dev/github.com/subosito/gotenv) ([MIT](https://github.com/subosito/gotenv/blob/v1.6.0/LICENSE)) - [github.com/yosida95/uritemplate/v3](https://pkg.go.dev/github.com/yosida95/uritemplate/v3) ([BSD-3-Clause](https://github.com/yosida95/uritemplate/blob/v3.0.2/LICENSE)) - - [github.com/yudai/golcs](https://pkg.go.dev/github.com/yudai/golcs) ([MIT](https://github.com/yudai/golcs/blob/ecda9a501e82/LICENSE)) - [go.yaml.in/yaml/v3](https://pkg.go.dev/go.yaml.in/yaml/v3) ([MIT](https://github.com/yaml/go-yaml/blob/v3.0.4/LICENSE)) - - [golang.org/x/exp](https://pkg.go.dev/golang.org/x/exp) ([BSD-3-Clause](https://cs.opensource.google/go/x/exp/+/8a7402ab:LICENSE)) + - [golang.org/x/exp/slices](https://pkg.go.dev/golang.org/x/exp/slices) ([BSD-3-Clause](https://cs.opensource.google/go/x/exp/+/054e65f0:LICENSE)) - [golang.org/x/net/html](https://pkg.go.dev/golang.org/x/net/html) ([BSD-3-Clause](https://cs.opensource.google/go/x/net/+/v0.38.0:LICENSE)) - [golang.org/x/sys/windows](https://pkg.go.dev/golang.org/x/sys/windows) ([BSD-3-Clause](https://cs.opensource.google/go/x/sys/+/v0.31.0:LICENSE)) - [golang.org/x/text](https://pkg.go.dev/golang.org/x/text) ([BSD-3-Clause](https://cs.opensource.google/go/x/text/+/v0.28.0:LICENSE)) - - [gopkg.in/yaml.v2](https://pkg.go.dev/gopkg.in/yaml.v2) ([Apache-2.0](https://github.com/go-yaml/yaml/blob/v2.4.0/LICENSE)) + - [gopkg.in/yaml.v3](https://pkg.go.dev/gopkg.in/yaml.v3) ([MIT](https://github.com/go-yaml/yaml/blob/v3.0.1/LICENSE)) [github/github-mcp-server]: https://github.com/github/github-mcp-server diff --git a/third-party/github.com/go-chi/chi/v5/LICENSE b/third-party/github.com/go-chi/chi/v5/LICENSE new file mode 100644 index 0000000000..d99f02ffac --- /dev/null +++ b/third-party/github.com/go-chi/chi/v5/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 
2015-present Peter Kieltyka (https://github.com/pkieltyka), Google Inc. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/third-party/github.com/google/go-github/v79/github/LICENSE b/third-party/github.com/google/go-github/v82/github/LICENSE similarity index 100% rename from third-party/github.com/google/go-github/v79/github/LICENSE rename to third-party/github.com/google/go-github/v82/github/LICENSE diff --git a/third-party/github.com/yudai/golcs/LICENSE b/third-party/github.com/yudai/golcs/LICENSE deleted file mode 100644 index ab7d2e0fba..0000000000 --- a/third-party/github.com/yudai/golcs/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Iwasaki Yudai - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/third-party/golang.org/x/exp/LICENSE b/third-party/golang.org/x/exp/slices/LICENSE similarity index 100% rename from third-party/golang.org/x/exp/LICENSE rename to third-party/golang.org/x/exp/slices/LICENSE diff --git a/third-party/gopkg.in/yaml.v2/LICENSE b/third-party/gopkg.in/yaml.v2/LICENSE deleted file mode 100644 index 8dada3edaf..0000000000 --- a/third-party/gopkg.in/yaml.v2/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/third-party/gopkg.in/yaml.v3/LICENSE b/third-party/gopkg.in/yaml.v3/LICENSE new file mode 100644 index 0000000000..2683e4bb1f --- /dev/null +++ b/third-party/gopkg.in/yaml.v3/LICENSE @@ -0,0 +1,50 @@ + +This project is covered by two different licenses: MIT and Apache. + +#### MIT License #### + +The following files were ported to Go from C files of libyaml, and thus +are still covered by their original MIT license, with the additional +copyright staring in 2011 when the project was ported over: + + apic.go emitterc.go parserc.go readerc.go scannerc.go + writerc.go yamlh.go yamlprivateh.go + +Copyright (c) 2006-2010 Kirill Simonov +Copyright (c) 2006-2011 Kirill Simonov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +### Apache License ### + +All the remaining project files are covered by the Apache license: + +Copyright (c) 2011-2019 Canonical Ltd + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/third-party/gopkg.in/yaml.v2/NOTICE b/third-party/gopkg.in/yaml.v3/NOTICE similarity index 100% rename from third-party/gopkg.in/yaml.v2/NOTICE rename to third-party/gopkg.in/yaml.v3/NOTICE diff --git a/ui/package-lock.json b/ui/package-lock.json new file mode 100644 index 0000000000..692c8d132b --- /dev/null +++ b/ui/package-lock.json @@ -0,0 +1,6337 @@ +{ + "name": "@github/mcp-server-ui", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@github/mcp-server-ui", + "version": "1.0.0", + "dependencies": { + "@github/markdown-toolbar-element": "^2.2.3", + "@modelcontextprotocol/ext-apps": "^1.0.0", + "@primer/octicons-react": "^19.0.0", + "@primer/react": "^36.0.0", + "react": "^18.0.0", + "react-dom": "^18.0.0", + "react-markdown": "^10.1.0", + "remark-gfm": "^4.0.1" + }, + "devDependencies": { + "@types/node": "^25.2.0", + "@types/react": "^18.0.0", + "@types/react-dom": "^18.0.0", + "@vitejs/plugin-react": "^4.3.0", + "cross-env": "^7.0.3", + "typescript": "^5.7.0", + "vite": "^6.0.0", + "vite-plugin-singlefile": "^2.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", + "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.28.5", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz", + "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.29.0", + "resolved": 
"https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz", + "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==", + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-compilation-targets": "^7.28.6", + "@babel/helper-module-transforms": "^7.28.6", + "@babel/helpers": "^7.28.6", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/traverse": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.0.tgz", + "integrity": "sha512-vSH118/wwM/pLR38g/Sgk05sNtro6TlTJKuiMXDaZqPUfjTFcudpCOt00IhOfj+1BFAX+UFAlzCU+6WXr3GLFQ==", + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-annotate-as-pure": { + "version": "7.27.3", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", + "integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", + "license": "MIT", + "peer": true, + "dependencies": { + "@babel/types": "^7.27.3" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", + "integrity": 
"sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==", + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.28.6", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", + "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", + "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.28.6", + "@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", + "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", + "license": 
"MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz", + "integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==", + "license": "MIT", + "dependencies": { + "@babel/template": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz", + "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==", + "license": "MIT", + "dependencies": { + "@babel/types": "^7.29.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + 
"node_modules/@babel/plugin-syntax-jsx": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.28.6.tgz", + "integrity": "sha512-wgEmr06G6sIpqr8YDwA2dSRTE3bJ+V0IfpzfSY3Lfgd7YWOaAdlykvJi13ZKBt8cZHfgH1IXN+CL656W3uUa4w==", + "license": "MIT", + "peer": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-self": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", + "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-source": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", + "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/runtime": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.6.tgz", + "integrity": "sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/template": { + "version": "7.28.6", + "resolved": 
"https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", + "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.28.6", + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz", + "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@emotion/is-prop-valid": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.4.0.tgz", + "integrity": "sha512-QgD4fyscGcbbKwJmqNvUMSE02OsHUa+lAWKdEUIJKgqe5IwRSKd7+KhibEWdaKwgjLj0DRSHA9biAIqGBk05lw==", + "license": "MIT", + "peer": true, + "dependencies": { + "@emotion/memoize": "^0.9.0" + } + }, + "node_modules/@emotion/memoize": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.9.0.tgz", + "integrity": "sha512-30FAj7/EoJ5mwVPOWhAyCX+FPfMDrVecJAM+Iw9NRoSl4BBAQeqj4cApHHUXOVvIPgLVDsCFoz/hGD+5QQD1GQ==", + "license": "MIT", + 
"peer": true + }, + "node_modules/@emotion/stylis": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/@emotion/stylis/-/stylis-0.8.5.tgz", + "integrity": "sha512-h6KtPihKFn3T9fuIrwvXXUOwlx3rfUvfZIcP5a6rh8Y7zjE3O06hT5Ss4S/YI1AYhuZ1kjaE/5EaOOI2NqSylQ==", + "license": "MIT", + "peer": true + }, + "node_modules/@emotion/unitless": { + "version": "0.7.5", + "resolved": "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.7.5.tgz", + "integrity": "sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg==", + "license": "MIT", + "peer": true + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.12.tgz", + "integrity": "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.12.tgz", + "integrity": "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.12.tgz", + "integrity": "sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.25.12", + "resolved": 
"https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.12.tgz", + "integrity": "sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.12.tgz", + "integrity": "sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.12.tgz", + "integrity": "sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.12.tgz", + "integrity": "sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.12.tgz", + "integrity": "sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": 
">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.12.tgz", + "integrity": "sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.12.tgz", + "integrity": "sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.12.tgz", + "integrity": "sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.12.tgz", + "integrity": "sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.12.tgz", + "integrity": "sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==", + "cpu": [ + "mips64el" + ], + "dev": 
true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.12.tgz", + "integrity": "sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.12.tgz", + "integrity": "sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.12.tgz", + "integrity": "sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.12.tgz", + "integrity": "sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.12.tgz", + "integrity": 
"sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.12.tgz", + "integrity": "sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.12.tgz", + "integrity": "sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.12.tgz", + "integrity": "sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.12.tgz", + "integrity": "sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": 
"0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.12.tgz", + "integrity": "sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.12.tgz", + "integrity": "sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.12.tgz", + "integrity": "sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.12.tgz", + "integrity": "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@github/combobox-nav": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/@github/combobox-nav/-/combobox-nav-2.3.1.tgz", + "integrity": "sha512-gwxPzLw8XKecy1nP63i9lOBritS3bWmxl02UX6G0TwMQZbMem1BCS1tEZgYd3mkrkiDrUMWaX+DbFCuDFo3K+A==", + "license": "MIT" + }, + "node_modules/@github/markdown-toolbar-element": { + "version": "2.2.3", + "resolved": 
"https://registry.npmjs.org/@github/markdown-toolbar-element/-/markdown-toolbar-element-2.2.3.tgz", + "integrity": "sha512-AlquKGee+IWiAMYVB0xyHFZRMnu4n3X4HTvJHu79GiVJ1ojTukCWyxMlF5NMsecoLcBKsuBhx3QPv2vkE/zQ0A==", + "license": "MIT" + }, + "node_modules/@github/paste-markdown": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/@github/paste-markdown/-/paste-markdown-1.5.3.tgz", + "integrity": "sha512-PzZ1b3PaqBzYqbT4fwKEhiORf38h2OcGp2+JdXNNM7inZ7egaSmfmhyNkQILpqWfS0AYtRS3CDq6z03eZ8yOMQ==", + "license": "MIT" + }, + "node_modules/@github/relative-time-element": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/@github/relative-time-element/-/relative-time-element-4.5.1.tgz", + "integrity": "sha512-uxCxCwe9vdwUDmRmM84tN0UERlj8MosLV44+r/VDj7DZUVUSTP4vyWlE9mRK6vHelOmT8DS3RMlaMrLlg1h1PQ==", + "license": "MIT" + }, + "node_modules/@github/tab-container-element": { + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@github/tab-container-element/-/tab-container-element-4.8.2.tgz", + "integrity": "sha512-WkaM4mfs8x7dXRWEaDb5deC0OhH6sGQ5cw8i/sVw25gikl4f8C7mHj0kihL5k3eKIIqmGT1Fdswdoi+9ZLDpRA==", + "license": "MIT" + }, + "node_modules/@hono/node-server": { + "version": "1.19.9", + "resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.9.tgz", + "integrity": "sha512-vHL6w3ecZsky+8P5MD+eFfaGTyCeOHUIFYMGpQGbrBTSmNNoxv0if69rEZ5giu36weC5saFuznL411gRX7bJDw==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=18.14.1" + }, + "peerDependencies": { + "hono": "^4" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + 
"node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@lit-labs/react": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@lit-labs/react/-/react-1.2.1.tgz", + "integrity": "sha512-DiZdJYFU0tBbdQkfwwRSwYyI/mcWkg3sWesKRsHUd4G+NekTmmeq9fzsurvcKTNVa0comNljwtg4Hvi1ds3V+A==", + "license": "BSD-3-Clause" + }, + "node_modules/@lit-labs/ssr-dom-shim": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/@lit-labs/ssr-dom-shim/-/ssr-dom-shim-1.5.1.tgz", + "integrity": 
"sha512-Aou5UdlSpr5whQe8AA/bZG0jMj96CoJIWbGfZ91qieWu5AWUMKw8VR/pAkQkJYvBNhmCcWnZlyyk5oze8JIqYA==", + "license": "BSD-3-Clause" + }, + "node_modules/@modelcontextprotocol/ext-apps": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/ext-apps/-/ext-apps-1.0.1.tgz", + "integrity": "sha512-rAPzBbB5GNgYk216paQjGKUgbNXSy/yeR95c0ni6Y4uvhWI2AeF+ztEOqQFLBMQy/MPM+02pbVK1HaQmQjMwYQ==", + "hasInstallScript": true, + "license": "MIT", + "workspaces": [ + "examples/*" + ], + "optionalDependencies": { + "@oven/bun-darwin-aarch64": "^1.2.21", + "@oven/bun-darwin-x64": "^1.2.21", + "@oven/bun-darwin-x64-baseline": "^1.2.21", + "@oven/bun-linux-aarch64": "^1.2.21", + "@oven/bun-linux-aarch64-musl": "^1.2.21", + "@oven/bun-linux-x64": "^1.2.21", + "@oven/bun-linux-x64-baseline": "^1.2.21", + "@oven/bun-linux-x64-musl": "^1.2.21", + "@oven/bun-linux-x64-musl-baseline": "^1.2.21", + "@oven/bun-windows-x64": "^1.2.21", + "@oven/bun-windows-x64-baseline": "^1.2.21", + "@rollup/rollup-darwin-arm64": "^4.53.3", + "@rollup/rollup-darwin-x64": "^4.53.3", + "@rollup/rollup-linux-arm64-gnu": "^4.53.3", + "@rollup/rollup-linux-x64-gnu": "^4.53.3", + "@rollup/rollup-win32-arm64-msvc": "^4.53.3", + "@rollup/rollup-win32-x64-msvc": "^4.53.3" + }, + "peerDependencies": { + "@modelcontextprotocol/sdk": "^1.24.0", + "react": "^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^17.0.0 || ^18.0.0 || ^19.0.0", + "zod": "^3.25.0 || ^4.0.0" + }, + "peerDependenciesMeta": { + "react": { + "optional": true + }, + "react-dom": { + "optional": true + } + } + }, + "node_modules/@modelcontextprotocol/sdk": { + "version": "1.26.0", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.26.0.tgz", + "integrity": "sha512-Y5RmPncpiDtTXDbLKswIJzTqu2hyBKxTNsgKqKclDbhIgg1wgtf1fRuvxgTnRfcnxtvvgbIEcqUOzZrJ6iSReg==", + "license": "MIT", + "peer": true, + "dependencies": { + "@hono/node-server": "^1.19.9", + "ajv": "^8.17.1", + "ajv-formats": "^3.0.1", + 
"content-type": "^1.0.5", + "cors": "^2.8.5", + "cross-spawn": "^7.0.5", + "eventsource": "^3.0.2", + "eventsource-parser": "^3.0.0", + "express": "^5.2.1", + "express-rate-limit": "^8.2.1", + "hono": "^4.11.4", + "jose": "^6.1.3", + "json-schema-typed": "^8.0.2", + "pkce-challenge": "^5.0.0", + "raw-body": "^3.0.0", + "zod": "^3.25 || ^4.0", + "zod-to-json-schema": "^3.25.1" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@cfworker/json-schema": "^4.1.1", + "zod": "^3.25 || ^4.0" + }, + "peerDependenciesMeta": { + "@cfworker/json-schema": { + "optional": true + }, + "zod": { + "optional": false + } + } + }, + "node_modules/@oddbird/popover-polyfill": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/@oddbird/popover-polyfill/-/popover-polyfill-0.3.8.tgz", + "integrity": "sha512-+aK7EHL3VggfsWGVqUwvtli2+kP5OWyseAsrefhzR2XWoi2oALUCeoDn63i5WS3ZOmLiXHRNBwHPeta8w+aM1g==", + "license": "BSD-3-Clause" + }, + "node_modules/@oven/bun-darwin-aarch64": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/@oven/bun-darwin-aarch64/-/bun-darwin-aarch64-1.3.8.tgz", + "integrity": "sha512-hPERz4IgXCM6Y6GdEEsJAFceyJMt29f3HlFzsvE/k+TQjChRhar6S+JggL35b9VmFfsdxyCOOTPqgnSrdV0etA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@oven/bun-darwin-x64": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/@oven/bun-darwin-x64/-/bun-darwin-x64-1.3.8.tgz", + "integrity": "sha512-SaWIxsRQYiT/eA60bqA4l8iNO7cJ6YD8ie82RerRp9voceBxPIZiwX4y20cTKy5qNaSGr9LxfYq7vDywTipiog==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@oven/bun-darwin-x64-baseline": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/@oven/bun-darwin-x64-baseline/-/bun-darwin-x64-baseline-1.3.8.tgz", + "integrity": "sha512-ArHVWpCRZI3vGLoN2/8ud8Kzqlgn1Gv+fNw+pMB9x18IzgAEhKxFxsWffnoaH21amam4tAOhpeewRIgdNtB0Cw==", + "cpu": [ + 
"x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@oven/bun-linux-aarch64": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/@oven/bun-linux-aarch64/-/bun-linux-aarch64-1.3.8.tgz", + "integrity": "sha512-rq0nNckobtS+ONoB95/Frfqr8jCtmSjjjEZlN4oyUx0KEBV11Vj4v3cDVaWzuI34ryL8FCog3HaqjfKn8R82Tw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oven/bun-linux-aarch64-musl": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/@oven/bun-linux-aarch64-musl/-/bun-linux-aarch64-musl-1.3.8.tgz", + "integrity": "sha512-HvJmhrfipL7GtuqFz6xNpmf27NGcCOMwCalPjNR6fvkLpe8A7Z1+QbxKKjOglelmlmZc3Vi2TgDUtxSqfqOToQ==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oven/bun-linux-x64": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/@oven/bun-linux-x64/-/bun-linux-x64-1.3.8.tgz", + "integrity": "sha512-YDgqVx1MI8E0oDbCEUSkAMBKKGnUKfaRtMdLh9Bjhu7JQacQ/ZCpxwi4HPf5Q0O1TbWRrdxGw2tA2Ytxkn7s1Q==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oven/bun-linux-x64-baseline": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/@oven/bun-linux-x64-baseline/-/bun-linux-x64-baseline-1.3.8.tgz", + "integrity": "sha512-3IkS3TuVOzMqPW6Gg9/8FEoKF/rpKZ9DZUfNy9GQ54+k4PGcXpptU3+dy8D4iDFCt4qe6bvoiAOdM44OOsZ+Wg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oven/bun-linux-x64-musl": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/@oven/bun-linux-x64-musl/-/bun-linux-x64-musl-1.3.8.tgz", + "integrity": "sha512-o7Jm5zL4aw9UBs3BcZLVbgGm2V4F10MzAQAV+ziKzoEfYmYtvDqRVxgKEq7BzUOVy4LgfrfwzEXw5gAQGRrhQQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@oven/bun-linux-x64-musl-baseline": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/@oven/bun-linux-x64-musl-baseline/-/bun-linux-x64-musl-baseline-1.3.8.tgz", + "integrity": "sha512-5g8XJwHhcTh8SGoKO7pR54ILYDbuFkGo+68DOMTiVB5eLxuLET+Or/camHgk4QWp3nUS5kNjip4G8BE8i0rHVQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oven/bun-windows-x64": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/@oven/bun-windows-x64/-/bun-windows-x64-1.3.8.tgz", + "integrity": "sha512-UDI3rowMm/tI6DIynpE4XqrOhr+1Ztk1NG707Wxv2nygup+anTswgCwjfjgmIe78LdoRNFrux2GpeolhQGW6vQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@oven/bun-windows-x64-baseline": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/@oven/bun-windows-x64-baseline/-/bun-windows-x64-baseline-1.3.8.tgz", + "integrity": "sha512-K6qBUKAZLXsjAwFxGTG87dsWlDjyDl2fqjJr7+x7lmv2m+aSEzmLOK+Z5pSvGkpjBp3LXV35UUgj8G0UTd0pPg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@primer/behaviors": { + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/@primer/behaviors/-/behaviors-1.10.1.tgz", + "integrity": "sha512-9iNr3ulh2W4zmp1e2COu3XBNjq/eqXbHkCvg2SMD/g8zSe7oBXa/FFg8gdaXmyykElfWRytvZkaJh14FrY22Gw==", + "license": "MIT" + }, + "node_modules/@primer/live-region-element": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/@primer/live-region-element/-/live-region-element-0.7.2.tgz", + "integrity": "sha512-wdxCHfcJzE1IPPjZNFR4RTwRcSWb7TN0fRdMH5HcxphLEnuZBWy0TAxk3xPA+/6lwiN3uEJ+ZWV4UF/glXh43A==", + "license": "MIT", + "dependencies": { + "@lit-labs/ssr-dom-shim": "^1.2.0" + } + }, + "node_modules/@primer/octicons-react": { + "version": "19.21.2", + "resolved": "https://registry.npmjs.org/@primer/octicons-react/-/octicons-react-19.21.2.tgz", + "integrity": 
"sha512-Bk+S08EpeeWLFscUxwEY8t5z14KxByhIbPG6OiYXSNrkbzN4fmRetnB/C+K1srn4BWuRSwwFxUwvDI2ytgNrFw==", + "license": "MIT", + "engines": { + "node": ">=8" + }, + "peerDependencies": { + "react": ">=16.3" + } + }, + "node_modules/@primer/primitives": { + "version": "7.17.1", + "resolved": "https://registry.npmjs.org/@primer/primitives/-/primitives-7.17.1.tgz", + "integrity": "sha512-SiPzEb+up1nDpV2NGwNiY8m6sGnF3OUqRb0has5s6T40vq6Li/g3cYVgl+oolEa4DUoNygEPs09jwJt24f/3zg==", + "license": "MIT" + }, + "node_modules/@primer/react": { + "version": "36.27.0", + "resolved": "https://registry.npmjs.org/@primer/react/-/react-36.27.0.tgz", + "integrity": "sha512-dVyp0f9zbbQYQZ6ztfMET43vVaWhvSz+qWirBzpRjDxvCk8vCQsvWrVGUU/PR0kAxxDHf6hqeLG7vcDL229NLA==", + "license": "MIT", + "dependencies": { + "@github/combobox-nav": "^2.1.5", + "@github/markdown-toolbar-element": "^2.1.0", + "@github/paste-markdown": "^1.4.0", + "@github/relative-time-element": "^4.4.1", + "@github/tab-container-element": "^4.8.0", + "@lit-labs/react": "1.2.1", + "@oddbird/popover-polyfill": "^0.3.1", + "@primer/behaviors": "^1.7.0", + "@primer/live-region-element": "^0.7.0", + "@primer/octicons-react": "^19.9.0", + "@primer/primitives": "^7.16.0", + "@styled-system/css": "^5.1.5", + "@styled-system/props": "^5.1.5", + "@styled-system/theme-get": "^5.1.2", + "@types/react-is": "^18.2.1", + "@types/styled-system": "^5.1.12", + "@types/styled-system__css": "^5.0.16", + "@types/styled-system__theme-get": "^5.0.1", + "clsx": "^1.2.1", + "color2k": "^2.0.3", + "deepmerge": "^4.2.2", + "focus-visible": "^5.2.0", + "fzy.js": "^0.4.1", + "history": "^5.0.0", + "lodash.isempty": "^4.4.0", + "lodash.isobject": "^3.0.2", + "react-intersection-observer": "^9.4.3", + "react-is": "^18.2.0", + "react-markdown": "8.0.7", + "styled-system": "^5.1.5" + }, + "engines": { + "node": ">=12", + "npm": ">=7" + }, + "peerDependencies": { + "@types/react": "^18.0.0", + "@types/react-dom": "^18.0.0", + "@types/styled-components": "^5.1.11", 
+ "react": "^18.0.0", + "react-dom": "^18.0.0", + "styled-components": "5.x" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + }, + "@types/styled-components": { + "optional": true + } + } + }, + "node_modules/@primer/react/node_modules/@types/hast": { + "version": "2.3.10", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.10.tgz", + "integrity": "sha512-McWspRw8xx8J9HurkVBfYj0xKoE25tOFlHGdx4MJ5xORQrMGZNqJhVQWaIbm6Oyla5kYOXtDiopzKRJzEOkwJw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2" + } + }, + "node_modules/@primer/react/node_modules/@types/mdast": { + "version": "3.0.15", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.15.tgz", + "integrity": "sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2" + } + }, + "node_modules/@primer/react/node_modules/@types/unist": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", + "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==", + "license": "MIT" + }, + "node_modules/@primer/react/node_modules/hast-util-whitespace": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-2.0.1.tgz", + "integrity": "sha512-nAxA0v8+vXSBDt3AnRUNjyRIQ0rD+ntpbAp4LnPkumc5M9yUbSMa4XDU9Q6etY4f1Wp4bNgvc1yjiZtsTTrSng==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/@primer/react/node_modules/inline-style-parser": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz", + "integrity": "sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==", + "license": "MIT" + }, + 
"node_modules/@primer/react/node_modules/mdast-util-from-markdown": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.3.1.tgz", + "integrity": "sha512-4xTO/M8c82qBcnQc1tgpNtubGUW/Y1tBQ1B0i5CtSoelOLKFYlElIr3bvgREYYO5iRqbMY1YuqZng0GVOI8Qww==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "decode-named-character-reference": "^1.0.0", + "mdast-util-to-string": "^3.1.0", + "micromark": "^3.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-decode-string": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "unist-util-stringify-position": "^3.0.0", + "uvu": "^0.5.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/@primer/react/node_modules/mdast-util-to-hast": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-12.3.0.tgz", + "integrity": "sha512-pits93r8PhnIoU4Vy9bjW39M2jJ6/tdHyja9rrot9uujkN7UTU9SDnE6WNJz/IGyQk3XHX6yNNtrBH6cQzm8Hw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "mdast-util-definitions": "^5.0.0", + "micromark-util-sanitize-uri": "^1.1.0", + "trim-lines": "^3.0.0", + "unist-util-generated": "^2.0.0", + "unist-util-position": "^4.0.0", + "unist-util-visit": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/@primer/react/node_modules/mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^3.0.0" + }, + 
"funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/@primer/react/node_modules/micromark": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-3.2.0.tgz", + "integrity": "sha512-uD66tJj54JLYq0De10AhWycZWGQNUvDI55xPgk2sQM5kn1JYlhbCMTtEeT27+vAhW2FBQxLlOmS3pmA7/2z4aA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "micromark-core-commonmark": "^1.0.1", + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-combine-extensions": "^1.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-sanitize-uri": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "uvu": "^0.5.0" + } + }, + "node_modules/@primer/react/node_modules/micromark-core-commonmark": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-1.1.0.tgz", + "integrity": "sha512-BgHO1aRbolh2hcrzL2d1La37V0Aoz73ymF8rAcKnohLy93titmv62E0gP8Hrx9PKcKrqCZ1BbLGbP3bEhoXYlw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "micromark-factory-destination": "^1.0.0", + "micromark-factory-label": "^1.0.0", + "micromark-factory-space": "^1.0.0", + 
"micromark-factory-title": "^1.0.0", + "micromark-factory-whitespace": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-classify-character": "^1.0.0", + "micromark-util-html-tag-name": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "uvu": "^0.5.0" + } + }, + "node_modules/@primer/react/node_modules/micromark-factory-destination": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.1.0.tgz", + "integrity": "sha512-XaNDROBgx9SgSChd69pjiGKbV+nfHGDPVYFs5dOoDd7ZnMAE+Cuu91BCpsY8RT2NP9vo/B8pds2VQNCLiu0zhg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/@primer/react/node_modules/micromark-factory-label": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-1.1.0.tgz", + "integrity": "sha512-OLtyez4vZo/1NjxGhcpDSbHQ+m0IIGnT8BoPamh+7jVlzLJBH98zzuCoUeMxvM6WsNeh8wx8cKvqLiPHEACn0w==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0" + } + }, + "node_modules/@primer/react/node_modules/micromark-factory-space": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-1.1.0.tgz", + "integrity": "sha512-cRzEj7c0OL4Mw2v6nwzttyOZe8XY/Z8G0rzmWQZTBi/jjwyw/U4uqKtUORXQrR5bAZZnbTI/feRV/R7hc4jQYQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/@primer/react/node_modules/micromark-factory-title": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-1.1.0.tgz", + "integrity": "sha512-J7n9R3vMmgjDOCY8NPw55jiyaQnH5kBdV2/UXCtZIpnHH3P6nHUKaH7XXEYuWwx/xUJcawa8plLBEjMPU24HzQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/@primer/react/node_modules/micromark-factory-whitespace": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-1.1.0.tgz", + "integrity": "sha512-v2WlmiymVSp5oMg+1Q0N1Lxmt6pMhIHD457whWM7/GUlEks1hI9xj5w3zbc4uuMKXGisksZk8DzP2UyGbGqNsQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + 
"node_modules/@primer/react/node_modules/micromark-util-character": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-1.2.0.tgz", + "integrity": "sha512-lXraTwcX3yH/vMDaFWCQJP1uIszLVebzUa3ZHdrgxr7KEU/9mL4mVgCpGbyhvNLNlauROiNUq7WN5u7ndbY6xg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/@primer/react/node_modules/micromark-util-chunked": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-1.1.0.tgz", + "integrity": "sha512-Ye01HXpkZPNcV6FiyoW2fGZDUw4Yc7vT0E9Sad83+bEDiCJ1uXu0S3mr8WLpsz3HaG3x2q0HM6CTuPdcZcluFQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/@primer/react/node_modules/micromark-util-classify-character": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-1.1.0.tgz", + "integrity": "sha512-SL0wLxtKSnklKSUplok1WQFoGhUdWYKggKUiqhX+Swala+BtptGCu5iPRc+xvzJ4PXE/hwM3FNXsfEVgoZsWbw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/@primer/react/node_modules/micromark-util-combine-extensions": { + "version": "1.1.0", + 
"resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-1.1.0.tgz", + "integrity": "sha512-Q20sp4mfNf9yEqDL50WwuWZHUrCO4fEyeDCnMGmG5Pr0Cz15Uo7KBs6jq+dq0EgX4DPwwrh9m0X+zPV1ypFvUA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-chunked": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/@primer/react/node_modules/micromark-util-decode-numeric-character-reference": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-1.1.0.tgz", + "integrity": "sha512-m9V0ExGv0jB1OT21mrWcuf4QhP46pH1KkfWy9ZEezqHKAxkj4mPCy3nIH1rkbdMlChLHX531eOrymlwyZIf2iw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/@primer/react/node_modules/micromark-util-decode-string": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-1.1.0.tgz", + "integrity": "sha512-YphLGCK8gM1tG1bd54azwyrQRjCFcmgj2S2GoJDNnh4vYtnL38JS8M4gpxzOPNyHdNEpheyWXCTnnTDY3N+NVQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-symbol": "^1.0.0" + } + }, + 
"node_modules/@primer/react/node_modules/micromark-util-encode": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-1.1.0.tgz", + "integrity": "sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/@primer/react/node_modules/micromark-util-html-tag-name": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.2.0.tgz", + "integrity": "sha512-VTQzcuQgFUD7yYztuQFKXT49KghjtETQ+Wv/zUjGSGBioZnkA4P1XXZPT1FHeJA6RwRXSF47yvJ1tsJdoxwO+Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/@primer/react/node_modules/micromark-util-normalize-identifier": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-1.1.0.tgz", + "integrity": "sha512-N+w5vhqrBihhjdpM8+5Xsxy71QWqGn7HYNUvch71iV2PM7+E3uWGox1Qp90loa1ephtCxG2ftRV/Conitc6P2Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/@primer/react/node_modules/micromark-util-resolve-all": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-1.1.0.tgz", + "integrity": "sha512-b/G6BTMSg+bX+xVCshPTPyAu2tmA0E4X98NSR7eIbeC6ycCqCeE7wjfDIgzEbkzdEVJXRtOG4FbEm/uGbCRouA==", + 
"funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/@primer/react/node_modules/micromark-util-sanitize-uri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-1.2.0.tgz", + "integrity": "sha512-QO4GXv0XZfWey4pYFndLUKEAktKkG5kZTdUNaTAkzbuJxn2tNBOr+QtxR2XpWaMhbImT2dPzyLrPXLlPhph34A==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/@primer/react/node_modules/micromark-util-subtokenize": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-1.1.0.tgz", + "integrity": "sha512-kUQHyzRoxvZO2PuLzMt2P/dwVsTiivCK8icYTeR+3WgbuPqfHgPPy7nFKbeqRivBvn/3N3GBiNC+JRTMSxEC7A==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-chunked": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0" + } + }, + "node_modules/@primer/react/node_modules/micromark-util-symbol": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-1.1.0.tgz", + "integrity": "sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": 
"https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/@primer/react/node_modules/micromark-util-types": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-1.1.0.tgz", + "integrity": "sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/@primer/react/node_modules/property-information": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", + "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/@primer/react/node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "license": "MIT" + }, + "node_modules/@primer/react/node_modules/react-markdown": { + "version": "8.0.7", + "resolved": "https://registry.npmjs.org/react-markdown/-/react-markdown-8.0.7.tgz", + "integrity": "sha512-bvWbzG4MtOU62XqBx3Xx+zB2raaFFsq4mYiAzfjXJMEz2sixgeAfraA3tvzULF02ZdOMUOKTBFFaZJDDrq+BJQ==", + "license": "MIT", + "dependencies": { + "@types/hast": "^2.0.0", + "@types/prop-types": "^15.0.0", + "@types/unist": "^2.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-whitespace": "^2.0.0", + "prop-types": "^15.0.0", + "property-information": "^6.0.0", + "react-is": "^18.0.0", + "remark-parse": "^10.0.0", + "remark-rehype": 
"^10.0.0", + "space-separated-tokens": "^2.0.0", + "style-to-object": "^0.4.0", + "unified": "^10.0.0", + "unist-util-visit": "^4.0.0", + "vfile": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "peerDependencies": { + "@types/react": ">=16", + "react": ">=16" + } + }, + "node_modules/@primer/react/node_modules/remark-parse": { + "version": "10.0.2", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-10.0.2.tgz", + "integrity": "sha512-3ydxgHa/ZQzG8LvC7jTXccARYDcRld3VfcgIIFs7bI6vbRSxJJmzgLEIIoYKyrfhaY+ujuWaf/PJiMZXoiCXgw==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^3.0.0", + "mdast-util-from-markdown": "^1.0.0", + "unified": "^10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/@primer/react/node_modules/remark-rehype": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-10.1.0.tgz", + "integrity": "sha512-EFmR5zppdBp0WQeDVZ/b66CWJipB2q2VLNFMabzDSGR66Z2fQii83G5gTBbgGEnEEA0QRussvrFHxk1HWGJskw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "mdast-util-to-hast": "^12.1.0", + "unified": "^10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/@primer/react/node_modules/style-to-object": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.4.4.tgz", + "integrity": "sha512-HYNoHZa2GorYNyqiCaBgsxvcJIn7OHq6inEga+E6Ke3m5JkoqpQbnFssk4jwe+K7AhGa2fcha4wSOf1Kn01dMg==", + "license": "MIT", + "dependencies": { + "inline-style-parser": "0.1.1" + } + }, + "node_modules/@primer/react/node_modules/unified": { + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz", + "integrity": 
"sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0", + "bail": "^2.0.0", + "extend": "^3.0.0", + "is-buffer": "^2.0.0", + "is-plain-obj": "^4.0.0", + "trough": "^2.0.0", + "vfile": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/@primer/react/node_modules/unist-util-is": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/@primer/react/node_modules/unist-util-position": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.4.tgz", + "integrity": "sha512-kUBE91efOWfIVBo8xzh/uZQ7p9ffYRtUbMRZBNFYwf0RK8koUMx6dGUfwylLOKmaT2cs4wSW96QoYUSXAyEtpg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/@primer/react/node_modules/unist-util-stringify-position": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.3.tgz", + "integrity": "sha512-k5GzIBZ/QatR8N5X2y+drfpWG8IDBzdnVj6OInRNWm1oXrzydiaAT2OQiA8DPRRZyAKb9b6I2a6PxYklZD0gKg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/@primer/react/node_modules/unist-util-visit": { + "version": "4.1.2", + "resolved": 
"https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz", + "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^5.1.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/@primer/react/node_modules/unist-util-visit-parents": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz", + "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/@primer/react/node_modules/vfile": { + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.7.tgz", + "integrity": "sha512-r7qlzkgErKjobAmyNIkkSpizsFPYiUPuJb5pNW1RB4JcYVZhs4lIbVqk8XPk033CV/1z8ss5pkax8SuhGpcG8g==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0", + "is-buffer": "^2.0.0", + "unist-util-stringify-position": "^3.0.0", + "vfile-message": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/@primer/react/node_modules/vfile-message": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz", + "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + 
"node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.27", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz", + "integrity": "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.57.1.tgz", + "integrity": "sha512-A6ehUVSiSaaliTxai040ZpZ2zTevHYbvu/lDoeAteHI8QnaosIzm4qwtezfRg1jOYaUmnzLX1AOD6Z+UJjtifg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.57.1.tgz", + "integrity": "sha512-dQaAddCY9YgkFHZcFNS/606Exo8vcLHwArFZ7vxXq4rigo2bb494/xKMMwRRQW6ug7Js6yXmBZhSBRuBvCCQ3w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.57.1.tgz", + "integrity": "sha512-crNPrwJOrRxagUYeMn/DZwqN88SDmwaJ8Cvi/TN1HnWBU7GwknckyosC2gd0IqYRsHDEnXf328o9/HC6OkPgOg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.57.1.tgz", + "integrity": "sha512-Ji8g8ChVbKrhFtig5QBV7iMaJrGtpHelkB3lsaKzadFBe58gmjfGXAOfI5FV0lYMH8wiqsxKQ1C9B0YTRXVy4w==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.57.1", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.57.1.tgz", + "integrity": "sha512-R+/WwhsjmwodAcz65guCGFRkMb4gKWTcIeLy60JJQbXrJ97BOXHxnkPFrP+YwFlaS0m+uWJTstrUA9o+UchFug==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.57.1.tgz", + "integrity": "sha512-IEQTCHeiTOnAUC3IDQdzRAGj3jOAYNr9kBguI7MQAAZK3caezRrg0GxAb6Hchg4lxdZEI5Oq3iov/w/hnFWY9Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.57.1.tgz", + "integrity": "sha512-F8sWbhZ7tyuEfsmOxwc2giKDQzN3+kuBLPwwZGyVkLlKGdV1nvnNwYD0fKQ8+XS6hp9nY7B+ZeK01EBUE7aHaw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.57.1.tgz", + "integrity": "sha512-rGfNUfn0GIeXtBP1wL5MnzSj98+PZe/AXaGBCRmT0ts80lU5CATYGxXukeTX39XBKsxzFpEeK+Mrp9faXOlmrw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.57.1.tgz", + "integrity": "sha512-MMtej3YHWeg/0klK2Qodf3yrNzz6CGjo2UntLvk2RSPlhzgLvYEB3frRvbEF2wRKh1Z2fDIg9KRPe1fawv7C+g==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.57.1.tgz", + "integrity": "sha512-1a/qhaaOXhqXGpMFMET9VqwZakkljWHLmZOX48R0I/YLbhdxr1m4gtG1Hq7++VhVUmf+L3sTAf9op4JlhQ5u1Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.57.1.tgz", + "integrity": "sha512-QWO6RQTZ/cqYtJMtxhkRkidoNGXc7ERPbZN7dVW5SdURuLeVU7lwKMpo18XdcmpWYd0qsP1bwKPf7DNSUinhvA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.57.1.tgz", + "integrity": "sha512-xpObYIf+8gprgWaPP32xiN5RVTi/s5FCR+XMXSKmhfoJjrpRAjCuuqQXyxUa/eJTdAE6eJ+KDKaoEqjZQxh3Gw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.57.1.tgz", + "integrity": "sha512-4BrCgrpZo4hvzMDKRqEaW1zeecScDCR+2nZ86ATLhAoJ5FQ+lbHVD3ttKe74/c7tNT9c6F2viwB3ufwp01Oh2w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.57.1.tgz", + "integrity": "sha512-NOlUuzesGauESAyEYFSe3QTUguL+lvrN1HtwEEsU2rOwdUDeTMJdO5dUYl/2hKf9jWydJrO9OL/XSSf65R5+Xw==", + "cpu": [ + "ppc64" + ], + "dev": true, 
+ "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.57.1.tgz", + "integrity": "sha512-ptA88htVp0AwUUqhVghwDIKlvJMD/fmL/wrQj99PRHFRAG6Z5nbWoWG4o81Nt9FT+IuqUQi+L31ZKAFeJ5Is+A==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.57.1.tgz", + "integrity": "sha512-S51t7aMMTNdmAMPpBg7OOsTdn4tySRQvklmL3RpDRyknk87+Sp3xaumlatU+ppQ+5raY7sSTcC2beGgvhENfuw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.57.1.tgz", + "integrity": "sha512-Bl00OFnVFkL82FHbEqy3k5CUCKH6OEJL54KCyx2oqsmZnFTR8IoNqBF+mjQVcRCT5sB6yOvK8A37LNm/kPJiZg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.57.1.tgz", + "integrity": "sha512-ABca4ceT4N+Tv/GtotnWAeXZUZuM/9AQyCyKYyKnpk4yoA7QIAuBt6Hkgpw8kActYlew2mvckXkvx0FfoInnLg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.57.1.tgz", + "integrity": "sha512-HFps0JeGtuOR2convgRRkHCekD7j+gdAuXM+/i6kGzQtFhlCtQkpwtNzkNj6QhCDp7DRJ7+qC/1Vg2jt5iSOFw==", + 
"cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.57.1.tgz", + "integrity": "sha512-H+hXEv9gdVQuDTgnqD+SQffoWoc0Of59AStSzTEj/feWTBAnSfSD3+Dql1ZruJQxmykT/JVY0dE8Ka7z0DH1hw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.57.1.tgz", + "integrity": "sha512-4wYoDpNg6o/oPximyc/NG+mYUejZrCU2q+2w6YZqrAs2UcNUChIZXjtafAiiZSUc7On8v5NyNj34Kzj/Ltk6dQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.57.1.tgz", + "integrity": "sha512-O54mtsV/6LW3P8qdTcamQmuC990HDfR71lo44oZMZlXU4tzLrbvTii87Ni9opq60ds0YzuAlEr/GNwuNluZyMQ==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.57.1.tgz", + "integrity": "sha512-P3dLS+IerxCT/7D2q2FYcRdWRl22dNbrbBEtxdWhXrfIMPP9lQhb5h4Du04mdl5Woq05jVCDPCMF7Ub0NAjIew==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.57.1.tgz", + "integrity": 
"sha512-VMBH2eOOaKGtIJYleXsi2B8CPVADrh+TyNxJ4mWPnKfLB/DBUmzW+5m1xUrcwWoMfSLagIRpjUFeW5CO5hyciQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.57.1.tgz", + "integrity": "sha512-mxRFDdHIWRxg3UfIIAwCm6NzvxG0jDX/wBN6KsQFTvKFqqg9vTrWUE68qEjHt19A5wwx5X5aUi2zuZT7YR0jrA==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@styled-system/background": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/background/-/background-5.1.2.tgz", + "integrity": "sha512-jtwH2C/U6ssuGSvwTN3ri/IyjdHb8W9X/g8Y0JLcrH02G+BW3OS8kZdHphF1/YyRklnrKrBT2ngwGUK6aqqV3A==", + "license": "MIT", + "dependencies": { + "@styled-system/core": "^5.1.2" + } + }, + "node_modules/@styled-system/border": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/@styled-system/border/-/border-5.1.5.tgz", + "integrity": "sha512-JvddhNrnhGigtzWRCVuAHepniyVi6hBlimxWDVAdcTuk7aRn9BYJUwfHslURtwYFsF5FoEs8Zmr1oZq2M1AP0A==", + "license": "MIT", + "dependencies": { + "@styled-system/core": "^5.1.2" + } + }, + "node_modules/@styled-system/color": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/color/-/color-5.1.2.tgz", + "integrity": "sha512-1kCkeKDZkt4GYkuFNKc7vJQMcOmTl3bJY3YBUs7fCNM6mMYJeT1pViQ2LwBSBJytj3AB0o4IdLBoepgSgGl5MA==", + "license": "MIT", + "dependencies": { + "@styled-system/core": "^5.1.2" + } + }, + "node_modules/@styled-system/core": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/core/-/core-5.1.2.tgz", + "integrity": "sha512-XclBDdNIy7OPOsN4HBsawG2eiWfCcuFt6gxKn1x4QfMIgeO6TOlA2pZZ5GWZtIhCUqEPTgIBta6JXsGyCkLBYw==", + "license": "MIT", + "dependencies": { + "object-assign": "^4.1.1" + } + }, + 
"node_modules/@styled-system/css": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/@styled-system/css/-/css-5.1.5.tgz", + "integrity": "sha512-XkORZdS5kypzcBotAMPBoeckDs9aSZVkvrAlq5K3xP8IMAUek+x2O4NtwoSgkYkWWzVBu6DGdFZLR790QWGG+A==", + "license": "MIT" + }, + "node_modules/@styled-system/flexbox": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/flexbox/-/flexbox-5.1.2.tgz", + "integrity": "sha512-6hHV52+eUk654Y1J2v77B8iLeBNtc+SA3R4necsu2VVinSD7+XY5PCCEzBFaWs42dtOEDIa2lMrgL0YBC01mDQ==", + "license": "MIT", + "dependencies": { + "@styled-system/core": "^5.1.2" + } + }, + "node_modules/@styled-system/grid": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/grid/-/grid-5.1.2.tgz", + "integrity": "sha512-K3YiV1KyHHzgdNuNlaw8oW2ktMuGga99o1e/NAfTEi5Zsa7JXxzwEnVSDSBdJC+z6R8WYTCYRQC6bkVFcvdTeg==", + "license": "MIT", + "dependencies": { + "@styled-system/core": "^5.1.2" + } + }, + "node_modules/@styled-system/layout": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/layout/-/layout-5.1.2.tgz", + "integrity": "sha512-wUhkMBqSeacPFhoE9S6UF3fsMEKFv91gF4AdDWp0Aym1yeMPpqz9l9qS/6vjSsDPF7zOb5cOKC3tcKKOMuDCPw==", + "license": "MIT", + "dependencies": { + "@styled-system/core": "^5.1.2" + } + }, + "node_modules/@styled-system/position": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/position/-/position-5.1.2.tgz", + "integrity": "sha512-60IZfMXEOOZe3l1mCu6sj/2NAyUmES2kR9Kzp7s2D3P4qKsZWxD1Se1+wJvevb+1TP+ZMkGPEYYXRyU8M1aF5A==", + "license": "MIT", + "dependencies": { + "@styled-system/core": "^5.1.2" + } + }, + "node_modules/@styled-system/props": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/@styled-system/props/-/props-5.1.5.tgz", + "integrity": "sha512-FXhbzq2KueZpGaHxaDm8dowIEWqIMcgsKs6tBl6Y6S0njG9vC8dBMI6WSLDnzMoSqIX3nSKHmOmpzpoihdDewg==", + "license": "MIT", + "dependencies": { + "styled-system": "^5.1.5" + } + }, 
+ "node_modules/@styled-system/shadow": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/shadow/-/shadow-5.1.2.tgz", + "integrity": "sha512-wqniqYb7XuZM7K7C0d1Euxc4eGtqEe/lvM0WjuAFsQVImiq6KGT7s7is+0bNI8O4Dwg27jyu4Lfqo/oIQXNzAg==", + "license": "MIT", + "dependencies": { + "@styled-system/core": "^5.1.2" + } + }, + "node_modules/@styled-system/space": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/space/-/space-5.1.2.tgz", + "integrity": "sha512-+zzYpR8uvfhcAbaPXhH8QgDAV//flxqxSjHiS9cDFQQUSznXMQmxJegbhcdEF7/eNnJgHeIXv1jmny78kipgBA==", + "license": "MIT", + "dependencies": { + "@styled-system/core": "^5.1.2" + } + }, + "node_modules/@styled-system/theme-get": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/theme-get/-/theme-get-5.1.2.tgz", + "integrity": "sha512-afAYdRqrKfNIbVgmn/2Qet1HabxmpRnzhFwttbGr6F/mJ4RDS/Cmn+KHwHvNXangQsWw/5TfjpWV+rgcqqIcJQ==", + "license": "MIT", + "dependencies": { + "@styled-system/core": "^5.1.2" + } + }, + "node_modules/@styled-system/typography": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/typography/-/typography-5.1.2.tgz", + "integrity": "sha512-BxbVUnN8N7hJ4aaPOd7wEsudeT7CxarR+2hns8XCX1zp0DFfbWw4xYa/olA0oQaqx7F1hzDg+eRaGzAJbF+jOg==", + "license": "MIT", + "dependencies": { + "@styled-system/core": "^5.1.2" + } + }, + "node_modules/@styled-system/variant": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/@styled-system/variant/-/variant-5.1.5.tgz", + "integrity": "sha512-Yn8hXAFoWIro8+Q5J8YJd/mP85Teiut3fsGVR9CAxwgNfIAiqlYxsk5iHU7VHJks/0KjL4ATSjmbtCDC/4l1qw==", + "license": "MIT", + "dependencies": { + "@styled-system/core": "^5.1.2", + "@styled-system/css": "^5.1.5" + } + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": 
"sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/debug": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", + "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "license": "MIT", + "dependencies": { + "@types/ms": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": 
"sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "license": "MIT" + }, + "node_modules/@types/estree-jsx": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.5.tgz", + "integrity": "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==", + "license": "MIT", + "dependencies": { + "@types/estree": "*" + } + }, + "node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/mdast": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", + "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/ms": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", + "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "25.2.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-25.2.0.tgz", + "integrity": "sha512-DZ8VwRFUNzuqJ5khrvwMXHmvPe+zGayJhr2CDNiKB1WBE1ST8Djl00D0IC4vvNmHMdj6DlbYRIaFE7WHjlDl5w==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~7.16.0" + } + }, + "node_modules/@types/prop-types": { + "version": "15.7.15", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz", + "integrity": "sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==", + "license": "MIT" + }, + "node_modules/@types/react": { + 
"version": "18.3.27", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.27.tgz", + "integrity": "sha512-cisd7gxkzjBKU2GgdYrTdtQx1SORymWyaAFhaxQPK9bYO9ot3Y5OikQRvY0VYQtvwjeQnizCINJAenh/V7MK2w==", + "license": "MIT", + "dependencies": { + "@types/prop-types": "*", + "csstype": "^3.2.2" + } + }, + "node_modules/@types/react-dom": { + "version": "18.3.7", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.7.tgz", + "integrity": "sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==", + "devOptional": true, + "license": "MIT", + "peerDependencies": { + "@types/react": "^18.0.0" + } + }, + "node_modules/@types/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/@types/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-zts4lhQn5ia0cF/y2+3V6Riu0MAfez9/LJYavdM8TvcVl+S91A/7VWxyBT8hbRuWspmuCaiGI0F41OJYGrKhRA==", + "license": "MIT", + "dependencies": { + "@types/react": "^18" + } + }, + "node_modules/@types/styled-system": { + "version": "5.1.25", + "resolved": "https://registry.npmjs.org/@types/styled-system/-/styled-system-5.1.25.tgz", + "integrity": "sha512-B1oyjE4oeAbVnkigcB0WqU2gPFuTwLV/KkLa/uJZWFB9JWVKq1Fs0QwodZXZ9Sq6cb9ngY4kDqRY/dictIchjA==", + "license": "MIT", + "dependencies": { + "csstype": "^3.2.2" + } + }, + "node_modules/@types/styled-system__css": { + "version": "5.0.22", + "resolved": "https://registry.npmjs.org/@types/styled-system__css/-/styled-system__css-5.0.22.tgz", + "integrity": "sha512-1oOWbdcL1SE2t6hTC3LlwrVHK3Z1Py4KYFehl6NL2XcLxS/L0ELEmN6APNWIYqUywPdeaKlQkRpV5dn0trLjGA==", + "license": "MIT", + "dependencies": { + "csstype": "^3.2.2" + } + }, + "node_modules/@types/styled-system__theme-get": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@types/styled-system__theme-get/-/styled-system__theme-get-5.0.4.tgz", + "integrity": "sha512-dbzwxQ+8x6Bo3EKZMo9M3Knzo77ukwoC/isKW+GAuF5TenXlPkvgzx4t4+Lp0+fKs2M4owSef0KO3gtGW3Hpkw==", 
+ "license": "MIT" + }, + "node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" + }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "license": "ISC" + }, + "node_modules/@vitejs/plugin-react": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz", + "integrity": "sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.28.0", + "@babel/plugin-transform-react-jsx-self": "^7.27.1", + "@babel/plugin-transform-react-jsx-source": "^7.27.1", + "@rolldown/pluginutils": "1.0.0-beta.27", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.17.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/accepts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "license": "MIT", + "peer": true, + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "license": "MIT", + "peer": true, + "dependencies": { + "fast-deep-equal": 
"^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-formats": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz", + "integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "ajv": "^8.0.0" + }, + "peerDependencies": { + "ajv": "^8.0.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/babel-plugin-styled-components": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/babel-plugin-styled-components/-/babel-plugin-styled-components-2.1.4.tgz", + "integrity": "sha512-Xgp9g+A/cG47sUyRwwYxGM4bR/jDRg5N6it/8+HxCnbT5XNKSKDT9xm4oag/osgqjC2It/vH0yXsomOG6k558g==", + "license": "MIT", + "peer": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.22.5", + "@babel/helper-module-imports": "^7.22.5", + "@babel/plugin-syntax-jsx": "^7.22.5", + "lodash": "^4.17.21", + "picomatch": "^2.3.1" + }, + "peerDependencies": { + "styled-components": ">= 2" + } + }, + "node_modules/bail": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz", + "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/baseline-browser-mapping": { + "version": "2.9.19", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.19.tgz", + "integrity": "sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg==", + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.js" + } + }, + 
"node_modules/body-parser": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.2.tgz", + "integrity": "sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA==", + "license": "MIT", + "peer": true, + "dependencies": { + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.3", + "http-errors": "^2.0.0", + "iconv-lite": "^0.7.0", + "on-finished": "^2.4.1", + "qs": "^6.14.1", + "raw-body": "^3.0.1", + "type-is": "^2.0.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": 
"https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/camelize": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/camelize/-/camelize-1.0.1.tgz", + "integrity": "sha512-dU+Tx2fsypxTgtLoE36npi3UqcjSSMNYfkqgmoEhtZrraP5VWq0K7FkWVTYa8eMPtnU/G2txVsfdCJTn9uzpuQ==", + "license": "MIT", + "peer": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001767", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001767.tgz", + "integrity": "sha512-34+zUAMhSH+r+9eKmYG+k2Rpt8XttfE4yXAjoZvkAPs15xcYQhyBYdalJ65BzivAvGRMViEjy6oKr/S91loekQ==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": 
"github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/ccount": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", + "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz", + "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-html4": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz", + "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-legacy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", + "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-reference-invalid": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz", + "integrity": "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==", + "license": "MIT", + "funding": { + "type": "github", + "url": 
"https://github.com/sponsors/wooorm" + } + }, + "node_modules/clsx": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz", + "integrity": "sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/color2k": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/color2k/-/color2k-2.0.3.tgz", + "integrity": "sha512-zW190nQTIoXcGCaU08DvVNFTmQhUpnJfVuAKfWqUQkflXKpaDdpaYoM0iluLS9lgJNHyBF58KKA2FBEwkD7wog==", + "license": "MIT" + }, + "node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/content-disposition": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.1.tgz", + "integrity": "sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": 
"sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "license": "MIT" + }, + "node_modules/cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=6.6.0" + } + }, + "node_modules/cors": { + "version": "2.8.6", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.6.tgz", + "integrity": "sha512-tJtZBBHA6vjIAaF6EnIaq6laBBP9aq/Y3ouVJjEfoHbRBcHBAHYcMh/w8LDrk2PvIMMq8gmopa5D4V8RmbrxGw==", + "license": "MIT", + "peer": true, + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/cross-env": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.3.tgz", + "integrity": "sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.1" + }, + "bin": { + "cross-env": "src/bin/cross-env.js", + "cross-env-shell": "src/bin/cross-env-shell.js" + }, + "engines": { + "node": ">=10.14", + "npm": ">=6", + "yarn": ">=1" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": 
"sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/css-color-keywords": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/css-color-keywords/-/css-color-keywords-1.0.0.tgz", + "integrity": "sha512-FyyrDHZKEjXDpNJYvVsV960FiqQyXc/LlYmsxl2BcdMb2WPx0OGRVgTg55rPSyLSNMqP52R9r8geSp7apN3Ofg==", + "license": "ISC", + "peer": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/css-to-react-native": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/css-to-react-native/-/css-to-react-native-3.2.0.tgz", + "integrity": "sha512-e8RKaLXMOFii+02mOlqwjbD00KSEKqblnpO9e++1aXS1fPQOpS1YoqdVHBqPjHNoxeF2mimzVqawm2KCbEdtHQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "camelize": "^1.0.0", + "css-color-keywords": "^1.0.0", + "postcss-value-parser": "^4.0.2" + } + }, + "node_modules/csstype": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decode-named-character-reference": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.3.0.tgz", + "integrity": 
"sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q==", + "license": "MIT", + "dependencies": { + "character-entities": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/devlop": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", + "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", + "license": "MIT", + "dependencies": { + "dequal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/diff": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.2.tgz", + "integrity": "sha512-vtcDfH3TOjP8UekytvnHH1o1P4FcUdt4eQ1Y+Abap1tk/OB2MWQvcwS2ClCd1zuIhc3JKOx6p3kod8Vfys3E+A==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", 
+ "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT", + "peer": true + }, + "node_modules/electron-to-chromium": { + "version": "1.5.286", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.286.tgz", + "integrity": "sha512-9tfDXhJ4RKFNerfjdCcZfufu49vg620741MNs26a9+bhLThdB+plgMeou98CAaHu/WATj2iHOOHTp1hWtABj2A==", + "license": "ISC" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "peer": true, + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/esbuild": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.12.tgz", + "integrity": "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.12", + "@esbuild/android-arm": "0.25.12", + "@esbuild/android-arm64": "0.25.12", + "@esbuild/android-x64": "0.25.12", + "@esbuild/darwin-arm64": "0.25.12", + "@esbuild/darwin-x64": "0.25.12", + "@esbuild/freebsd-arm64": "0.25.12", + "@esbuild/freebsd-x64": "0.25.12", + "@esbuild/linux-arm": "0.25.12", + "@esbuild/linux-arm64": "0.25.12", + "@esbuild/linux-ia32": "0.25.12", + "@esbuild/linux-loong64": "0.25.12", + "@esbuild/linux-mips64el": "0.25.12", + "@esbuild/linux-ppc64": "0.25.12", + "@esbuild/linux-riscv64": "0.25.12", + "@esbuild/linux-s390x": "0.25.12", + "@esbuild/linux-x64": "0.25.12", + "@esbuild/netbsd-arm64": "0.25.12", + "@esbuild/netbsd-x64": "0.25.12", + "@esbuild/openbsd-arm64": "0.25.12", + "@esbuild/openbsd-x64": "0.25.12", + "@esbuild/openharmony-arm64": "0.25.12", + "@esbuild/sunos-x64": "0.25.12", + "@esbuild/win32-arm64": "0.25.12", + "@esbuild/win32-ia32": "0.25.12", + "@esbuild/win32-x64": "0.25.12" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + 
}, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT", + "peer": true + }, + "node_modules/escape-string-regexp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/estree-util-is-identifier-name": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz", + "integrity": "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/eventsource": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.7.tgz", + "integrity": "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==", + "license": "MIT", + "peer": true, + "dependencies": { + "eventsource-parser": "^3.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/eventsource-parser": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.6.tgz", + "integrity": 
"sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/express": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/express/-/express-5.2.1.tgz", + "integrity": "sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw==", + "license": "MIT", + "peer": true, + "dependencies": { + "accepts": "^2.0.0", + "body-parser": "^2.2.1", + "content-disposition": "^1.0.0", + "content-type": "^1.0.5", + "cookie": "^0.7.1", + "cookie-signature": "^1.2.1", + "debug": "^4.4.0", + "depd": "^2.0.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "finalhandler": "^2.1.0", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "merge-descriptors": "^2.0.0", + "mime-types": "^3.0.0", + "on-finished": "^2.4.1", + "once": "^1.4.0", + "parseurl": "^1.3.3", + "proxy-addr": "^2.0.7", + "qs": "^6.14.0", + "range-parser": "^1.2.1", + "router": "^2.2.0", + "send": "^1.1.0", + "serve-static": "^2.2.0", + "statuses": "^2.0.1", + "type-is": "^2.0.1", + "vary": "^1.1.2" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express-rate-limit": { + "version": "8.2.1", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-8.2.1.tgz", + "integrity": "sha512-PCZEIEIxqwhzw4KF0n7QF4QqruVTcF73O5kFKUnGOyjbCCgizBBiFaYpd/fnBLUMPw/BWw9OsiN7GgrNYr7j6g==", + "license": "MIT", + "peer": true, + "dependencies": { + "ip-address": "10.0.1" + }, + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/express-rate-limit" + }, + "peerDependencies": { + "express": ">= 4.11" + } + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": 
"sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "license": "MIT" + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT", + "peer": true + }, + "node_modules/fast-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz", + "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause", + "peer": true + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/finalhandler": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.1.tgz", + "integrity": "sha512-S8KoZgRZN+a5rNwqTxlZZePjT/4cnm0ROV70LedRHZ0p8u9fRID0hJUZQpkKLzro8LfmC8sx23bY6tVNxv8pQA==", + "license": "MIT", + "peer": true, + "dependencies": { + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/focus-visible": { + "version": "5.2.1", + "resolved": 
"https://registry.npmjs.org/focus-visible/-/focus-visible-5.2.1.tgz", + "integrity": "sha512-8Bx950VD1bWTQJEH/AM6SpEk+SU55aVnp4Ujhuuxy3eMEBCRwBnTBnVXr9YAPvZL3/CNjCa8u4IWfNmEO53whA==", + "license": "W3C" + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "peer": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/fzy.js": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/fzy.js/-/fzy.js-0.4.1.tgz", + "integrity": "sha512-4sPVXf+9oGhzg2tYzgWe4hgAY0wEbkqeuKVEgdnqX8S8VcLosQsDjb0jV+f5uoQlf8INWId1w0IGoufAoik1TA==", + "license": "MIT" + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": 
"https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "peer": true, + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "license": "MIT", + "peer": true, 
+ "engines": { + "node": ">=4" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hast-util-to-jsx-runtime": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.6.tgz", + "integrity": "sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "devlop": "^1.0.0", + "estree-util-is-identifier-name": "^3.0.0", + "hast-util-whitespace": "^3.0.0", + "mdast-util-mdx-expression": "^2.0.0", + "mdast-util-mdx-jsx": "^3.0.0", + "mdast-util-mdxjs-esm": "^2.0.0", + "property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0", + "style-to-js": "^1.0.0", + "unist-util-position": "^5.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-whitespace": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz", + "integrity": 
"sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/history": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/history/-/history-5.3.0.tgz", + "integrity": "sha512-ZqaKwjjrAYUYfLG+htGaIIZ4nioX2L70ZUMIFysS3xvBsSG4x/n1V6TXV3N8ZYNuFGlDirFg32T7B6WOUPDYcQ==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.7.6" + } + }, + "node_modules/hoist-non-react-statics": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", + "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", + "license": "BSD-3-Clause", + "peer": true, + "dependencies": { + "react-is": "^16.7.0" + } + }, + "node_modules/hoist-non-react-statics/node_modules/react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", + "license": "MIT", + "peer": true + }, + "node_modules/hono": { + "version": "4.11.7", + "resolved": "https://registry.npmjs.org/hono/-/hono-4.11.7.tgz", + "integrity": "sha512-l7qMiNee7t82bH3SeyUCt9UF15EVmaBvsppY2zQtrbIhl/yzBTny+YUxsVjSjQ6gaqaeVtZmGocom8TzBlA4Yw==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=16.9.0" + } + }, + "node_modules/html-url-attributes": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/html-url-attributes/-/html-url-attributes-3.0.1.tgz", + "integrity": "sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + 
"node_modules/http-errors": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz", + "integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "depd": "~2.0.0", + "inherits": "~2.0.4", + "setprototypeof": "~1.2.0", + "statuses": "~2.0.2", + "toidentifier": "~1.0.1" + }, + "engines": { + "node": ">= 0.8" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/iconv-lite": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.2.tgz", + "integrity": "sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==", + "license": "MIT", + "peer": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC", + "peer": true + }, + "node_modules/inline-style-parser": { + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.7.tgz", + "integrity": "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA==", + "license": "MIT" + }, + "node_modules/ip-address": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.0.1.tgz", + "integrity": "sha512-NWv9YLW4PoW2B7xtzaS3NCot75m6nK7Icdv0o3lfMceJVRfSoQwqD4wEH5rLwoKJwUiZ/rfpiVBhnaF0FK4HoA==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 12" + } + }, + "node_modules/ipaddr.js": { + "version": 
"1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-alphabetical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz", + "integrity": "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-alphanumerical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz", + "integrity": "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==", + "license": "MIT", + "dependencies": { + "is-alphabetical": "^2.0.0", + "is-decimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-buffer": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz", + "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/is-decimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz", + "integrity": "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, 
+ "node_modules/is-hexadecimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz", + "integrity": "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", + "license": "MIT", + "peer": true + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, + "node_modules/jose": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/jose/-/jose-6.1.3.tgz", + "integrity": "sha512-0TpaTfihd4QMNwrz/ob2Bp7X04yuxJkjRGi4aKmOqwhov54i6u79oCv7T+C7lo70MKH6BesI3vscD1yb/yzKXQ==", + "license": "MIT", + "peer": true, + "funding": { + "url": "https://github.com/sponsors/panva" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + 
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "license": "MIT" + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "license": "MIT", + "peer": true + }, + "node_modules/json-schema-typed": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/json-schema-typed/-/json-schema-typed-8.0.2.tgz", + "integrity": "sha512-fQhoXdcvc3V28x7C7BMs4P5+kNlgUURe2jmUT1T//oBRMDrqy1QPelJimwZGo7Hg9VPV3EQV5Bnq4hbFy2vetA==", + "license": "BSD-2-Clause", + "peer": true + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/kleur": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/lodash": { + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", + "integrity": 
"sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", + "license": "MIT", + "peer": true + }, + "node_modules/lodash.isempty": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz", + "integrity": "sha512-oKMuF3xEeqDltrGMfDxAPGIVMSSRv8tbRSODbrs4KGsRRLEhrW8N8Rd4DRgB2+621hY8A8XwwrTVhXWpxFvMzg==", + "license": "MIT" + }, + "node_modules/lodash.isobject": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/lodash.isobject/-/lodash.isobject-3.0.2.tgz", + "integrity": "sha512-3/Qptq2vr7WeJbB4KHUSKlq8Pl7ASXi3UG6CMbBm8WRtXi8+GHm7mKaU3urfpSEzWe2wCIChs6/sdocUsTKJiA==", + "license": "MIT" + }, + "node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/markdown-table": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz", + "integrity": 
"sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/mdast-util-definitions": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-5.1.2.tgz", + "integrity": "sha512-8SVPMuHqlPME/z3gqVwWY4zVXn8lqKv/pAhC57FuJ40ImXyBpmO5ukh98zB2v7Blql2FiHjHv9LVztSIqjY+MA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "unist-util-visit": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-definitions/node_modules/@types/mdast": { + "version": "3.0.15", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.15.tgz", + "integrity": "sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2" + } + }, + "node_modules/mdast-util-definitions/node_modules/@types/unist": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", + "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==", + "license": "MIT" + }, + "node_modules/mdast-util-definitions/node_modules/unist-util-is": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + "integrity": 
"sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-definitions/node_modules/unist-util-visit": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz", + "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^5.1.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz", + "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-find-and-replace": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.2.tgz", + "integrity": "sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "escape-string-regexp": "^5.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-from-markdown": { + "version": "2.0.2", + "resolved": 
"https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz", + "integrity": "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark": "^4.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.1.0.tgz", + "integrity": "sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==", + "license": "MIT", + "dependencies": { + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-gfm-autolink-literal": "^2.0.0", + "mdast-util-gfm-footnote": "^2.0.0", + "mdast-util-gfm-strikethrough": "^2.0.0", + "mdast-util-gfm-table": "^2.0.0", + "mdast-util-gfm-task-list-item": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-autolink-literal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.1.tgz", + "integrity": "sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "ccount": "^2.0.0", + "devlop": "^1.0.0", + "mdast-util-find-and-replace": "^3.0.0", + "micromark-util-character": 
"^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-footnote": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.1.0.tgz", + "integrity": "sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.1.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-strikethrough": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz", + "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-table": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz", + "integrity": "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "markdown-table": "^3.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-task-list-item": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz", + "integrity": "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.1.tgz", + "integrity": "sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.2.0.tgz", + "integrity": "sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "ccount": "^2.0.0", + "devlop": "^1.1.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "parse-entities": "^4.0.0", + "stringify-entities": "^4.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz", + "integrity": "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-phrasing": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz", + "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-hast": { + "version": "13.2.1", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.1.tgz", + "integrity": "sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@ungap/structured-clone": "^1.0.0", + "devlop": "^1.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "trim-lines": "^3.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-markdown": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz", + "integrity": "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==", + "license": "MIT", + 
"dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^4.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "unist-util-visit": "^5.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz", + "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/merge-descriptors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/micromark": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz", + "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": 
"https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-core-commonmark": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz", + "integrity": "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-factory-destination": "^2.0.0", + "micromark-factory-label": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-factory-title": "^2.0.0", + "micromark-factory-whitespace": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-html-tag-name": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-extension-gfm": { 
+ "version": "3.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz", + "integrity": "sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==", + "license": "MIT", + "dependencies": { + "micromark-extension-gfm-autolink-literal": "^2.0.0", + "micromark-extension-gfm-footnote": "^2.0.0", + "micromark-extension-gfm-strikethrough": "^2.0.0", + "micromark-extension-gfm-table": "^2.0.0", + "micromark-extension-gfm-tagfilter": "^2.0.0", + "micromark-extension-gfm-task-list-item": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-autolink-literal": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz", + "integrity": "sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==", + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-footnote": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz", + "integrity": "sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + 
"micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-strikethrough": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz", + "integrity": "sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-table": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.1.tgz", + "integrity": "sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-tagfilter": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz", + "integrity": "sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==", + "license": "MIT", + "dependencies": { + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + 
"node_modules/micromark-extension-gfm-task-list-item": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz", + "integrity": "sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-factory-destination": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz", + "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-label": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz", + "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-space": { + 
"version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", + "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-title": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz", + "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-whitespace": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz", + "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-character": { + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", + "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-chunked": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz", + "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-classify-character": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz", + "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-combine-extensions": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz", + "integrity": 
"sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-chunked": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-decode-numeric-character-reference": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz", + "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-decode-string": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz", + "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-encode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz", + "integrity": 
"sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-html-tag-name": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz", + "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-normalize-identifier": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz", + "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-resolve-all": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz", + "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-types": "^2.0.0" + } + }, + 
"node_modules/micromark-util-sanitize-uri": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz", + "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-subtokenize": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz", + "integrity": "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-symbol": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", + "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-types": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz", + "integrity": 
"sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.2.tgz", + "integrity": "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==", + "license": "MIT", + "peer": true, + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/mri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", + "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + 
"license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/negotiator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-releases": { + "version": "2.0.27", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", + "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", + "license": "MIT" + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": 
"sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "peer": true, + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "peer": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/parse-entities": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.2.tgz", + "integrity": "sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0", + "character-entities-legacy": "^3.0.0", + "character-reference-invalid": "^2.0.0", + "decode-named-character-reference": "^1.0.0", + "is-alphanumerical": "^2.0.0", + "is-decimal": "^2.0.0", + "is-hexadecimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/parse-entities/node_modules/@types/unist": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", + "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==", + "license": "MIT" + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": 
"sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-to-regexp": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.3.0.tgz", + "integrity": "sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA==", + "license": "MIT", + "peer": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pkce-challenge": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.1.tgz", + "integrity": "sha512-wQ0b/W4Fr01qtpHlqSqspcj3EhBvimsdh0KlHhH8HRZnMsEa0ea2fTULOXOS9ccQr3om+GcGRk4e+isrZWV8qQ==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=16.20.0" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", 
+ "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "license": "MIT", + "peer": true + }, + "node_modules/prop-types": { + "version": "15.8.1", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" + } + }, + "node_modules/prop-types/node_modules/react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", + "license": "MIT" + }, + "node_modules/property-information": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-7.1.0.tgz", + "integrity": "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", + "peer": true, + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + 
"node_modules/qs": { + "version": "6.14.2", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.2.tgz", + "integrity": "sha512-V/yCWTTF7VJ9hIh18Ugr2zhJMP01MY7c5kh4J870L7imm6/DIzBsNLTXzMwUA3yZ5b/KBqLx8Kp3uRvd7xSe3Q==", + "license": "BSD-3-Clause", + "peer": true, + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.2.tgz", + "integrity": "sha512-K5zQjDllxWkf7Z5xJdV0/B0WTNqx6vxG70zJE4N0kBs4LovmEYWJzQGxC9bS9RAKu3bgM40lrd5zoLJ12MQ5BA==", + "license": "MIT", + "peer": true, + "dependencies": { + "bytes": "~3.1.2", + "http-errors": "~2.0.1", + "iconv-lite": "~0.7.0", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/react": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "peerDependencies": { + "react": "^18.3.1" + } + }, + "node_modules/react-intersection-observer": { + 
"version": "9.16.0", + "resolved": "https://registry.npmjs.org/react-intersection-observer/-/react-intersection-observer-9.16.0.tgz", + "integrity": "sha512-w9nJSEp+DrW9KmQmeWHQyfaP6b03v+TdXynaoA964Wxt7mdR3An11z4NNCQgL4gKSK7y1ver2Fq+JKH6CWEzUA==", + "license": "MIT", + "peerDependencies": { + "react": "^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^17.0.0 || ^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "react-dom": { + "optional": true + } + } + }, + "node_modules/react-is": { + "version": "19.2.4", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-19.2.4.tgz", + "integrity": "sha512-W+EWGn2v0ApPKgKKCy/7s7WHXkboGcsrXE+2joLyVxkbyVQfO3MUEaUQDHoSmb8TFFrSKYa9mw64WZHNHSDzYA==", + "license": "MIT", + "peer": true + }, + "node_modules/react-markdown": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/react-markdown/-/react-markdown-10.1.0.tgz", + "integrity": "sha512-qKxVopLT/TyA6BX3Ue5NwabOsAzm0Q7kAPwq6L+wWDwisYs7R8vZ0nRXqq6rkueboxpkjvLGU9fWifiX/ZZFxQ==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "hast-util-to-jsx-runtime": "^2.0.0", + "html-url-attributes": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + "remark-parse": "^11.0.0", + "remark-rehype": "^11.0.0", + "unified": "^11.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "peerDependencies": { + "@types/react": ">=18", + "react": ">=18" + } + }, + "node_modules/react-refresh": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", + "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/remark-gfm": { + "version": "4.0.1", + "resolved": 
"https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.1.tgz", + "integrity": "sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-gfm": "^3.0.0", + "micromark-extension-gfm": "^3.0.0", + "remark-parse": "^11.0.0", + "remark-stringify": "^11.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-parse": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz", + "integrity": "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype": { + "version": "11.1.2", + "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.2.tgz", + "integrity": "sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "mdast-util-to-hast": "^13.0.0", + "unified": "^11.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-stringify": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-11.0.0.tgz", + "integrity": "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-to-markdown": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": 
"opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/rollup": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.57.1.tgz", + "integrity": "sha512-oQL6lgK3e2QZeQ7gcgIkS2YZPg5slw37hYufJ3edKlfQSGGm8ICoxswK15ntSzF/a8+h7ekRy7k7oWc3BQ7y8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.57.1", + "@rollup/rollup-android-arm64": "4.57.1", + "@rollup/rollup-darwin-arm64": "4.57.1", + "@rollup/rollup-darwin-x64": "4.57.1", + "@rollup/rollup-freebsd-arm64": "4.57.1", + "@rollup/rollup-freebsd-x64": "4.57.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.57.1", + "@rollup/rollup-linux-arm-musleabihf": "4.57.1", + "@rollup/rollup-linux-arm64-gnu": "4.57.1", + "@rollup/rollup-linux-arm64-musl": "4.57.1", + "@rollup/rollup-linux-loong64-gnu": "4.57.1", + "@rollup/rollup-linux-loong64-musl": "4.57.1", + "@rollup/rollup-linux-ppc64-gnu": "4.57.1", + "@rollup/rollup-linux-ppc64-musl": "4.57.1", + "@rollup/rollup-linux-riscv64-gnu": "4.57.1", + "@rollup/rollup-linux-riscv64-musl": "4.57.1", + "@rollup/rollup-linux-s390x-gnu": "4.57.1", + "@rollup/rollup-linux-x64-gnu": "4.57.1", + "@rollup/rollup-linux-x64-musl": "4.57.1", + "@rollup/rollup-openbsd-x64": "4.57.1", + "@rollup/rollup-openharmony-arm64": "4.57.1", + "@rollup/rollup-win32-arm64-msvc": "4.57.1", + "@rollup/rollup-win32-ia32-msvc": "4.57.1", + "@rollup/rollup-win32-x64-gnu": "4.57.1", + 
"@rollup/rollup-win32-x64-msvc": "4.57.1", + "fsevents": "~2.3.2" + } + }, + "node_modules/router": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", + "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "debug": "^4.4.0", + "depd": "^2.0.0", + "is-promise": "^4.0.0", + "parseurl": "^1.3.3", + "path-to-regexp": "^8.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/sade": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz", + "integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==", + "license": "MIT", + "dependencies": { + "mri": "^1.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT", + "peer": true + }, + "node_modules/scheduler": { + "version": "0.23.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", + "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + } + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/send": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.1.tgz", + "integrity": "sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ==", 
+ "license": "MIT", + "peer": true, + "dependencies": { + "debug": "^4.4.3", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^2.0.0", + "http-errors": "^2.0.1", + "mime-types": "^3.0.2", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.2" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/serve-static": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.1.tgz", + "integrity": "sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw==", + "license": "MIT", + "peer": true, + "dependencies": { + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.2.0" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC", + "peer": true + }, + "node_modules/shallowequal": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz", + "integrity": "sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==", + "license": "MIT", + "peer": true + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + 
"version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "peer": true, + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "peer": true, + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/statuses": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/stringify-entities": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz", + "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==", + "license": "MIT", + "dependencies": { + "character-entities-html4": "^2.0.0", + "character-entities-legacy": "^3.0.0" + }, + "funding": { + "type": "github", + "url": 
"https://github.com/sponsors/wooorm" + } + }, + "node_modules/style-to-js": { + "version": "1.1.21", + "resolved": "https://registry.npmjs.org/style-to-js/-/style-to-js-1.1.21.tgz", + "integrity": "sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ==", + "license": "MIT", + "dependencies": { + "style-to-object": "1.0.14" + } + }, + "node_modules/style-to-object": { + "version": "1.0.14", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.14.tgz", + "integrity": "sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw==", + "license": "MIT", + "dependencies": { + "inline-style-parser": "0.2.7" + } + }, + "node_modules/styled-components": { + "version": "5.3.11", + "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.11.tgz", + "integrity": "sha512-uuzIIfnVkagcVHv9nE0VPlHPSCmXIUGKfJ42LNjxCCTDTL5sgnJ8Z7GZBq0EnLYGln77tPpEpExt2+qa+cZqSw==", + "license": "MIT", + "peer": true, + "dependencies": { + "@babel/helper-module-imports": "^7.0.0", + "@babel/traverse": "^7.4.5", + "@emotion/is-prop-valid": "^1.1.0", + "@emotion/stylis": "^0.8.4", + "@emotion/unitless": "^0.7.4", + "babel-plugin-styled-components": ">= 1.12.0", + "css-to-react-native": "^3.0.0", + "hoist-non-react-statics": "^3.0.0", + "shallowequal": "^1.1.0", + "supports-color": "^5.5.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/styled-components" + }, + "peerDependencies": { + "react": ">= 16.8.0", + "react-dom": ">= 16.8.0", + "react-is": ">= 16.8.0" + } + }, + "node_modules/styled-system": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/styled-system/-/styled-system-5.1.5.tgz", + "integrity": "sha512-7VoD0o2R3RKzOzPK0jYrVnS8iJdfkKsQJNiLRDjikOpQVqQHns/DXWaPZOH4tIKkhAT7I6wIsy9FWTWh2X3q+A==", + "license": "MIT", + "dependencies": { + "@styled-system/background": "^5.1.2", + 
"@styled-system/border": "^5.1.5", + "@styled-system/color": "^5.1.2", + "@styled-system/core": "^5.1.2", + "@styled-system/flexbox": "^5.1.2", + "@styled-system/grid": "^5.1.2", + "@styled-system/layout": "^5.1.2", + "@styled-system/position": "^5.1.2", + "@styled-system/shadow": "^5.1.2", + "@styled-system/space": "^5.1.2", + "@styled-system/typography": "^5.1.2", + "@styled-system/variant": "^5.1.5", + "object-assign": "^4.1.1" + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "license": "MIT", + "peer": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": 
"sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=0.6" + } + }, + "node_modules/trim-lines": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz", + "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/trough": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.2.0.tgz", + "integrity": "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/type-is": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "license": "MIT", + "peer": true, + "dependencies": { + "content-type": "^1.0.5", + "media-typer": 
"^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", + "dev": true, + "license": "MIT" + }, + "node_modules/unified": { + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz", + "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "bail": "^2.0.0", + "devlop": "^1.0.0", + "extend": "^3.0.0", + "is-plain-obj": "^4.0.0", + "trough": "^2.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-generated": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-2.0.1.tgz", + "integrity": "sha512-qF72kLmPxAw0oN2fwpWIqbXAVyEqUzDHMsbtPvOudIlUzXYFIeQIuxXQCRCFh22B7cixvU0MG7m3MW8FTq/S+A==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-is": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.1.tgz", + "integrity": "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==", + "license": "MIT", + "dependencies": 
{ + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-position": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", + "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.1.0.tgz", + "integrity": "sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit-parents": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.2.tgz", + "integrity": "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/unified" + } + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/uvu": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/uvu/-/uvu-0.5.6.tgz", + "integrity": "sha512-+g8ENReyr8YsOc6fv/NVJs2vFdHBnBNdfE49rshrTzDWOlUx4Gq7KOS2GD8eqhy2j+Ejq29+SbKH8yjkAqXqoA==", + "license": "MIT", + "dependencies": { + "dequal": "^2.0.0", + "diff": "^5.0.0", + "kleur": "^4.0.3", + "sade": "^1.7.3" + }, + "bin": { + "uvu": "bin.js" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/vfile": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz", + "integrity": 
"sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vfile-message": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz", + "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vite": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.4.1.tgz", + "integrity": "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.25.0", + "fdir": "^6.4.4", + "picomatch": "^4.0.2", + "postcss": "^8.5.3", + "rollup": "^4.34.9", + "tinyglobby": "^0.2.13" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "jiti": ">=1.21.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + 
"optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vite-plugin-singlefile": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/vite-plugin-singlefile/-/vite-plugin-singlefile-2.3.0.tgz", + "integrity": "sha512-DAcHzYypM0CasNLSz/WG0VdKOCxGHErfrjOoyIPiNxTPTGmO6rRD/te93n1YL/s+miXq66ipF1brMBikf99c6A==", + "dev": true, + "license": "MIT", + "dependencies": { + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">18.0.0" + }, + "peerDependencies": { + "rollup": "^4.44.1", + "vite": "^5.4.11 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/vite/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/vite/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC", + "peer": true + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "license": "ISC" + }, + "node_modules/zod": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/zod/-/zod-4.3.6.tgz", + "integrity": "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==", + "license": "MIT", + "peer": true, + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-to-json-schema": { + "version": "3.25.1", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.25.1.tgz", + "integrity": "sha512-pM/SU9d3YAggzi6MtR4h7ruuQlqKtad8e9S0fmxcMi+ueAK5Korys/aWcV9LIIHTVbj01NdzxcnXSN+O74ZIVA==", + "license": "ISC", + "peer": true, + "peerDependencies": { + "zod": "^3.25 || ^4" + } + }, + "node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + } + } +} diff --git a/ui/package.json b/ui/package.json new file mode 100644 index 0000000000..6b26ca3161 --- /dev/null +++ b/ui/package.json @@ -0,0 +1,36 @@ +{ + "name": "@github/mcp-server-ui", + "version": "1.0.0", + "private": true, + "type": "module", + "description": "MCP App UIs for github-mcp-server using Primer React", + "scripts": { + "build": "npm run build:get-me && npm run build:issue-write && npm run build:pr-write", + "build:get-me": "cross-env APP=get-me vite build", + 
"build:issue-write": "cross-env APP=issue-write vite build", + "build:pr-write": "cross-env APP=pr-write vite build", + "dev": "npm run build", + "typecheck": "tsc --noEmit", + "clean": "rm -rf dist" + }, + "dependencies": { + "@github/markdown-toolbar-element": "^2.2.3", + "@modelcontextprotocol/ext-apps": "^1.0.0", + "@primer/octicons-react": "^19.0.0", + "@primer/react": "^36.0.0", + "react": "^18.0.0", + "react-dom": "^18.0.0", + "react-markdown": "^10.1.0", + "remark-gfm": "^4.0.1" + }, + "devDependencies": { + "@types/node": "^25.2.0", + "@types/react": "^18.0.0", + "@types/react-dom": "^18.0.0", + "@vitejs/plugin-react": "^4.3.0", + "cross-env": "^7.0.3", + "typescript": "^5.7.0", + "vite": "^6.0.0", + "vite-plugin-singlefile": "^2.0.0" + } +} diff --git a/ui/src/apps/get-me/App.tsx b/ui/src/apps/get-me/App.tsx new file mode 100644 index 0000000000..a20aae17c5 --- /dev/null +++ b/ui/src/apps/get-me/App.tsx @@ -0,0 +1,180 @@ +import { StrictMode, useState } from "react"; +import { createRoot } from "react-dom/client"; +import { Avatar, Box, Text, Link, Heading, Spinner } from "@primer/react"; +import { + OrganizationIcon, + LocationIcon, + LinkIcon, + MailIcon, + PeopleIcon, + RepoIcon, + PersonIcon, +} from "@primer/octicons-react"; +import { AppProvider } from "../../components/AppProvider"; +import { useMcpApp } from "../../hooks/useMcpApp"; + +interface UserData { + login: string; + avatar_url?: string; + details?: { + name?: string; + company?: string; + location?: string; + blog?: string; + email?: string; + twitter_username?: string; + public_repos?: number; + followers?: number; + following?: number; + }; +} + +function AvatarWithFallback({ src, login, size }: { src?: string; login: string; size: number }) { + const [imgError, setImgError] = useState(false); + + if (!src || imgError) { + return ( + + + + ); + } + + return ( + setImgError(true)} + /> + ); +} + +function UserCard({ user }: { user: UserData }) { + const d = user.details || {}; + + return 
( + + {/* Header with avatar and name */} + + + + + {d.name || user.login} + + @{user.login} + + + + {/* Info grid */} + + {d.company && ( + <> + + {d.company} + + )} + {d.location && ( + <> + + {d.location} + + )} + {d.blog && ( + <> + + {d.blog} + + )} + {d.email && ( + <> + + {d.email} + + )} + + + {/* Stats */} + + + + {d.public_repos ?? 0} + + Repos + + + + {d.followers ?? 0} + + Followers + + + + {d.following ?? 0} + + Following + + + + ); +} + +function GetMeApp() { + const { error, toolResult } = useMcpApp({ + appName: "github-mcp-server-get-me", + }); + + if (error) { + return Error: {error.message}; + } + + if (!toolResult) { + return ( + + + Loading user data... + + ); + } + + // Parse user data from tool result + const textContent = toolResult.content?.find((c: { type: string }) => c.type === "text"); + if (!textContent || !("text" in textContent)) { + return No user data in response; + } + + try { + const userData = JSON.parse(textContent.text as string) as UserData; + return ; + } catch { + return Failed to parse user data; + } +} + +createRoot(document.getElementById("root")!).render( + + + + + +); diff --git a/ui/src/apps/get-me/index.html b/ui/src/apps/get-me/index.html new file mode 100644 index 0000000000..dee7373d0c --- /dev/null +++ b/ui/src/apps/get-me/index.html @@ -0,0 +1,13 @@ + + + + + + + GitHub User Profile + + +
+ + + diff --git a/ui/src/apps/issue-write/App.tsx b/ui/src/apps/issue-write/App.tsx new file mode 100644 index 0000000000..de72b0a78a --- /dev/null +++ b/ui/src/apps/issue-write/App.tsx @@ -0,0 +1,318 @@ +import { StrictMode, useState, useCallback, useEffect } from "react"; +import { createRoot } from "react-dom/client"; +import { + Box, + Text, + TextInput, + Button, + Flash, + Spinner, + FormControl, +} from "@primer/react"; +import { + IssueOpenedIcon, + CheckCircleIcon, +} from "@primer/octicons-react"; +import { AppProvider } from "../../components/AppProvider"; +import { useMcpApp } from "../../hooks/useMcpApp"; +import { MarkdownEditor } from "../../components/MarkdownEditor"; + +interface IssueResult { + ID?: string; + number?: number; + title?: string; + body?: string; + url?: string; + html_url?: string; + URL?: string; +} + +function SuccessView({ + issue, + owner, + repo, + submittedTitle, + isUpdate, +}: { + issue: IssueResult; + owner: string; + repo: string; + submittedTitle: string; + isUpdate: boolean; +}) { + const issueUrl = issue.html_url || issue.url || issue.URL || "#"; + + return ( + + + + + + + {isUpdate ? 
"Issue updated successfully" : "Issue created successfully"} + + + + + + + + + + {issue.title || submittedTitle} + {issue.number && ( + + #{issue.number} + + )} + + + {owner}/{repo} + + + + + ); +} + +function CreateIssueApp() { + const [title, setTitle] = useState(""); + const [body, setBody] = useState(""); + const [isSubmitting, setIsSubmitting] = useState(false); + const [error, setError] = useState(null); + const [successIssue, setSuccessIssue] = useState(null); + + const { app, error: appError, toolInput, callTool } = useMcpApp({ + appName: "github-mcp-server-issue-write", + }); + + const method = (toolInput?.method as string) || "create"; + const issueNumber = toolInput?.issue_number as number | undefined; + const isUpdateMode = method === "update" && issueNumber !== undefined; + const owner = (toolInput?.owner as string) || ""; + const repo = (toolInput?.repo as string) || ""; + + // Pre-fill from toolInput + useEffect(() => { + if (toolInput?.title) setTitle(toolInput.title as string); + if (toolInput?.body) setBody(toolInput.body as string); + }, [toolInput]); + + const handleSubmit = useCallback(async () => { + if (!title.trim()) { + setError("Title is required"); + return; + } + if (!owner || !repo) { + setError("Repository information not available"); + return; + } + + setIsSubmitting(true); + setError(null); + + try { + const params: Record = { + method: isUpdateMode ? 
"update" : "create", + owner, + repo, + title: title.trim(), + body: body.trim(), + _ui_submitted: true + }; + + if (isUpdateMode && issueNumber) { + params.issue_number = issueNumber; + } + + const result = await callTool("issue_write", params); + + if (result.isError) { + const textContent = result.content?.find( + (c: { type: string }) => c.type === "text" + ); + setError( + (textContent as { text?: string })?.text || "Failed to create issue" + ); + } else { + const textContent = result.content?.find( + (c: { type: string }) => c.type === "text" + ); + if (textContent && "text" in textContent) { + try { + const issueData = JSON.parse(textContent.text as string); + setSuccessIssue(issueData); + } catch { + setSuccessIssue({ title, body }); + } + } + } + } catch (e) { + setError(`Error: ${e instanceof Error ? e.message : String(e)}`); + } finally { + setIsSubmitting(false); + } + }, [title, body, owner, repo, isUpdateMode, issueNumber, callTool]); + + if (appError) { + return ( + + Connection error: {appError.message} + + ); + } + + if (!app) { + return ( + + + + ); + } + + if (successIssue) { + return ( + + ); + } + + return ( + + {/* Header */} + + + + + + {isUpdateMode ? `Update issue #${issueNumber}` : "New issue"} + + + {owner}/{repo} + + + + {/* Error banner */} + {error && ( + + {error} + + )} + + {/* Title */} + + + Title + + setTitle(e.target.value)} + placeholder="Title" + block + contrast + /> + + + {/* Description */} + + + Description + + + + + {/* Submit button */} + + + + + ); +} + +createRoot(document.getElementById("root")!).render( + + + + + +); diff --git a/ui/src/apps/issue-write/index.html b/ui/src/apps/issue-write/index.html new file mode 100644 index 0000000000..e1e34c391a --- /dev/null +++ b/ui/src/apps/issue-write/index.html @@ -0,0 +1,12 @@ + + + + + + Create GitHub Issue + + +
+ + + diff --git a/ui/src/apps/pr-write/App.tsx b/ui/src/apps/pr-write/App.tsx new file mode 100644 index 0000000000..f5ddbdf29d --- /dev/null +++ b/ui/src/apps/pr-write/App.tsx @@ -0,0 +1,337 @@ +import { StrictMode, useState, useCallback, useEffect } from "react"; +import { createRoot } from "react-dom/client"; +import { + Box, + Text, + TextInput, + Button, + Flash, + Spinner, + FormControl, + ActionMenu, + ActionList, + Checkbox, + ButtonGroup, +} from "@primer/react"; +import { + GitPullRequestIcon, + CheckCircleIcon, + TriangleDownIcon, +} from "@primer/octicons-react"; +import { AppProvider } from "../../components/AppProvider"; +import { useMcpApp } from "../../hooks/useMcpApp"; +import { MarkdownEditor } from "../../components/MarkdownEditor"; + +interface PRResult { + ID?: string; + number?: number; + title?: string; + url?: string; + html_url?: string; + URL?: string; +} + +function SuccessView({ + pr, + owner, + repo, + submittedTitle, +}: { + pr: PRResult; + owner: string; + repo: string; + submittedTitle: string; +}) { + const prUrl = pr.html_url || pr.url || pr.URL || "#"; + + return ( + + + + + + + Pull request created successfully + + + + + + + + + + {pr.title || submittedTitle} + {pr.number && ( + + #{pr.number} + + )} + + + {owner}/{repo} + + + + + ); +} + +function CreatePRApp() { + const [title, setTitle] = useState(""); + const [body, setBody] = useState(""); + const [isSubmitting, setIsSubmitting] = useState(false); + const [error, setError] = useState(null); + const [successPR, setSuccessPR] = useState(null); + + const [isDraft, setIsDraft] = useState(false); + const [maintainerCanModify, setMaintainerCanModify] = useState(true); + + const { app, error: appError, toolInput, callTool } = useMcpApp({ + appName: "github-mcp-server-create-pull-request", + }); + + const owner = (toolInput?.owner as string) || ""; + const repo = (toolInput?.repo as string) || ""; + const head = (toolInput?.head as string) || ""; + const base = (toolInput?.base as 
string) || ""; + const [submittedTitle, setSubmittedTitle] = useState(""); + + // Pre-fill from toolInput + useEffect(() => { + if (toolInput?.title) setTitle(toolInput.title as string); + if (toolInput?.body) setBody(toolInput.body as string); + if (toolInput?.draft) setIsDraft(toolInput.draft as boolean); + if (toolInput?.maintainer_can_modify !== undefined) { + setMaintainerCanModify(toolInput.maintainer_can_modify as boolean); + } + }, [toolInput]); + + const handleSubmit = useCallback(async () => { + if (!title.trim()) { setError("Title is required"); return; } + if (!owner || !repo) { setError("Repository information not available"); return; } + + setIsSubmitting(true); + setError(null); + setSubmittedTitle(title); + + try { + const result = await callTool("create_pull_request", { + owner, repo, + title: title.trim(), + body: body.trim(), + head, + base, + draft: isDraft, + maintainer_can_modify: maintainerCanModify, + _ui_submitted: true + }); + + if (result.isError) { + const errorText = result.content?.find((c) => c.type === "text"); + const errorMessage = errorText && errorText.type === "text" ? errorText.text : "Failed to create pull request"; + setError(errorMessage); + } else { + const textContent = result.content?.find((c) => c.type === "text"); + if (textContent && textContent.type === "text" && textContent.text) { + const prData = JSON.parse(textContent.text); + setSuccessPR(prData); + } + } + } catch (e) { + setError(e instanceof Error ? 
e.message : "An error occurred"); + } finally { + setIsSubmitting(false); + } + }, [title, body, owner, repo, head, base, isDraft, maintainerCanModify, callTool]); + + if (successPR) { + return ( + + + + ); + } + + if (!app && !appError) { + return ( + + + + + + ); + } + + if (appError) { + return ( + + {appError.message} + + ); + } + + return ( + + + {/* Header */} + + + + + New pull request + + {owner}/{repo} + + {head && base && ( + + {base} ← {head} + + )} + + + {/* Error banner */} + {error && {error}} + + {/* Title */} + + Title + setTitle(e.target.value)} + placeholder="Title" + block + contrast + /> + + + {/* Description */} + + + Description + + + + + {/* Options and Submit */} + + + setMaintainerCanModify(e.target.checked)} /> + Allow maintainer edits + + + + + + + + + + + setIsDraft(false)}> + + + + Create pull request + + Open a pull request that is ready for review + + + setIsDraft(true)}> + + + + Create draft pull request + + Cannot be merged until marked ready for review + + + + + + + + + + ); +} + +createRoot(document.getElementById("root")!).render( + + + +); diff --git a/ui/src/apps/pr-write/index.html b/ui/src/apps/pr-write/index.html new file mode 100644 index 0000000000..e05c57ed50 --- /dev/null +++ b/ui/src/apps/pr-write/index.html @@ -0,0 +1,12 @@ + + + + + + Create Pull Request + + +
+ + + diff --git a/ui/src/components/AppProvider.tsx b/ui/src/components/AppProvider.tsx new file mode 100644 index 0000000000..7848c38197 --- /dev/null +++ b/ui/src/components/AppProvider.tsx @@ -0,0 +1,26 @@ +import { ThemeProvider, BaseStyles, Box } from "@primer/react"; +import type { ReactNode } from "react"; +import { useEffect } from "react"; + +interface AppProviderProps { + children: ReactNode; +} + +export function AppProvider({ children }: AppProviderProps) { + useEffect(() => { + // Set up theme data attributes for proper Primer theming + const prefersDark = window.matchMedia("(prefers-color-scheme: dark)").matches; + const colorMode = prefersDark ? "dark" : "light"; + document.body.setAttribute("data-color-mode", colorMode); + document.body.setAttribute("data-light-theme", "light"); + document.body.setAttribute("data-dark-theme", "dark"); + }, []); + + return ( + + + {children} + + + ); +} diff --git a/ui/src/components/MarkdownEditor.tsx b/ui/src/components/MarkdownEditor.tsx new file mode 100644 index 0000000000..5ba25932d0 --- /dev/null +++ b/ui/src/components/MarkdownEditor.tsx @@ -0,0 +1,447 @@ +/** + * MarkdownEditor component using GitHub's official @github/markdown-toolbar-element + * with Primer React styling. This provides the same markdown editing experience + * used on github.com. 
+ * + * @see https://github.com/github/markdown-toolbar-element + */ +import { useId, useRef, useState, useEffect } from "react"; +import { Box, Text, Button, IconButton, useTheme } from "@primer/react"; +import { + BoldIcon, + ItalicIcon, + QuoteIcon, + CodeIcon, + LinkIcon, + ListUnorderedIcon, + ListOrderedIcon, + TasklistIcon, + MarkdownIcon, +} from "@primer/octicons-react"; +import Markdown from "react-markdown"; +import remarkGfm from "remark-gfm"; + +// Import and register the web component +import "@github/markdown-toolbar-element"; + +// Declare types for the web component elements +declare global { + namespace JSX { + interface IntrinsicElements { + "markdown-toolbar": React.DetailedHTMLProps< + React.HTMLAttributes & { for: string }, + HTMLElement + >; + "md-bold": React.DetailedHTMLProps< + React.HTMLAttributes, + HTMLElement + >; + "md-italic": React.DetailedHTMLProps< + React.HTMLAttributes, + HTMLElement + >; + "md-quote": React.DetailedHTMLProps< + React.HTMLAttributes, + HTMLElement + >; + "md-code": React.DetailedHTMLProps< + React.HTMLAttributes, + HTMLElement + >; + "md-link": React.DetailedHTMLProps< + React.HTMLAttributes, + HTMLElement + >; + "md-unordered-list": React.DetailedHTMLProps< + React.HTMLAttributes, + HTMLElement + >; + "md-ordered-list": React.DetailedHTMLProps< + React.HTMLAttributes, + HTMLElement + >; + "md-task-list": React.DetailedHTMLProps< + React.HTMLAttributes, + HTMLElement + >; + } + } +} + +interface MarkdownEditorProps { + value: string; + onChange: (value: string) => void; + placeholder?: string; + minHeight?: number; +} + +export function MarkdownEditor({ + value, + onChange, + placeholder = "Add a description...", + minHeight = 150, +}: MarkdownEditorProps) { + const textareaId = useId(); + const textareaRef = useRef(null); + const [viewMode, setViewMode] = useState<"write" | "preview">("write"); + const { colorScheme } = useTheme(); + const isDark = colorScheme === "dark" || colorScheme === "dark_dimmed"; + + // 
Sync external value changes to textarea + useEffect(() => { + if (textareaRef.current && textareaRef.current.value !== value) { + textareaRef.current.value = value; + } + }, [value]); + + // Handle Enter key for list continuation + const handleKeyDown = (e: React.KeyboardEvent) => { + if (e.key !== "Enter" || e.shiftKey) return; + + const textarea = textareaRef.current; + if (!textarea) return; + + const { selectionStart, value: currentValue } = textarea; + + // Get the current line + const beforeCursor = currentValue.substring(0, selectionStart); + const lastNewline = beforeCursor.lastIndexOf("\n"); + const currentLine = beforeCursor.substring(lastNewline + 1); + + // Match different list patterns + const unorderedMatch = currentLine.match(/^(\s*)([-*])\s/); + const orderedMatch = currentLine.match(/^(\s*)(\d+)\.\s/); + const taskMatch = currentLine.match(/^(\s*)([-*])\s\[[ x]\]\s/); + + let prefix = ""; + let isEmpty = false; + + if (taskMatch) { + const indent = taskMatch[1]; + const marker = taskMatch[2]; + // Check if the line only has the list marker with no content + isEmpty = currentLine.trim() === `${marker} [ ]` || currentLine.trim() === `${marker} [x]`; + prefix = `${indent}${marker} [ ] `; + } else if (orderedMatch) { + const indent = orderedMatch[1]; + const num = parseInt(orderedMatch[2], 10); + // Check if the line only has the list marker + isEmpty = currentLine.trim() === `${num}.`; + prefix = `${indent}${num + 1}. 
`; + } else if (unorderedMatch) { + const indent = unorderedMatch[1]; + const marker = unorderedMatch[2]; + // Check if the line only has the list marker + isEmpty = currentLine.trim() === marker; + prefix = `${indent}${marker} `; + } + + if (prefix) { + e.preventDefault(); + + if (isEmpty) { + // If just the list marker, remove it and exit list + const newValue = currentValue.substring(0, lastNewline + 1) + currentValue.substring(selectionStart); + onChange(newValue); + // Set cursor position after React updates + requestAnimationFrame(() => { + if (textarea) { + textarea.selectionStart = textarea.selectionEnd = lastNewline + 1; + textarea.focus(); + } + }); + } else { + // Continue the list on the next line + const afterCursor = currentValue.substring(selectionStart); + const newValue = beforeCursor + "\n" + prefix + afterCursor; + onChange(newValue); + // Set cursor position after the prefix + const newCursorPos = selectionStart + 1 + prefix.length; + requestAnimationFrame(() => { + if (textarea) { + textarea.selectionStart = textarea.selectionEnd = newCursorPos; + textarea.focus(); + } + }); + } + } + }; + + return ( + + {/* Header with tabs and toolbar */} + + {/* Write/Preview tabs */} + + + + + + {/* Toolbar - uses GitHub's official markdown-toolbar-element */} + {viewMode === "write" && ( + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + )} + + + {/* Content area */} + {viewMode === "write" ? ( +