diff --git a/AGENTS.md b/AGENTS.md index 0d06f60..2f14e98 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -31,9 +31,7 @@ Alongside the correctly generated pipeline yaml, an agent file is generated from │ ├── execute.rs # Stage 2 safe output execution │ ├── fuzzy_schedule.rs # Fuzzy schedule parsing │ ├── logging.rs # File-based logging infrastructure -│ ├── mcp.rs # SafeOutputs MCP server -│ ├── mcp_firewall.rs # MCP Firewall server -│ ├── mcp_metadata.rs # Bundled MCP metadata +│ ├── mcp.rs # SafeOutputs MCP server (stdio + HTTP) │ ├── ndjson.rs # NDJSON parsing utilities │ ├── proxy.rs # Network proxy implementation │ ├── sanitize.rs # Input sanitization for safe outputs @@ -50,7 +48,6 @@ Alongside the correctly generated pipeline yaml, an agent file is generated from │ ├── base.yml # Base pipeline template for standalone │ ├── 1es-base.yml # Base pipeline template for 1ES target │ └── threat-analysis.md # Threat detection analysis prompt template -├── mcp-metadata.json # Bundled MCP tool definitions ├── examples/ # Example agent definitions ├── tests/ # Integration tests and fixtures ├── Cargo.toml # Rust dependencies @@ -126,18 +123,7 @@ checkout: # optional list of repository aliases for the agent to checkout and wo # env: # RESERVED: workflow-level environment variables (not yet implemented) # CUSTOM_VAR: "value" mcp-servers: - ado: true # built-in, enabled with defaults - bluebird: true - es-chat: true - msft-learn: true - icm: - allowed: # built-in with restricted functions - - create_incident - - get_incident - kusto: - allowed: - - query - my-custom-tool: # custom MCP server (has command field) + my-custom-tool: # custom MCP server (requires command field) command: "node" args: ["path/to/mcp-server.js"] allowed: @@ -286,7 +272,7 @@ The `target` field in the front matter determines the output format and executio Generates a self-contained Azure DevOps pipeline with: - Full 3-job pipeline: `PerformAgenticTask` → `AnalyzeSafeOutputs` → `ProcessSafeOutputs` - AWF 
(Agentic Workflow Firewall) L7 domain whitelisting via Squid proxy + Docker -- MCP firewall with tool-level filtering and custom MCP server support +- MCP Gateway (MCPG) for MCP routing with SafeOutputs HTTP backend - Setup/teardown job support - All safe output features (create-pull-request, create-work-item, etc.) @@ -390,13 +376,10 @@ Should be replaced with the human-readable name from the front matter (e.g., "Da Additional params provided to agency CLI. The compiler generates: - `--model ` - AI model from `engine` front matter field (default: claude-opus-4.5) -- `--disable-builtin-mcps` - Disables all built-in MCPs initially - `--no-ask-user` - Prevents interactive prompts - `--allow-tool ` - Explicitly allows specific tools (github, safeoutputs, write, shell commands like cat, date, echo, grep, head, ls, pwd, sort, tail, uniq, wc, yq) -- `--disable-mcp-server ` - Disables specific MCPs (all built-in MCPs are disabled by default and must be explicitly enabled via mcp-servers config) -- `--mcp ` - Enables MCPs specified in front matter -Only built-in MCPs are passed via params. Custom MCPs (with command field) are handled separately. +MCP servers are handled entirely by the MCP Gateway (MCPG) and are not passed as copilot CLI params. ## {{ pool }} @@ -513,9 +496,17 @@ resources: Should be replaced with the markdown body (agent instructions) extracted from the source markdown file, excluding the YAML front matter. This content provides the agent with its task description and guidelines. -## {{ firewall_config }} +## {{ mcpg_config }} + +Should be replaced with the MCP Gateway (MCPG) configuration JSON generated from the `mcp-servers:` front matter. This configuration defines the MCPG server entries and gateway settings. + +The generated JSON has two top-level sections: +- `mcpServers`: Maps server names to their configuration (type, command/url, tools, etc.) 
+- `gateway`: Gateway settings (port, domain, apiKey, payloadDir) -Should be replaced with the MCP firewall configuration JSON generated from the `mcp-servers:` front matter. This configuration defines which MCP servers to spawn and which tools are allowed for each upstream. +SafeOutputs is always included as an HTTP backend (`type: "http"`) pointing to `host.docker.internal`. Custom MCPs with explicit `command:` are included as stdio servers (`type: "stdio"`). MCPs without a command are skipped (there are no built-in MCPs in the copilot CLI). + +Runtime placeholders (`${SAFE_OUTPUTS_PORT}`, `${SAFE_OUTPUTS_API_KEY}`, `${MCP_GATEWAY_API_KEY}`) are substituted by the pipeline at runtime before passing the config to MCPG. ## {{ allowed_domains }} @@ -605,6 +596,14 @@ https://github.com/github/gh-aw-firewall/releases/download/v{VERSION}/awf-linux- A `checksums.txt` file is also downloaded and verified via `sha256sum -c checksums.txt --ignore-missing` to ensure binary integrity. +## {{ mcpg_version }} + +Should be replaced with the pinned version of the MCP Gateway (defined as `MCPG_VERSION` constant in `src/compile/common.rs`). Used to tag the MCPG Docker image in the pipeline. + +## {{ mcpg_image }} + +Should be replaced with the MCPG Docker image name (defined as `MCPG_IMAGE` constant in `src/compile/common.rs`). Currently `ghcr.io/github/gh-aw-mcpg`. + ### 1ES-Specific Template Markers The following markers are specific to the 1ES target (`target: 1es`) and are not used in standalone pipelines: @@ -617,7 +616,7 @@ Should be replaced with the agent context root for 1ES Agency jobs. This determi ## {{ mcp_configuration }} -Should be replaced with the MCP server configuration for 1ES templates. For each enabled built-in MCP, generates service connection references: +Should be replaced with the MCP server configuration for 1ES templates. 
For each enabled MCP with a service connection, generates service connection references: ```yaml ado: @@ -626,7 +625,7 @@ kusto: serviceConnection: mcp-kusto-service-connection ``` -Custom MCP servers (with `command:` field) are not supported in 1ES target. Only built-in MCPs with corresponding service connections are supported. +Custom MCP servers (with `command:` field) are not supported in 1ES target. MCPs must have service connection configuration. ## {{ global_options }} @@ -650,7 +649,10 @@ Global flags (apply to all subcommands): `--verbose, -v` (enable info-level logg - `` - Path to the source markdown file - `` - Path to the pipeline YAML file to verify - Useful for CI checks to ensure pipelines are regenerated after source changes -- `mcp ` - Run as an MCP server for safe outputs +- `mcp ` - Run SafeOutputs as a stdio MCP server +- `mcp-http ` - Run SafeOutputs as an HTTP MCP server (for MCPG integration) + - `--port ` - Port to listen on (default: 8100) + - `--api-key ` - API key for authentication (auto-generated if not provided) - `execute` - Execute safe outputs from Stage 1 (Stage 2 of pipeline) - `--source, -s ` - Path to source markdown file - `--safe-output-dir ` - Directory containing safe output NDJSON (default: current directory) @@ -659,8 +661,6 @@ Global flags (apply to all subcommands): `--verbose, -v` (enable info-level logg - `--ado-project ` - Azure DevOps project name override - `proxy` - Start an HTTP proxy for network filtering - `--allow ` - Allowed hosts (supports wildcards, can be repeated) -- `mcp-firewall` - Start an MCP firewall server that proxies tool calls - - `--config, -c ` - Path to firewall configuration JSON file ## Safe Outputs Configuration @@ -883,34 +883,11 @@ cargo add ## MCP Configuration -The `mcp-servers:` field provides a unified way to configure both built-in and custom MCP (Model Context Protocol) servers. 
The compiler distinguishes between them by checking for the `command:` field—if present, it's a custom server; otherwise, it's a built-in. - -### Built-in MCP Servers - -Enable built-in servers with `true` or configure them with options: - -```yaml -mcp-servers: - ado: true # enabled with all default functions - ado-ext: true # Extended ADO functionality - asa: true # Azure Stream Analytics MCP - bluebird: true # Bluebird MCP - calculator: true # Calculator MCP - es-chat: true - icm: # enabled with restricted functions - allowed: - - create_incident - - get_incident - kusto: - allowed: - - query - msft-learn: true - stack: true # Stack MCP -``` +The `mcp-servers:` field configures MCP (Model Context Protocol) servers that are made available to the agent via the MCP Gateway (MCPG). All MCPs require explicit `command:` configuration — there are no built-in MCPs in the copilot CLI. ### Custom MCP Servers -Define custom servers by including a `command:` field: +Define MCP servers by including a `command:` field: ```yaml mcp-servers: @@ -924,27 +901,15 @@ mcp-servers: ### Configuration Properties -**For built-in MCPs:** -- `true` - Enable with all default functions -- `allowed:` - Array of function names to restrict available tools - -**For custom MCPs (requires `command:`):** - `command:` - The executable to run (e.g., `"node"`, `"python"`, `"dotnet"`) - `args:` - Array of command-line arguments passed to the command - `allowed:` - Array of function names agents are permitted to call (required for security) - `env:` - Optional environment variables for the MCP server process -### Example: Mixed Configuration +### Example Configuration ```yaml mcp-servers: - # Built-in servers - ado: true - ado-ext: true - es-chat: true - icm: - allowed: [create_incident, get_incident] - # Custom Python MCP server data-processor: command: "python" @@ -970,7 +935,7 @@ mcp-servers: 2. **Command Validation**: The compiler validates that commands are from a trusted set 3. 
**Argument Sanitization**: Arguments are validated to prevent injection attacks 4. **Environment Isolation**: MCP servers run in the same isolated sandbox as the pipeline -5. **Built-in Trust**: Built-in MCPs are pre-vetted; custom MCPs require explicit `allowed:` list +5. **MCPG Gateway**: All MCP traffic flows through the MCP Gateway which enforces tool-level filtering ## Network Isolation (AWF) @@ -1016,6 +981,7 @@ The following domains are always allowed (defined in `allowed_hosts.rs`): | `dc.services.visualstudio.com` | Visual Studio telemetry | | `rt.services.visualstudio.com` | Visual Studio runtime telemetry | | `config.edge.skype.com` | Agency configuration | +| `host.docker.internal` | MCP Gateway (MCPG) on host | ### Adding Additional Hosts @@ -1047,145 +1013,104 @@ When not configured: - ADO access tokens are omitted from the copilot invocation - The agent cannot authenticate to ADO APIs -## MCP Firewall +## MCP Gateway (MCPG) -The MCP Firewall is a security layer that acts as a filtering proxy between agents and their configured MCP servers. It provides policy-based access control and audit logging for all tool calls. - -### Purpose - -When agents are configured with multiple MCPs (e.g., `ado`, `kusto`, `icm`), the firewall: - -1. **Loads tool definitions** from pre-generated metadata (`mcp-metadata.json`) -2. **Enforces allow-lists** - only exposes tools explicitly permitted in the config -3. **Namespaces tools** - tools appear as `upstream:tool_name` (e.g., `icm:create_incident`) -4. **Spawns upstream MCPs lazily** as child processes when tools are actually called -5. **Routes tool calls** to the appropriate upstream server -6. **Logs all attempts** for security auditing +The MCP Gateway ([gh-aw-mcpg](https://github.com/github/gh-aw-mcpg)) is the upstream MCP routing layer that connects agents to their configured MCP servers. It replaces the previous custom MCP firewall with the standard gh-aw gateway implementation. 
### Architecture ``` -┌─────────────┐ ┌──────────────────┐ ┌─────────────────┐ -│ │ │ │ │ ado MCP │ -│ Agent │────▶│ MCP Firewall │────▶│ (agency mcp ado)│ -│ (Agency) │ │ │ └─────────────────┘ -│ │ │ - Policy check │ ┌─────────────────┐ -└─────────────┘ │ - Tool routing │────▶│ icm MCP │ - │ - Audit logging │ │ (agency mcp icm)│ - └──────────────────┘ └─────────────────┘ - ┌─────────────────┐ - ────▶│ custom MCP │ - │ (node server.js)│ - └─────────────────┘ + Host +┌─────────────────────────────────────────────────┐ +│ │ +│ ┌──────────────┐ ┌──────────────────────┐ │ +│ │ SafeOutputs │ │ MCPG Gateway │ │ +│ │ HTTP Server │◀────│ (Docker, --network │ │ +│ │ (ado-aw │ │ host, port 80) │ │ +│ │ mcp-http) │ │ │ │ +│ │ port 8100 │ │ Routes tool calls │ │ +│ └──────────────┘ │ to upstreams │ │ +│ └──────────┬───────────┘ │ +│ │ │ +│ ┌─────────────────┐ │ │ +│ │ Custom MCP │◀────┘ │ +│ │ (stdio server) │ │ +│ └─────────────────┘ │ +└─────────────────────────────────────────────────┘ + │ + host.docker.internal:80 + │ +┌─────────────────────────────────────────────────┐ +│ AWF Container │ +│ │ +│ ┌──────────┐ │ +│ │ Copilot │──── HTTP ──── MCPG (via host) │ +│ │ Agent │ │ +│ └──────────┘ │ +└─────────────────────────────────────────────────┘ ``` -### Configuration File Format +### How It Works + +1. **SafeOutputs HTTP server** starts on the host (port 8100) via `ado-aw mcp-http` +2. **MCPG container** starts on the host network (`docker run --network host`) +3. **MCPG config** (generated by the compiler) defines: + - SafeOutputs as an HTTP backend (`type: "http"`, URL points to localhost:8100) + - Custom MCPs as stdio servers (`type: "stdio"`, spawned by MCPG) + - Gateway settings (port 80, API key, payload directory) +4. **Agent inside AWF** connects to MCPG via `http://host.docker.internal:80/mcp` +5. MCPG routes tool calls to the appropriate upstream (SafeOutputs or custom MCPs) +6. 
After the agent completes, MCPG and SafeOutputs are stopped + +### MCPG Configuration Format -The firewall reads a JSON configuration file at runtime: +The compiler generates MCPG configuration JSON from the `mcp-servers:` front matter: ```json { - "upstreams": { - "ado": { - "command": "agency", - "args": ["mcp", "ado"], - "env": {}, - "allowed": ["*"] - }, - "icm": { - "command": "agency", - "args": ["mcp", "icm"], - "env": {}, - "allowed": ["create_incident", "get_incident"] - }, - "kusto": { - "command": "agency", - "args": ["mcp", "kusto"], - "env": {}, - "allowed": ["query"] + "mcpServers": { + "safeoutputs": { + "type": "http", + "url": "http://host.docker.internal:8100/mcp", + "headers": { + "Authorization": "Bearer " + } }, "custom-tool": { + "type": "stdio", "command": "node", "args": ["server.js"], - "env": { "NODE_ENV": "production" }, - "allowed": ["process_data", "get_status"], - "spawn_timeout_secs": 60 + "tools": ["process_data", "get_status"] } + }, + "gateway": { + "port": 80, + "domain": "host.docker.internal", + "apiKey": "", + "payloadDir": "/tmp/gh-aw/mcp-payloads" } } ``` -### Configuration Properties (Firewall) - -Each upstream configuration supports: - -| Property | Required | Default | Description | -|----------|----------|---------|-------------| -| `command` | Yes | - | The executable to spawn | -| `args` | No | `[]` | Arguments passed to the command | -| `env` | No | `{}` | Environment variables for the process | -| `allowed` | Yes | - | Tool names allowed (supports `"*"` and prefix wildcards) | -| `spawn_timeout_secs` | No | `30` | Timeout in seconds for spawning and initializing the MCP server | - -### Allow-list Patterns - -The `allowed` field supports several patterns: - -| Pattern | Description | Example | -|---------|-------------|---------| -| `"*"` | Allow all tools from this upstream | `["*"]` | -| `"exact_name"` | Allow only this specific tool | `["query", "execute"]` | -| `"prefix_*"` | Allow tools starting with prefix | 
`["get_*", "list_*"]` | - -### Tool Namespacing - -All tools exposed by the firewall are namespaced with their upstream name: - -- `ado:create-work-item` - from the `ado` upstream -- `icm:create_incident` - from the `icm` upstream -- `kusto:query` - from the `kusto` upstream - -This prevents tool name collisions and makes it clear which upstream handles each call. - -### CLI Usage - -```bash -# Start the MCP firewall server -ado-aw mcp-firewall --config /path/to/config.json -``` +Runtime placeholders (`${SAFE_OUTPUTS_PORT}`, `${SAFE_OUTPUTS_API_KEY}`, `${MCP_GATEWAY_API_KEY}`) are substituted by the pipeline before passing the config to MCPG. ### Pipeline Integration -The firewall is automatically configured in generated pipelines: - -1. **Config Generation**: The compiler generates `mcp-firewall-config.json` from the agent's `mcp-servers:` front matter -2. **MCP Registration**: The firewall is registered in the agency MCP config as `mcp-firewall` -3. **Runtime Launch**: When agency starts, it launches the firewall which spawns upstream MCPs - -The firewall config is written to `$(Agent.TempDirectory)/staging/mcp-firewall-config.json` in its own pipeline step, making it easy to inspect and debug. - -### Audit Logging - -All tool call attempts are logged to the centralized log file at `$HOME/.ado-aw/logs/YYYY-MM-DD.log`: - -``` -[2026-01-29T10:15:32Z] [INFO] [firewall] ALLOWED icm:create_incident (args: {"title": "...", "severity": 3}) -[2026-01-29T10:15:45Z] [INFO] [firewall] BLOCKED icm:delete_incident (not in allowlist) -[2026-01-29T10:16:01Z] [INFO] [firewall] ALLOWED kusto:query (args: {"cluster": "...", "query": "..."}) -``` - -This provides a complete audit trail of agent actions for security review. +The MCPG is automatically configured in generated standalone pipelines: -### Error Handling +1. **Config Generation**: The compiler generates `mcpg-config.json` from the agent's `mcp-servers:` front matter +2. 
**SafeOutputs Start**: `ado-aw mcp-http` starts as a background process on the host +3. **MCPG Start**: The MCPG Docker container starts on the host network with config via stdin +4. **Agent Execution**: AWF runs the agent with `--enable-host-access`, copilot connects to MCPG via HTTP +5. **Cleanup**: Both MCPG and SafeOutputs are stopped after the agent completes (condition: always) -- **Upstream spawn failure**: If an upstream fails to start, the firewall continues with remaining upstreams (partial functionality) -- **Tool not found**: Returns an MCP error if the requested tool doesn't exist -- **Policy violation**: Returns an MCP error if the tool exists but isn't in the allow-list -- **Upstream error**: Propagates errors from upstream MCPs back to the agent +The MCPG config is written to `$(Agent.TempDirectory)/staging/mcpg-config.json` in its own pipeline step, making it easy to inspect and debug. ## References - [GitHub Agentic Workflows](https://github.com/githubnext/gh-aw) - Inspiration for this project +- [MCP Gateway (gh-aw-mcpg)](https://github.com/github/gh-aw-mcpg) - MCP routing gateway +- [AWF (gh-aw-firewall)](https://github.com/github/gh-aw-firewall) - Network isolation firewall - [Azure DevOps YAML Schema](https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema) - [OneBranch Documentation](https://aka.ms/onebranchdocs) - [Clap Documentation](https://docs.rs/clap/latest/clap/) diff --git a/Cargo.lock b/Cargo.lock index 7415195..30d5fd3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8,6 +8,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "axum", "chrono", "clap", "dirs", @@ -22,7 +23,6 @@ dependencies = [ "serde_json", "serde_yaml", "tempfile", - "terminal_size", "tokio", ] @@ -123,6 +123,58 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" +[[package]] +name = "axum" +version = "0.8.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8" +dependencies = [ + "axum-core", + "bytes", + "form_urlencoded", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "serde_core", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-core" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "sync_wrapper", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "base64" version = "0.22.1" @@ -445,6 +497,12 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + [[package]] name = "foreign-types" version = "0.3.2" @@ -586,8 +644,21 @@ checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" dependencies = [ "cfg-if", "libc", - "r-efi", + "r-efi 5.3.0", + "wasip2", +] + +[[package]] +name = "getrandom" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555" +dependencies = [ + "cfg-if", + "libc", + "r-efi 6.0.0", "wasip2", + "wasip3", ] [[package]] @@ -609,6 +680,15 @@ dependencies = [ "tracing", ] +[[package]] +name = "hashbrown" +version = "0.15.5" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash", +] + [[package]] name = "hashbrown" version = "0.16.1" @@ -660,6 +740,12 @@ version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + [[package]] name = "hyper" version = "1.8.1" @@ -674,6 +760,7 @@ dependencies = [ "http", "http-body", "httparse", + "httpdate", "itoa", "pin-project-lite", "pin-utils", @@ -845,6 +932,12 @@ dependencies = [ "zerovec", ] +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + [[package]] name = "ident_case" version = "1.0.1" @@ -879,7 +972,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.16.1", + "serde", + "serde_core", ] [[package]] @@ -959,6 +1054,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + [[package]] name = "libc" version = "0.2.180" @@ -1008,6 +1109,12 @@ version = "0.4.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" +[[package]] +name = "matchit" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" + [[package]] name = "memchr" version = "2.7.6" @@ -1197,6 +1304,25 @@ dependencies = [ "zerovec", ] +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn", +] + [[package]] name = "proc-macro2" version = "1.0.105" @@ -1221,6 +1347,41 @@ version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" +[[package]] +name = "r-efi" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" + +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" +dependencies = [ + "getrandom 0.3.4", +] + [[package]] name = "redox_syscall" version = "0.5.18" @@ -1357,18 +1518,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"e5947688160b56fb6c827e3c20a72c90392a1d7e9dec74749197aa1780ac42ca" dependencies = [ "base64", + "bytes", "chrono", "futures", + "http", + "http-body", + "http-body-util", "paste", "pin-project-lite", + "rand", "rmcp-macros", "schemars", "serde", "serde_json", + "sse-stream", "thiserror", "tokio", + "tokio-stream", "tokio-util", + "tower-service", "tracing", + "uuid", ] [[package]] @@ -1575,6 +1745,17 @@ dependencies = [ "zmij", ] +[[package]] +name = "serde_path_to_error" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457" +dependencies = [ + "itoa", + "serde", + "serde_core", +] + [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -1659,6 +1840,19 @@ dependencies = [ "windows-sys 0.60.2", ] +[[package]] +name = "sse-stream" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb4dc4d33c68ec1f27d386b5610a351922656e1fdf5c05bbaad930cd1519479a" +dependencies = [ + "bytes", + "futures-util", + "http-body", + "http-body-util", + "pin-project-lite", +] + [[package]] name = "stable_deref_trait" version = "1.2.1" @@ -1742,16 +1936,6 @@ dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "terminal_size" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b8cb979cb11c32ce1603f8137b22262a9d131aaa5c37b5678025f22b8becd0" -dependencies = [ - "rustix", - "windows-sys 0.60.2", -] - [[package]] name = "thiserror" version = "2.0.18" @@ -1839,6 +2023,17 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-stream" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + [[package]] name = "tokio-util" version = "0.7.18" @@ -1865,6 +2060,7 @@ dependencies = [ "tokio", 
"tower-layer", "tower-service", + "tracing", ] [[package]] @@ -1903,6 +2099,7 @@ version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" dependencies = [ + "log", "pin-project-lite", "tracing-attributes", "tracing-core", @@ -1952,6 +2149,12 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + [[package]] name = "unsafe-libyaml" version = "0.2.11" @@ -1988,6 +2191,17 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "uuid" +version = "1.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a68d3c8f01c0cfa54a75291d83601161799e4a89a39e0929f4b0354d88757a37" +dependencies = [ + "getrandom 0.4.2", + "js-sys", + "wasm-bindgen", +] + [[package]] name = "vcpkg" version = "0.2.15" @@ -2018,6 +2232,15 @@ dependencies = [ "wit-bindgen", ] +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen", +] + [[package]] name = "wasm-bindgen" version = "0.2.108" @@ -2077,6 +2300,40 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = 
"0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + [[package]] name = "web-sys" version = "0.3.85" @@ -2340,6 +2597,88 @@ name = "wit-bindgen" version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" 
+dependencies = [ + "anyhow", + "bitflags", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] [[package]] name = "writeable" @@ -2370,6 +2709,26 @@ dependencies = [ "synstructure", ] +[[package]] +name = "zerocopy" +version = "0.8.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a789c6e490b576db9f7e6b6d661bcc9799f7c0ac8352f56ea20193b2681532e5" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f65c489a7071a749c849713807783f70672b28094011623e200cb86dcb835953" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "zerofrom" version = "0.1.6" diff --git a/Cargo.toml b/Cargo.toml index 78254e2..be2d083 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,7 +13,7 @@ serde = { version = "1.0.228", features = ["derive"] } serde_yaml = "0.9.34" serde_json = "1.0.149" schemars = "1.2" -rmcp = { version = "0.8.0", features = ["server", "transport-io"] } +rmcp = { version = "0.8.0", features = ["server", "transport-io", "transport-streamable-http-server"] } reqwest = { version = "0.12", features = ["json"] } tempfile = "3" tokio = { version = "1.43", features = ["full"] } @@ -21,4 +21,4 @@ log = "0.4" env_logger = "0.11" regex-lite = "0.1" inquire = { version = "0.9.2", features = ["editor"] } -terminal_size = "0.4.3" +axum = { version = "0.8.8", features = ["tokio"] } diff --git a/mcp-metadata.json b/mcp-metadata.json deleted file mode 100644 index 
33776c9..0000000 --- a/mcp-metadata.json +++ /dev/null @@ -1,4176 +0,0 @@ -{ - "version": "1.0", - "generated_at": "2026-01-29T15:08:44.279920500+00:00", - "mcps": { - "ado": { - "name": "ado", - "builtin": true, - "tools": [ - { - "name": "core_list_project_teams", - "description": "Retrieve a list of teams for the specified Azure DevOps project.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "mine": { - "description": "If true, only return teams that the authenticated user is a member of.", - "type": "boolean" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "skip": { - "description": "The number of teams to skip for pagination. Defaults to 0.", - "type": "number" - }, - "top": { - "description": "The maximum number of teams to return. Defaults to 100.", - "type": "number" - } - }, - "required": [ - "project" - ], - "type": "object" - } - }, - { - "name": "core_list_projects", - "description": "Retrieve a list of projects in your Azure DevOps organization.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "continuationToken": { - "description": "Continuation token for pagination. Used to fetch the next set of results if available.", - "type": "number" - }, - "projectNameFilter": { - "description": "Filter projects by name. Supports partial matches.", - "type": "string" - }, - "skip": { - "description": "The number of projects to skip for pagination. Defaults to 0.", - "type": "number" - }, - "stateFilter": { - "default": "wellFormed", - "description": "Filter projects by their state. Defaults to 'wellFormed'.", - "enum": [ - "all", - "wellFormed", - "createPending", - "deleted" - ], - "type": "string" - }, - "top": { - "description": "The maximum number of projects to return. 
Defaults to 100.", - "type": "number" - } - }, - "type": "object" - } - }, - { - "name": "core_get_identity_ids", - "description": "Retrieve Azure DevOps identity IDs for a provided search filter.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "searchFilter": { - "description": "Search filter (unique name, display name, email) to retrieve identity IDs for.", - "type": "string" - } - }, - "required": [ - "searchFilter" - ], - "type": "object" - } - }, - { - "name": "work_list_team_iterations", - "description": "Retrieve a list of iterations for a specific team in a project.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "team": { - "description": "The name or ID of the Azure DevOps team.", - "type": "string" - }, - "timeframe": { - "description": "The timeframe for which to retrieve iterations. Currently, only 'current' is supported.", - "enum": [ - "current" - ], - "type": "string" - } - }, - "required": [ - "project", - "team" - ], - "type": "object" - } - }, - { - "name": "work_create_iterations", - "description": "Create new iterations in a specified Azure DevOps project.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "iterations": { - "description": "An array of iterations to create. Each iteration must have a name and can optionally have start and finish dates in ISO format.", - "items": { - "additionalProperties": false, - "properties": { - "finishDate": { - "description": "The finish date of the iteration in ISO format (e.g., '2023-01-31T23:59:59Z'). 
Optional.", - "type": "string" - }, - "iterationName": { - "description": "The name of the iteration to create.", - "type": "string" - }, - "startDate": { - "description": "The start date of the iteration in ISO format (e.g., '2023-01-01T00:00:00Z'). Optional.", - "type": "string" - } - }, - "required": [ - "iterationName" - ], - "type": "object" - }, - "type": "array" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - } - }, - "required": [ - "project", - "iterations" - ], - "type": "object" - } - }, - { - "name": "work_list_iterations", - "description": "List all iterations in a specified Azure DevOps project.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "depth": { - "default": 2, - "description": "Depth of children to fetch.", - "type": "number" - }, - "excludedIds": { - "description": "An optional array of iteration IDs, and thier children, that should not be returned.", - "items": { - "type": "number" - }, - "type": "array" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - } - }, - "required": [ - "project" - ], - "type": "object" - } - }, - { - "name": "work_assign_iterations", - "description": "Assign existing iterations to a specific team in a project.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "iterations": { - "description": "An array of iterations to assign. 
Each iteration must have an identifier and a path.", - "items": { - "additionalProperties": false, - "properties": { - "identifier": { - "description": "The identifier of the iteration to assign.", - "type": "string" - }, - "path": { - "description": "The path of the iteration to assign, e.g., 'Project/Iteration'.", - "type": "string" - } - }, - "required": [ - "identifier", - "path" - ], - "type": "object" - }, - "type": "array" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "team": { - "description": "The name or ID of the Azure DevOps team.", - "type": "string" - } - }, - "required": [ - "project", - "team", - "iterations" - ], - "type": "object" - } - }, - { - "name": "work_get_team_capacity", - "description": "Get the team capacity of a specific team and iteration in a project.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "iterationId": { - "description": "The Iteration Id to get capacity for.", - "type": "string" - }, - "project": { - "description": "The name or Id of the Azure DevOps project.", - "type": "string" - }, - "team": { - "description": "The name or Id of the Azure DevOps team.", - "type": "string" - } - }, - "required": [ - "project", - "team", - "iterationId" - ], - "type": "object" - } - }, - { - "name": "work_update_team_capacity", - "description": "Update the team capacity of a team member for a specific iteration in a project.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "activities": { - "description": "Array of activities and their daily capacities for the team member.", - "items": { - "additionalProperties": false, - "properties": { - "capacityPerDay": { - "description": "The capacity per day for this activity.", - "type": "number" - }, - "name": { - "description": "The name of the activity (e.g., 
'Development').", - "type": "string" - } - }, - "required": [ - "name", - "capacityPerDay" - ], - "type": "object" - }, - "type": "array" - }, - "daysOff": { - "description": "Array of days off for the team member, each with a start and end date in ISO format.", - "items": { - "additionalProperties": false, - "properties": { - "end": { - "description": "End date of the day off in ISO format.", - "type": "string" - }, - "start": { - "description": "Start date of the day off in ISO format.", - "type": "string" - } - }, - "required": [ - "start", - "end" - ], - "type": "object" - }, - "type": "array" - }, - "iterationId": { - "description": "The Iteration Id to update the capacity for.", - "type": "string" - }, - "project": { - "description": "The name or Id of the Azure DevOps project.", - "type": "string" - }, - "team": { - "description": "The name or Id of the Azure DevOps team.", - "type": "string" - }, - "teamMemberId": { - "description": "The team member Id for the specific team member.", - "type": "string" - } - }, - "required": [ - "project", - "team", - "teamMemberId", - "iterationId", - "activities" - ], - "type": "object" - } - }, - { - "name": "work_get_iteration_capacities", - "description": "Get an iteration's capacity for all teams in iteration and project.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "iterationId": { - "description": "The Iteration Id to get capacity for.", - "type": "string" - }, - "project": { - "description": "The name or Id of the Azure DevOps project.", - "type": "string" - } - }, - "required": [ - "project", - "iterationId" - ], - "type": "object" - } - }, - { - "name": "pipelines_get_build_definitions", - "description": "Retrieves a list of build definitions for a given project.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "builtAfter": { - "description": 
"Return definitions that have builds after this date", - "format": "date-time", - "type": "string" - }, - "continuationToken": { - "description": "Token for continuing paged results", - "type": "string" - }, - "definitionIds": { - "description": "Array of build definition IDs to filter", - "items": { - "type": "number" - }, - "type": "array" - }, - "includeAllProperties": { - "description": "Whether to include all properties in the results", - "type": "boolean" - }, - "includeLatestBuilds": { - "description": "Whether to include the latest builds for each definition", - "type": "boolean" - }, - "minMetricsTime": { - "description": "Minimum metrics time to filter build definitions", - "format": "date-time", - "type": "string" - }, - "name": { - "description": "Name of the build definition to filter", - "type": "string" - }, - "notBuiltAfter": { - "description": "Return definitions that do not have builds after this date", - "format": "date-time", - "type": "string" - }, - "path": { - "description": "Path of the build definition to filter", - "type": "string" - }, - "processType": { - "description": "Process type to filter build definitions", - "type": "number" - }, - "project": { - "description": "Project ID or name to get build definitions for", - "type": "string" - }, - "queryOrder": { - "description": "Order in which build definitions are returned", - "enum": [ - "None", - "LastModifiedAscending", - "LastModifiedDescending", - "DefinitionNameAscending", - "DefinitionNameDescending" - ], - "type": "string" - }, - "repositoryId": { - "description": "Repository ID to filter build definitions", - "type": "string" - }, - "repositoryType": { - "description": "Type of repository to filter build definitions", - "enum": [ - "TfsGit", - "GitHub", - "BitbucketCloud" - ], - "type": "string" - }, - "taskIdFilter": { - "description": "Task ID to filter build definitions", - "type": "string" - }, - "top": { - "description": "Maximum number of build definitions to return", - 
"type": "number" - }, - "yamlFilename": { - "description": "YAML filename to filter build definitions", - "type": "string" - } - }, - "required": [ - "project" - ], - "type": "object" - } - }, - { - "name": "pipelines_create_pipeline", - "description": "Creates a pipeline definition with YAML configuration for a given project.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "folder": { - "description": "Folder path for the new pipeline. Defaults to '\\' if not specified.", - "type": "string" - }, - "name": { - "description": "Name of the new pipeline.", - "type": "string" - }, - "project": { - "description": "Project ID or name to run the build in.", - "type": "string" - }, - "repositoryConnectionId": { - "description": "The service connection ID for GitHub repositories. Not required for Azure Repos Git.", - "type": "string" - }, - "repositoryId": { - "description": "The ID of the repository.", - "type": "string" - }, - "repositoryName": { - "description": "The name of the repository. In case of GitHub repository, this is the full name (:owner/:repo) - e.g. 
octocat/Hello-World.", - "type": "string" - }, - "repositoryType": { - "description": "The type of repository where the pipeline's YAML file is located.", - "enum": [ - "Unknown", - "GitHub", - "AzureReposGit", - "GitHubEnterprise", - "BitBucket", - "AzureReposGitHyphenated" - ], - "type": "string" - }, - "yamlPath": { - "description": "The path to the pipeline's YAML file in the repository", - "type": "string" - } - }, - "required": [ - "project", - "name", - "yamlPath", - "repositoryType", - "repositoryName" - ], - "type": "object" - } - }, - { - "name": "pipelines_get_build_definition_revisions", - "description": "Retrieves a list of revisions for a specific build definition.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "definitionId": { - "description": "ID of the build definition to get revisions for", - "type": "number" - }, - "project": { - "description": "Project ID or name to get the build definition revisions for", - "type": "string" - } - }, - "required": [ - "project", - "definitionId" - ], - "type": "object" - } - }, - { - "name": "pipelines_get_builds", - "description": "Retrieves a list of builds for a given project.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "branchName": { - "description": "Branch name to filter builds", - "type": "string" - }, - "buildIds": { - "description": "Array of build IDs to retrieve", - "items": { - "type": "number" - }, - "type": "array" - }, - "buildNumber": { - "description": "Build number to filter builds", - "type": "string" - }, - "continuationToken": { - "description": "Token for continuing paged results", - "type": "string" - }, - "definitions": { - "description": "Array of build definition IDs to filter builds", - "items": { - "type": "number" - }, - "type": "array" - }, - "deletedFilter": { - "description": "Filter for deleted builds (see 
QueryDeletedOption enum)", - "type": "number" - }, - "maxBuildsPerDefinition": { - "description": "Maximum number of builds per definition", - "type": "number" - }, - "maxTime": { - "description": "Maximum finish time to filter builds", - "format": "date-time", - "type": "string" - }, - "minTime": { - "description": "Minimum finish time to filter builds", - "format": "date-time", - "type": "string" - }, - "project": { - "description": "Project ID or name to get builds for", - "type": "string" - }, - "properties": { - "description": "Array of property names to include in the results", - "items": { - "type": "string" - }, - "type": "array" - }, - "queryOrder": { - "default": "QueueTimeDescending", - "description": "Order in which builds are returned", - "enum": [ - "FinishTimeAscending", - "FinishTimeDescending", - "QueueTimeDescending", - "QueueTimeAscending", - "StartTimeDescending", - "StartTimeAscending" - ], - "type": "string" - }, - "queues": { - "description": "Array of queue IDs to filter builds", - "items": { - "type": "number" - }, - "type": "array" - }, - "reasonFilter": { - "description": "Reason filter for the build (see BuildReason enum)", - "type": "number" - }, - "repositoryId": { - "description": "Repository ID to filter builds", - "type": "string" - }, - "repositoryType": { - "description": "Type of repository to filter builds", - "enum": [ - "TfsGit", - "GitHub", - "BitbucketCloud" - ], - "type": "string" - }, - "requestedFor": { - "description": "User ID or name who requested the build", - "type": "string" - }, - "resultFilter": { - "description": "Result filter for the build (see BuildResult enum)", - "type": "number" - }, - "statusFilter": { - "description": "Status filter for the build (see BuildStatus enum)", - "type": "number" - }, - "tagFilters": { - "description": "Array of tags to filter builds", - "items": { - "type": "string" - }, - "type": "array" - }, - "top": { - "description": "Maximum number of builds to return", - "type": "number" 
- } - }, - "required": [ - "project" - ], - "type": "object" - } - }, - { - "name": "pipelines_get_build_log", - "description": "Retrieves the logs for a specific build.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "buildId": { - "description": "ID of the build to get the log for", - "type": "number" - }, - "project": { - "description": "Project ID or name to get the build log for", - "type": "string" - } - }, - "required": [ - "project", - "buildId" - ], - "type": "object" - } - }, - { - "name": "pipelines_get_build_log_by_id", - "description": "Get a specific build log by log ID.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "buildId": { - "description": "ID of the build to get the log for", - "type": "number" - }, - "endLine": { - "description": "Ending line number for the log content, defaults to the end of the log", - "type": "number" - }, - "logId": { - "description": "ID of the log to retrieve", - "type": "number" - }, - "project": { - "description": "Project ID or name to get the build log for", - "type": "string" - }, - "startLine": { - "description": "Starting line number for the log content, defaults to 0", - "type": "number" - } - }, - "required": [ - "project", - "buildId", - "logId" - ], - "type": "object" - } - }, - { - "name": "pipelines_get_build_changes", - "description": "Get the changes associated with a specific build.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "buildId": { - "description": "ID of the build to get changes for", - "type": "number" - }, - "continuationToken": { - "description": "Continuation token for pagination", - "type": "string" - }, - "includeSourceChange": { - "description": "Whether to include source changes in the results, defaults to false", - "type": "boolean" - }, 
- "project": { - "description": "Project ID or name to get the build changes for", - "type": "string" - }, - "top": { - "default": 100, - "description": "Number of changes to retrieve, defaults to 100", - "type": "number" - } - }, - "required": [ - "project", - "buildId" - ], - "type": "object" - } - }, - { - "name": "pipelines_get_run", - "description": "Gets a run for a particular pipeline.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "pipelineId": { - "description": "ID of the pipeline to run", - "type": "number" - }, - "project": { - "description": "Project ID or name to run the build in", - "type": "string" - }, - "runId": { - "description": "ID of the run to get", - "type": "number" - } - }, - "required": [ - "project", - "pipelineId", - "runId" - ], - "type": "object" - } - }, - { - "name": "pipelines_list_runs", - "description": "Gets top 10000 runs for a particular pipeline.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "pipelineId": { - "description": "ID of the pipeline to run", - "type": "number" - }, - "project": { - "description": "Project ID or name to run the build in", - "type": "string" - } - }, - "required": [ - "project", - "pipelineId" - ], - "type": "object" - } - }, - { - "name": "pipelines_run_pipeline", - "description": "Starts a new run of a pipeline.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "pipelineId": { - "description": "ID of the pipeline to run", - "type": "number" - }, - "pipelineVersion": { - "description": "Version of the pipeline to run. 
If not provided, the latest version will be used.", - "type": "number" - }, - "previewRun": { - "description": "If true, returns the final YAML document after parsing templates without creating a new run.", - "type": "boolean" - }, - "project": { - "description": "Project ID or name to run the build in", - "type": "string" - }, - "resources": { - "additionalProperties": false, - "description": "A dictionary of resources to pass to the pipeline.", - "properties": { - "builds": { - "additionalProperties": { - "additionalProperties": false, - "properties": { - "version": { - "description": "Version of the build resource.", - "type": "string" - } - }, - "type": "object" - }, - "type": "object" - }, - "containers": { - "additionalProperties": { - "additionalProperties": false, - "properties": { - "version": { - "description": "Version of the container resource.", - "type": "string" - } - }, - "type": "object" - }, - "type": "object" - }, - "packages": { - "additionalProperties": { - "additionalProperties": false, - "properties": { - "version": { - "description": "Version of the package resource.", - "type": "string" - } - }, - "type": "object" - }, - "type": "object" - }, - "pipelines": { - "additionalProperties": { - "additionalProperties": false, - "properties": { - "runId": { - "description": "Id of the source pipeline run that triggered or is referenced by this pipeline run.", - "type": "number" - }, - "version": { - "description": "Version of the source pipeline run.", - "type": "string" - } - }, - "required": [ - "runId" - ], - "type": "object" - }, - "type": "object" - }, - "repositories": { - "additionalProperties": { - "additionalProperties": false, - "properties": { - "refName": { - "description": "Reference name, e.g., refs/heads/main.", - "type": "string" - }, - "token": { - "type": "string" - }, - "tokenType": { - "type": "string" - }, - "version": { - "description": "Version of the repository resource, git commit sha.", - "type": "string" - } - }, - 
"required": [ - "refName" - ], - "type": "object" - }, - "type": "object" - } - }, - "required": [ - "pipelines" - ], - "type": "object" - }, - "stagesToSkip": { - "description": "A list of stages to skip.", - "items": { - "type": "string" - }, - "type": "array" - }, - "templateParameters": { - "additionalProperties": { - "type": "string" - }, - "description": "Custom build parameters as key-value pairs", - "type": "object" - }, - "variables": { - "additionalProperties": { - "additionalProperties": false, - "properties": { - "isSecret": { - "type": "boolean" - }, - "value": { - "type": "string" - } - }, - "type": "object" - }, - "description": "A dictionary of variables to pass to the pipeline.", - "type": "object" - }, - "yamlOverride": { - "description": "YAML override for the pipeline run.", - "type": "string" - } - }, - "required": [ - "project", - "pipelineId" - ], - "type": "object" - } - }, - { - "name": "pipelines_get_build_status", - "description": "Fetches the status of a specific build.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "buildId": { - "description": "ID of the build to get the status for", - "type": "number" - }, - "project": { - "description": "Project ID or name to get the build status for", - "type": "string" - } - }, - "required": [ - "project", - "buildId" - ], - "type": "object" - } - }, - { - "name": "pipelines_update_build_stage", - "description": "Updates the stage of a specific build.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "buildId": { - "description": "ID of the build to update", - "type": "number" - }, - "forceRetryAllJobs": { - "default": false, - "description": "Whether to force retry all jobs in the stage.", - "type": "boolean" - }, - "project": { - "description": "Project ID or name to update the build stage for", - "type": "string" - }, - "stageName": 
{ - "description": "Name of the stage to update", - "type": "string" - }, - "status": { - "description": "New status for the stage", - "enum": [ - "Cancel", - "Retry", - "Run" - ], - "type": "string" - } - }, - "required": [ - "project", - "buildId", - "stageName", - "status" - ], - "type": "object" - } - }, - { - "name": "repo_create_pull_request", - "description": "Create a new pull request.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "description": { - "description": "The description of the pull request. Must not be longer than 4000 characters. Optional.", - "maxLength": 4000, - "type": "string" - }, - "forkSourceRepositoryId": { - "description": "The ID of the fork repository that the pull request originates from. Optional, used when creating a pull request from a fork.", - "type": "string" - }, - "isDraft": { - "default": false, - "description": "Indicates whether the pull request is a draft. Defaults to false.", - "type": "boolean" - }, - "labels": { - "description": "Array of label names to add to the pull request after creation.", - "items": { - "type": "string" - }, - "type": "array" - }, - "repositoryId": { - "description": "The ID of the repository where the pull request will be created.", - "type": "string" - }, - "sourceRefName": { - "description": "The source branch name for the pull request, e.g., 'refs/heads/feature-branch'.", - "type": "string" - }, - "targetRefName": { - "description": "The target branch name for the pull request, e.g., 'refs/heads/main'.", - "type": "string" - }, - "title": { - "description": "The title of the pull request.", - "type": "string" - }, - "workItems": { - "description": "Work item IDs to associate with the pull request, space-separated.", - "type": "string" - } - }, - "required": [ - "repositoryId", - "sourceRefName", - "targetRefName", - "title" - ], - "type": "object" - } - }, - { - "name": "repo_create_branch", - "description": 
"Create a new branch in the repository.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "branchName": { - "description": "The name of the new branch to create, e.g., 'feature-branch'.", - "type": "string" - }, - "repositoryId": { - "description": "The ID of the repository where the branch will be created.", - "type": "string" - }, - "sourceBranchName": { - "default": "main", - "description": "The name of the source branch to create the new branch from. Defaults to 'main'.", - "type": "string" - }, - "sourceCommitId": { - "description": "The commit ID to create the branch from. If not provided, uses the latest commit of the source branch.", - "type": "string" - } - }, - "required": [ - "repositoryId", - "branchName" - ], - "type": "object" - } - }, - { - "name": "repo_update_pull_request", - "description": "Update a Pull Request by ID with specified fields, including setting autocomplete with various completion options.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "autoComplete": { - "description": "Set the pull request to autocomplete when all requirements are met.", - "type": "boolean" - }, - "bypassReason": { - "description": "Reason for bypassing branch policies. When provided, branch policies will be automatically bypassed during autocompletion.", - "type": "string" - }, - "deleteSourceBranch": { - "default": false, - "description": "Whether to delete the source branch when the pull request autocompletes. Defaults to false.", - "type": "boolean" - }, - "description": { - "description": "The new description for the pull request. 
Must not be longer than 4000 characters.", - "maxLength": 4000, - "type": "string" - }, - "isDraft": { - "description": "Whether the pull request should be a draft.", - "type": "boolean" - }, - "labels": { - "description": "Array of label names to replace existing labels on the pull request. This will remove all current labels and add the specified ones.", - "items": { - "type": "string" - }, - "type": "array" - }, - "mergeStrategy": { - "description": "The merge strategy to use when the pull request autocompletes. Defaults to 'NoFastForward'.", - "enum": [ - "NoFastForward", - "Squash", - "Rebase", - "RebaseMerge" - ], - "type": "string" - }, - "pullRequestId": { - "description": "The ID of the pull request to update.", - "type": "number" - }, - "repositoryId": { - "description": "The ID of the repository where the pull request exists.", - "type": "string" - }, - "status": { - "description": "The new status of the pull request. Can be 'Active' or 'Abandoned'.", - "enum": [ - "Active", - "Abandoned" - ], - "type": "string" - }, - "targetRefName": { - "description": "The new target branch name (e.g., 'refs/heads/main').", - "type": "string" - }, - "title": { - "description": "The new title for the pull request.", - "type": "string" - }, - "transitionWorkItems": { - "default": true, - "description": "Whether to transition associated work items to the next state when the pull request autocompletes. Defaults to true.", - "type": "boolean" - } - }, - "required": [ - "repositoryId", - "pullRequestId" - ], - "type": "object" - } - }, - { - "name": "repo_update_pull_request_reviewers", - "description": "Add or remove reviewers for an existing pull request.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "action": { - "description": "Action to perform on the reviewers. 
Can be 'add' or 'remove'.", - "enum": [ - "add", - "remove" - ], - "type": "string" - }, - "pullRequestId": { - "description": "The ID of the pull request to update.", - "type": "number" - }, - "repositoryId": { - "description": "The ID of the repository where the pull request exists.", - "type": "string" - }, - "reviewerIds": { - "description": "List of reviewer ids to add or remove from the pull request.", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "required": [ - "repositoryId", - "pullRequestId", - "reviewerIds", - "action" - ], - "type": "object" - } - }, - { - "name": "repo_list_repos_by_project", - "description": "Retrieve a list of repositories for a given project", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "repoNameFilter": { - "description": "Optional filter to search for repositories by name. If provided, only repositories with names containing this string will be returned.", - "type": "string" - }, - "skip": { - "default": 0, - "description": "The number of repositories to skip. Defaults to 0.", - "type": "number" - }, - "top": { - "default": 100, - "description": "The maximum number of repositories to return.", - "type": "number" - } - }, - "required": [ - "project" - ], - "type": "object" - } - }, - { - "name": "repo_list_pull_requests_by_repo_or_project", - "description": "Retrieve a list of pull requests for a given repository. 
Either repositoryId or project must be provided.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "created_by_me": { - "default": false, - "description": "Filter pull requests created by the current user.", - "type": "boolean" - }, - "created_by_user": { - "description": "Filter pull requests created by a specific user (provide email or unique name). Takes precedence over created_by_me if both are provided.", - "type": "string" - }, - "i_am_reviewer": { - "default": false, - "description": "Filter pull requests where the current user is a reviewer.", - "type": "boolean" - }, - "project": { - "description": "The ID of the project where the pull requests are located.", - "type": "string" - }, - "repositoryId": { - "description": "The ID of the repository where the pull requests are located.", - "type": "string" - }, - "skip": { - "default": 0, - "description": "The number of pull requests to skip.", - "type": "number" - }, - "sourceRefName": { - "description": "Filter pull requests from this source branch (e.g., 'refs/heads/feature-branch').", - "type": "string" - }, - "status": { - "default": "Active", - "description": "Filter pull requests by status. Defaults to 'Active'.", - "enum": [ - "NotSet", - "Active", - "Abandoned", - "Completed", - "All" - ], - "type": "string" - }, - "targetRefName": { - "description": "Filter pull requests into this target branch (e.g., 'refs/heads/main').", - "type": "string" - }, - "top": { - "default": 100, - "description": "The maximum number of pull requests to return.", - "type": "number" - }, - "user_is_reviewer": { - "description": "Filter pull requests where a specific user is a reviewer (provide email or unique name). 
Takes precedence over i_am_reviewer if both are provided.", - "type": "string" - } - }, - "type": "object" - } - }, - { - "name": "repo_list_pull_request_threads", - "description": "Retrieve a list of comment threads for a pull request.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "authorDisplayName": { - "description": "Filter threads by the display name of the thread author (first comment author). Case-insensitive partial matching.", - "type": "string" - }, - "authorEmail": { - "description": "Filter threads by the email of the thread author (first comment author).", - "type": "string" - }, - "baseIteration": { - "description": "The base iteration ID for which to retrieve threads. Optional, defaults to the latest base iteration.", - "type": "number" - }, - "fullResponse": { - "default": false, - "description": "Return full thread JSON response instead of trimmed data.", - "type": "boolean" - }, - "iteration": { - "description": "The iteration ID for which to retrieve threads. Optional, defaults to the latest iteration.", - "type": "number" - }, - "project": { - "description": "Project ID or project name (optional)", - "type": "string" - }, - "pullRequestId": { - "description": "The ID of the pull request for which to retrieve threads.", - "type": "number" - }, - "repositoryId": { - "description": "The ID of the repository where the pull request is located.", - "type": "string" - }, - "skip": { - "default": 0, - "description": "The number of threads to skip after filtering.", - "type": "number" - }, - "status": { - "description": "Filter threads by status. 
If not specified, returns threads of all statuses.", - "enum": [ - "Unknown", - "Active", - "Fixed", - "WontFix", - "Closed", - "ByDesign", - "Pending" - ], - "type": "string" - }, - "top": { - "default": 100, - "description": "The maximum number of threads to return after filtering.", - "type": "number" - } - }, - "required": [ - "repositoryId", - "pullRequestId" - ], - "type": "object" - } - }, - { - "name": "repo_list_pull_request_thread_comments", - "description": "Retrieve a list of comments in a pull request thread.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "fullResponse": { - "default": false, - "description": "Return full comment JSON response instead of trimmed data.", - "type": "boolean" - }, - "project": { - "description": "Project ID or project name (optional)", - "type": "string" - }, - "pullRequestId": { - "description": "The ID of the pull request for which to retrieve thread comments.", - "type": "number" - }, - "repositoryId": { - "description": "The ID of the repository where the pull request is located.", - "type": "string" - }, - "skip": { - "default": 0, - "description": "The number of comments to skip.", - "type": "number" - }, - "threadId": { - "description": "The ID of the thread for which to retrieve comments.", - "type": "number" - }, - "top": { - "default": 100, - "description": "The maximum number of comments to return.", - "type": "number" - } - }, - "required": [ - "repositoryId", - "pullRequestId", - "threadId" - ], - "type": "object" - } - }, - { - "name": "repo_list_branches_by_repo", - "description": "Retrieve a list of branches for a given repository.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "filterContains": { - "description": "Filter to find branches that contain this string in their name.", - "type": "string" - }, - "repositoryId": { - "description": 
"The ID of the repository where the branches are located.", - "type": "string" - }, - "top": { - "default": 100, - "description": "The maximum number of branches to return. Defaults to 100.", - "type": "number" - } - }, - "required": [ - "repositoryId" - ], - "type": "object" - } - }, - { - "name": "repo_list_my_branches_by_repo", - "description": "Retrieve a list of my branches for a given repository Id.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "filterContains": { - "description": "Filter to find branches that contain this string in their name.", - "type": "string" - }, - "repositoryId": { - "description": "The ID of the repository where the branches are located.", - "type": "string" - }, - "top": { - "default": 100, - "description": "The maximum number of branches to return.", - "type": "number" - } - }, - "required": [ - "repositoryId" - ], - "type": "object" - } - }, - { - "name": "repo_get_repo_by_name_or_id", - "description": "Get the repository by project and repository name or ID.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "project": { - "description": "Project name or ID where the repository is located.", - "type": "string" - }, - "repositoryNameOrId": { - "description": "Repository name or ID.", - "type": "string" - } - }, - "required": [ - "project", - "repositoryNameOrId" - ], - "type": "object" - } - }, - { - "name": "repo_get_branch_by_name", - "description": "Get a branch by its name.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "branchName": { - "description": "The name of the branch to retrieve, e.g., 'main' or 'feature-branch'.", - "type": "string" - }, - "repositoryId": { - "description": "The ID of the repository where the branch is located.", - "type": "string" - } - }, - "required": [ 
- "repositoryId", - "branchName" - ], - "type": "object" - } - }, - { - "name": "repo_get_pull_request_by_id", - "description": "Get a pull request by its ID.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "includeLabels": { - "default": false, - "description": "Whether to include a summary of labels in the response.", - "type": "boolean" - }, - "includeWorkItemRefs": { - "default": false, - "description": "Whether to reference work items associated with the pull request.", - "type": "boolean" - }, - "pullRequestId": { - "description": "The ID of the pull request to retrieve.", - "type": "number" - }, - "repositoryId": { - "description": "The ID of the repository where the pull request is located.", - "type": "string" - } - }, - "required": [ - "repositoryId", - "pullRequestId" - ], - "type": "object" - } - }, - { - "name": "repo_reply_to_comment", - "description": "Replies to a specific comment on a pull request.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "content": { - "description": "The content of the comment to be added.", - "type": "string" - }, - "fullResponse": { - "default": false, - "description": "Return full comment JSON response instead of a simple confirmation message.", - "type": "boolean" - }, - "project": { - "description": "Project ID or project name (optional)", - "type": "string" - }, - "pullRequestId": { - "description": "The ID of the pull request where the comment thread exists.", - "type": "number" - }, - "repositoryId": { - "description": "The ID of the repository where the pull request is located.", - "type": "string" - }, - "threadId": { - "description": "The ID of the thread to which the comment will be added.", - "type": "number" - } - }, - "required": [ - "repositoryId", - "pullRequestId", - "threadId", - "content" - ], - "type": "object" - } - }, - { - "name": 
"repo_create_pull_request_thread", - "description": "Creates a new comment thread on a pull request.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "content": { - "description": "The content of the comment to be added.", - "type": "string" - }, - "filePath": { - "description": "The path of the file where the comment thread will be created. (optional)", - "type": "string" - }, - "project": { - "description": "Project ID or project name (optional)", - "type": "string" - }, - "pullRequestId": { - "description": "The ID of the pull request where the comment thread exists.", - "type": "number" - }, - "repositoryId": { - "description": "The ID of the repository where the pull request is located.", - "type": "string" - }, - "rightFileEndLine": { - "description": "Position of last character of the thread's span in right file. The line number of a thread's position. Starts at 1. Must be set if rightFileStartLine is also specified. (optional)", - "type": "number" - }, - "rightFileEndOffset": { - "description": "Position of last character of the thread's span in right file. The character offset of a thread's position inside of a line. Must be set if rightFileEndLine is also specified. (optional)", - "type": "number" - }, - "rightFileStartLine": { - "description": "Position of first character of the thread's span in right file. The line number of a thread's position. Starts at 1. (optional)", - "type": "number" - }, - "rightFileStartOffset": { - "description": "Position of first character of the thread's span in right file. The line number of a thread's position. The character offset of a thread's position inside of a line. Starts at 1. Must be set if rightFileStartLine is also specified. (optional)", - "type": "number" - }, - "status": { - "default": "Active", - "description": "The status of the comment thread. 
Defaults to 'Active'.", - "enum": [ - "Unknown", - "Active", - "Fixed", - "WontFix", - "Closed", - "ByDesign", - "Pending" - ], - "type": "string" - } - }, - "required": [ - "repositoryId", - "pullRequestId", - "content" - ], - "type": "object" - } - }, - { - "name": "repo_update_pull_request_thread", - "description": "Updates an existing comment thread on a pull request.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "project": { - "description": "Project ID or project name (optional)", - "type": "string" - }, - "pullRequestId": { - "description": "The ID of the pull request where the comment thread exists.", - "type": "number" - }, - "repositoryId": { - "description": "The ID of the repository where the pull request is located.", - "type": "string" - }, - "status": { - "description": "The new status for the comment thread.", - "enum": [ - "Unknown", - "Active", - "Fixed", - "WontFix", - "Closed", - "ByDesign", - "Pending" - ], - "type": "string" - }, - "threadId": { - "description": "The ID of the thread to update.", - "type": "number" - } - }, - "required": [ - "repositoryId", - "pullRequestId", - "threadId" - ], - "type": "object" - } - }, - { - "name": "repo_search_commits", - "description": "Search for commits in a repository with comprehensive filtering capabilities. Supports searching by description/comment text, time range, author, committer, specific commit IDs, and more. This is the unified tool for all commit search operations.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "author": { - "description": "Filter commits by author email or display name", - "type": "string" - }, - "authorEmail": { - "description": "Filter commits by exact author email address", - "type": "string" - }, - "commitIds": { - "description": "Array of specific commit IDs to retrieve. 
When provided, other filters are ignored except top/skip.", - "items": { - "type": "string" - }, - "type": "array" - }, - "committer": { - "description": "Filter commits by committer email or display name", - "type": "string" - }, - "committerEmail": { - "description": "Filter commits by exact committer email address", - "type": "string" - }, - "fromCommit": { - "description": "Starting commit ID", - "type": "string" - }, - "fromDate": { - "description": "Filter commits from this date (ISO 8601 format, e.g., '2024-01-01T00:00:00Z')", - "type": "string" - }, - "historySimplificationMode": { - "description": "How to simplify the commit history", - "enum": [ - "FirstParent", - "SimplifyMerges", - "FullHistory", - "FullHistorySimplifyMerges" - ], - "type": "string" - }, - "includeLinks": { - "default": false, - "description": "Include commit links", - "type": "boolean" - }, - "includeWorkItems": { - "default": false, - "description": "Include associated work items", - "type": "boolean" - }, - "project": { - "description": "Project name or ID", - "type": "string" - }, - "repository": { - "description": "Repository name or ID", - "type": "string" - }, - "searchText": { - "description": "Search text to filter commits by description/comment. 
Supports partial matching.", - "type": "string" - }, - "skip": { - "default": 0, - "description": "Number of commits to skip", - "type": "number" - }, - "toCommit": { - "description": "Ending commit ID", - "type": "string" - }, - "toDate": { - "description": "Filter commits to this date (ISO 8601 format, e.g., '2024-12-31T23:59:59Z')", - "type": "string" - }, - "top": { - "default": 10, - "description": "Maximum number of commits to return", - "type": "number" - }, - "version": { - "description": "The name of the branch, tag or commit to filter commits by", - "type": "string" - }, - "versionType": { - "default": "Branch", - "description": "The meaning of the version parameter, e.g., branch, tag or commit", - "enum": [ - "Branch", - "Tag", - "Commit" - ], - "type": "string" - } - }, - "required": [ - "project", - "repository" - ], - "type": "object" - } - }, - { - "name": "repo_list_pull_requests_by_commits", - "description": "Lists pull requests by commit IDs to find which pull requests contain specific commits", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "commits": { - "description": "Array of commit IDs to query for", - "items": { - "type": "string" - }, - "type": "array" - }, - "project": { - "description": "Project name or ID", - "type": "string" - }, - "queryType": { - "default": "LastMergeCommit", - "description": "Type of query to perform", - "enum": [ - "NotSet", - "LastMergeCommit", - "Commit" - ], - "type": "string" - }, - "repository": { - "description": "Repository name or ID", - "type": "string" - } - }, - "required": [ - "project", - "repository", - "commits" - ], - "type": "object" - } - }, - { - "name": "wit_list_backlogs", - "description": "Receive a list of backlogs for a given project and team.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "project": { - "description": "The name 
or ID of the Azure DevOps project.", - "type": "string" - }, - "team": { - "description": "The name or ID of the Azure DevOps team.", - "type": "string" - } - }, - "required": [ - "project", - "team" - ], - "type": "object" - } - }, - { - "name": "wit_list_backlog_work_items", - "description": "Retrieve a list of backlogs of for a given project, team, and backlog category", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "backlogId": { - "description": "The ID of the backlog category to retrieve work items from.", - "type": "string" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "team": { - "description": "The name or ID of the Azure DevOps team.", - "type": "string" - } - }, - "required": [ - "project", - "team", - "backlogId" - ], - "type": "object" - } - }, - { - "name": "wit_my_work_items", - "description": "Retrieve a list of work items relevent to the authenticated user.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "includeCompleted": { - "default": false, - "description": "Whether to include completed work items. Defaults to false.", - "type": "boolean" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "top": { - "default": 50, - "description": "The maximum number of work items to return. Defaults to 50.", - "type": "number" - }, - "type": { - "default": "assignedtome", - "description": "The type of work items to retrieve. 
Defaults to 'assignedtome'.", - "enum": [ - "assignedtome", - "myactivity" - ], - "type": "string" - } - }, - "required": [ - "project" - ], - "type": "object" - } - }, - { - "name": "wit_get_work_items_batch_by_ids", - "description": "Retrieve list of work items by IDs in batch.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "fields": { - "description": "Optional list of fields to include in the response. If not provided, a hardcoded default set of fields will be used.", - "items": { - "type": "string" - }, - "type": "array" - }, - "ids": { - "description": "The IDs of the work items to retrieve.", - "items": { - "type": "number" - }, - "type": "array" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - } - }, - "required": [ - "project", - "ids" - ], - "type": "object" - } - }, - { - "name": "wit_get_work_item", - "description": "Get a single work item by ID.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "asOf": { - "description": "Optional date string to retrieve the work item as of a specific time. If not provided, the current state will be returned.", - "format": "date-time", - "type": "string" - }, - "expand": { - "description": "Expand options include 'all', 'fields', 'links', 'none', and 'relations'. Relations can be used to get child workitems. Defaults to 'none'.", - "enum": [ - "all", - "fields", - "links", - "none", - "relations" - ], - "type": "string" - }, - "fields": { - "description": "Optional list of fields to include in the response. 
If not provided, all fields will be returned.", - "items": { - "type": "string" - }, - "type": "array" - }, - "id": { - "description": "The ID of the work item to retrieve.", - "type": "number" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - } - }, - "required": [ - "id", - "project" - ], - "type": "object" - } - }, - { - "name": "wit_list_work_item_comments", - "description": "Retrieve list of comments for a work item by ID.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "top": { - "default": 50, - "description": "Optional number of comments to retrieve. Defaults to all comments.", - "type": "number" - }, - "workItemId": { - "description": "The ID of the work item to retrieve comments for.", - "type": "number" - } - }, - "required": [ - "project", - "workItemId" - ], - "type": "object" - } - }, - { - "name": "wit_add_work_item_comment", - "description": "Add comment to a work item by ID.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "comment": { - "description": "The text of the comment to add to the work item.", - "type": "string" - }, - "format": { - "default": "html", - "enum": [ - "markdown", - "html" - ], - "type": "string" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "workItemId": { - "description": "The ID of the work item to add a comment to.", - "type": "number" - } - }, - "required": [ - "project", - "workItemId", - "comment" - ], - "type": "object" - } - }, - { - "name": "wit_list_work_item_revisions", - "description": "Retrieve list of revisions for a work item by ID.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - 
"additionalProperties": false, - "properties": { - "expand": { - "default": "None", - "description": "Optional expand parameter to include additional details. Defaults to 'None'.", - "enum": [ - "None", - "Relations", - "Fields", - "Links", - "All" - ], - "type": "string" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "skip": { - "description": "Optional number of revisions to skip for pagination. Defaults to 0.", - "type": "number" - }, - "top": { - "default": 50, - "description": "Optional number of revisions to retrieve. If not provided, all revisions will be returned.", - "type": "number" - }, - "workItemId": { - "description": "The ID of the work item to retrieve revisions for.", - "type": "number" - } - }, - "required": [ - "project", - "workItemId" - ], - "type": "object" - } - }, - { - "name": "wit_add_child_work_items", - "description": "Create one or many child work items from a parent by work item type and parent id.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "items": { - "items": { - "additionalProperties": false, - "properties": { - "areaPath": { - "description": "Optional area path for the child work item.", - "type": "string" - }, - "description": { - "description": "The description of the child work item.", - "type": "string" - }, - "format": { - "default": "Html", - "description": "Format for the description on the child work item, e.g., 'Markdown', 'Html'. 
Defaults to 'Html'.", - "enum": [ - "Markdown", - "Html" - ], - "type": "string" - }, - "iterationPath": { - "description": "Optional iteration path for the child work item.", - "type": "string" - }, - "title": { - "description": "The title of the child work item.", - "type": "string" - } - }, - "required": [ - "title", - "description" - ], - "type": "object" - }, - "type": "array" - }, - "parentId": { - "description": "The ID of the parent work item to create a child work item under.", - "type": "number" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "workItemType": { - "description": "The type of the child work item to create.", - "type": "string" - } - }, - "required": [ - "parentId", - "project", - "workItemType", - "items" - ], - "type": "object" - } - }, - { - "name": "wit_link_work_item_to_pull_request", - "description": "Link a single work item to an existing pull request.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "projectId": { - "description": "The project ID of the Azure DevOps project (note: project name is not valid).", - "type": "string" - }, - "pullRequestId": { - "description": "The ID of the pull request to link to.", - "type": "number" - }, - "pullRequestProjectId": { - "description": "The project ID containing the pull request. If not provided, defaults to the work item's project ID (for same-project linking).", - "type": "string" - }, - "repositoryId": { - "description": "The ID of the repository containing the pull request. 
Do not use the repository name here, use the ID instead.", - "type": "string" - }, - "workItemId": { - "description": "The ID of the work item to link to the pull request.", - "type": "number" - } - }, - "required": [ - "projectId", - "repositoryId", - "pullRequestId", - "workItemId" - ], - "type": "object" - } - }, - { - "name": "wit_get_work_items_for_iteration", - "description": "Retrieve a list of work items for a specified iteration.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "iterationId": { - "description": "The ID of the iteration to retrieve work items for.", - "type": "string" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "team": { - "description": "The name or ID of the Azure DevOps team. If not provided, the default team will be used.", - "type": "string" - } - }, - "required": [ - "project", - "iterationId" - ], - "type": "object" - } - }, - { - "name": "wit_update_work_item", - "description": "Update a work item by ID with specified fields.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "id": { - "description": "The ID of the work item to update.", - "type": "number" - }, - "updates": { - "description": "An array of field updates to apply to the work item.", - "items": { - "additionalProperties": false, - "properties": { - "op": { - "default": "add", - "description": "The operation to perform on the field.", - "type": "string" - }, - "path": { - "description": "The path of the field to update, e.g., '/fields/System.Title'.", - "type": "string" - }, - "value": { - "description": "The new value for the field. 
This is required for 'Add' and 'Replace' operations, and should be omitted for 'Remove' operations.", - "type": "string" - } - }, - "required": [ - "path", - "value" - ], - "type": "object" - }, - "type": "array" - } - }, - "required": [ - "id", - "updates" - ], - "type": "object" - } - }, - { - "name": "wit_get_work_item_type", - "description": "Get a specific work item type.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "workItemType": { - "description": "The name of the work item type to retrieve.", - "type": "string" - } - }, - "required": [ - "project", - "workItemType" - ], - "type": "object" - } - }, - { - "name": "wit_create_work_item", - "description": "Create a new work item in a specified project and work item type.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "fields": { - "description": "A record of field names and values to set on the new work item. Each fild is the field name and each value is the corresponding value to set for that field.", - "items": { - "additionalProperties": false, - "properties": { - "format": { - "description": "the format of the field value, e.g., 'Html', 'Markdown'. 
Optional, defaults to 'Html'.", - "enum": [ - "Html", - "Markdown" - ], - "type": "string" - }, - "name": { - "description": "The name of the field, e.g., 'System.Title'.", - "type": "string" - }, - "value": { - "description": "The value of the field.", - "type": "string" - } - }, - "required": [ - "name", - "value" - ], - "type": "object" - }, - "type": "array" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "workItemType": { - "description": "The type of work item to create, e.g., 'Task', 'Bug', etc.", - "type": "string" - } - }, - "required": [ - "project", - "workItemType", - "fields" - ], - "type": "object" - } - }, - { - "name": "wit_get_query", - "description": "Get a query by its ID or path.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "depth": { - "default": 0, - "description": "Optional depth parameter to specify how deep to expand the query. Defaults to 0.", - "type": "number" - }, - "expand": { - "description": "Optional expand parameter to include additional details in the response. Defaults to 'None'.", - "enum": [ - "None", - "Wiql", - "Clauses", - "All", - "Minimal" - ], - "type": "string" - }, - "includeDeleted": { - "default": false, - "description": "Whether to include deleted items in the query results. Defaults to false.", - "type": "boolean" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "query": { - "description": "The ID or path of the query to retrieve.", - "type": "string" - }, - "useIsoDateFormat": { - "default": false, - "description": "Whether to use ISO date format in the response. Defaults to false.", - "type": "boolean" - } - }, - "required": [ - "project", - "query" - ], - "type": "object" - } - }, - { - "name": "wit_get_query_results_by_id", - "description": "Retrieve the results of a work item query given the query ID. 
Supports full or IDs-only response types.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "id": { - "description": "The ID of the query to retrieve results for.", - "type": "string" - }, - "project": { - "description": "The name or ID of the Azure DevOps project. If not provided, the default project will be used.", - "type": "string" - }, - "responseType": { - "default": "full", - "description": "Response type: 'full' returns complete query results (default), 'ids' returns only work item IDs for reduced payload size.", - "enum": [ - "full", - "ids" - ], - "type": "string" - }, - "team": { - "description": "The name or ID of the Azure DevOps team. If not provided, the default team will be used.", - "type": "string" - }, - "timePrecision": { - "description": "Whether to include time precision in the results. Defaults to false.", - "type": "boolean" - }, - "top": { - "default": 50, - "description": "The maximum number of results to return. Defaults to 50.", - "type": "number" - } - }, - "required": [ - "id" - ], - "type": "object" - } - }, - { - "name": "wit_update_work_items_batch", - "description": "Update work items in batch", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "updates": { - "description": "An array of updates to apply to work items. Each update should include the operation (op), work item ID (id), field path (path), and new value (value).", - "items": { - "additionalProperties": false, - "properties": { - "format": { - "description": "The format of the field value. Only to be used for large text fields. e.g., 'Html', 'Markdown'. 
Optional, defaults to 'Html'.", - "enum": [ - "Html", - "Markdown" - ], - "type": "string" - }, - "id": { - "description": "The ID of the work item to update.", - "type": "number" - }, - "op": { - "default": "Add", - "description": "The operation to perform on the field.", - "enum": [ - "Add", - "Replace", - "Remove" - ], - "type": "string" - }, - "path": { - "description": "The path of the field to update, e.g., '/fields/System.Title'.", - "type": "string" - }, - "value": { - "description": "The new value for the field. This is required for 'add' and 'replace' operations, and should be omitted for 'remove' operations.", - "type": "string" - } - }, - "required": [ - "id", - "path", - "value" - ], - "type": "object" - }, - "type": "array" - } - }, - "required": [ - "updates" - ], - "type": "object" - } - }, - { - "name": "wit_work_items_link", - "description": "Link work items together in batch.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "updates": { - "items": { - "additionalProperties": false, - "properties": { - "comment": { - "description": "Optional comment to include with the link. This can be used to provide additional context for the link being created.", - "type": "string" - }, - "id": { - "description": "The ID of the work item to update.", - "type": "number" - }, - "linkToId": { - "description": "The ID of the work item to link to.", - "type": "number" - }, - "type": { - "default": "related", - "description": "Type of link to create between the work items. Options include 'parent', 'child', 'duplicate', 'duplicate of', 'related', 'successor', 'predecessor', 'tested by', 'tests', 'affects', and 'affected by'. 
Defaults to 'related'.", - "enum": [ - "parent", - "child", - "duplicate", - "duplicate of", - "related", - "successor", - "predecessor", - "tested by", - "tests", - "affects", - "affected by" - ], - "type": "string" - } - }, - "required": [ - "id", - "linkToId" - ], - "type": "object" - }, - "type": "array" - } - }, - "required": [ - "project", - "updates" - ], - "type": "object" - } - }, - { - "name": "wit_work_item_unlink", - "description": "Remove one or many links from a single work item", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "id": { - "description": "The ID of the work item to remove the links from.", - "type": "number" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "type": { - "default": "related", - "description": "Type of link to remove. Options include 'parent', 'child', 'duplicate', 'duplicate of', 'related', 'successor', 'predecessor', 'tested by', 'tests', 'affects', 'affected by', and 'artifact'. Defaults to 'related'.", - "enum": [ - "parent", - "child", - "duplicate", - "duplicate of", - "related", - "successor", - "predecessor", - "tested by", - "tests", - "affects", - "affected by", - "artifact" - ], - "type": "string" - }, - "url": { - "description": "Optional URL to match for the link to remove. If not provided, all links of the specified type will be removed.", - "type": "string" - } - }, - "required": [ - "project", - "id" - ], - "type": "object" - } - }, - { - "name": "wit_add_artifact_link", - "description": "Add artifact links (repository, branch, commit, builds) to work items. You can either provide the full vstfs URI or the individual components to build it automatically.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "artifactUri": { - "description": "The complete VSTFS URI of the artifact to link. 
If provided, individual component parameters are ignored.", - "type": "string" - }, - "branchName": { - "description": "The branch name (e.g., 'main'). Required when linkType is 'Branch'.", - "type": "string" - }, - "buildId": { - "description": "The build ID. Required when linkType is 'Build', 'Found in build', or 'Integrated in build'.", - "type": "number" - }, - "comment": { - "description": "Comment to include with the artifact link.", - "type": "string" - }, - "commitId": { - "description": "The commit SHA hash. Required when linkType is 'Fixed in Commit'.", - "type": "string" - }, - "linkType": { - "default": "Branch", - "description": "Type of artifact link, defaults to 'Branch'. This determines both the link type and how to build the VSTFS URI from individual components.", - "enum": [ - "Branch", - "Build", - "Fixed in Changeset", - "Fixed in Commit", - "Found in build", - "Integrated in build", - "Model Link", - "Pull Request", - "Related Workitem", - "Result Attachment", - "Source Code File", - "Tag", - "Test Result", - "Wiki" - ], - "type": "string" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "projectId": { - "description": "The project ID (GUID) containing the artifact. Required for Git artifacts when artifactUri is not provided.", - "type": "string" - }, - "pullRequestId": { - "description": "The pull request ID. Required when linkType is 'Pull Request'.", - "type": "number" - }, - "repositoryId": { - "description": "The repository ID (GUID) containing the artifact. 
Required for Git artifacts when artifactUri is not provided.", - "type": "string" - }, - "workItemId": { - "description": "The ID of the work item to add the artifact link to.", - "type": "number" - } - }, - "required": [ - "workItemId", - "project" - ], - "type": "object" - } - }, - { - "name": "wiki_get_wiki", - "description": "Get the wiki by wikiIdentifier", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "project": { - "description": "The project name or ID where the wiki is located. If not provided, the default project will be used.", - "type": "string" - }, - "wikiIdentifier": { - "description": "The unique identifier of the wiki.", - "type": "string" - } - }, - "required": [ - "wikiIdentifier" - ], - "type": "object" - } - }, - { - "name": "wiki_list_wikis", - "description": "Retrieve a list of wikis for an organization or project.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "project": { - "description": "The project name or ID to filter wikis. If not provided, all wikis in the organization will be returned.", - "type": "string" - } - }, - "type": "object" - } - }, - { - "name": "wiki_list_pages", - "description": "Retrieve a list of wiki pages for a specific wiki and project.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "continuationToken": { - "description": "Token for pagination to retrieve the next set of pages.", - "type": "string" - }, - "pageViewsForDays": { - "description": "Number of days to retrieve page views for. If not specified, page views are not included.", - "type": "number" - }, - "project": { - "description": "The project name or ID where the wiki is located.", - "type": "string" - }, - "top": { - "default": 20, - "description": "The maximum number of pages to return. 
Defaults to 20.", - "type": "number" - }, - "wikiIdentifier": { - "description": "The unique identifier of the wiki.", - "type": "string" - } - }, - "required": [ - "wikiIdentifier", - "project" - ], - "type": "object" - } - }, - { - "name": "wiki_get_page", - "description": "Retrieve wiki page metadata by path. This tool does not return page content.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "path": { - "description": "The path of the wiki page (e.g., '/Home' or '/Documentation/Setup').", - "type": "string" - }, - "project": { - "description": "The project name or ID where the wiki is located.", - "type": "string" - }, - "recursionLevel": { - "description": "Recursion level for subpages. 'None' returns only the specified page. 'OneLevel' includes direct children. 'Full' includes all descendants.", - "enum": [ - "None", - "OneLevel", - "OneLevelPlusNestedEmptyFolders", - "Full" - ], - "type": "string" - }, - "wikiIdentifier": { - "description": "The unique identifier of the wiki.", - "type": "string" - } - }, - "required": [ - "wikiIdentifier", - "project", - "path" - ], - "type": "object" - } - }, - { - "name": "wiki_get_page_content", - "description": "Retrieve wiki page content. Provide either a 'url' parameter OR the combination of 'wikiIdentifier' and 'project' parameters.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "path": { - "description": "The path of the wiki page to retrieve content for. Optional, defaults to root page if not provided.", - "type": "string" - }, - "project": { - "description": "The project name or ID where the wiki is located. Required if url is not provided.", - "type": "string" - }, - "url": { - "description": "The full URL of the wiki page to retrieve content for. If provided, wikiIdentifier, project, and path are ignored. 
Supported patterns: https://dev.azure.com/{org}/{project}/_wiki/wikis/{wikiIdentifier}?pagePath=%2FMy%20Page and https://dev.azure.com/{org}/{project}/_wiki/wikis/{wikiIdentifier}/{pageId}/Page-Title", - "type": "string" - }, - "wikiIdentifier": { - "description": "The unique identifier of the wiki. Required if url is not provided.", - "type": "string" - } - }, - "type": "object" - } - }, - { - "name": "wiki_create_or_update_page", - "description": "Create or update a wiki page with content.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "branch": { - "default": "wikiMaster", - "description": "The branch name for the wiki repository. Defaults to 'wikiMaster' which is the default branch for Azure DevOps wikis.", - "type": "string" - }, - "content": { - "description": "The content of the wiki page in markdown format.", - "type": "string" - }, - "etag": { - "description": "ETag for editing existing pages (optional, will be fetched if not provided).", - "type": "string" - }, - "path": { - "description": "The path of the wiki page (e.g., '/Home' or '/Documentation/Setup').", - "type": "string" - }, - "project": { - "description": "The project name or ID where the wiki is located. If not provided, the default project will be used.", - "type": "string" - }, - "wikiIdentifier": { - "description": "The unique identifier or name of the wiki.", - "type": "string" - } - }, - "required": [ - "wikiIdentifier", - "path", - "content" - ], - "type": "object" - } - }, - { - "name": "testplan_list_test_plans", - "description": "Retrieve a paginated list of test plans from an Azure DevOps project. 
Allows filtering for active plans and toggling detailed information.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "continuationToken": { - "description": "Token to continue fetching test plans from a previous request.", - "type": "string" - }, - "filterActivePlans": { - "default": true, - "description": "Filter to include only active test plans. Defaults to true.", - "type": "boolean" - }, - "includePlanDetails": { - "default": false, - "description": "Include detailed information about each test plan.", - "type": "boolean" - }, - "project": { - "description": "The unique identifier (ID or name) of the Azure DevOps project.", - "type": "string" - } - }, - "required": [ - "project" - ], - "type": "object" - } - }, - { - "name": "testplan_create_test_plan", - "description": "Creates a new test plan in the project.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "areaPath": { - "description": "The area path for the test plan", - "type": "string" - }, - "description": { - "description": "The description of the test plan", - "type": "string" - }, - "endDate": { - "description": "The end date of the test plan", - "type": "string" - }, - "iteration": { - "description": "The iteration path for the test plan", - "type": "string" - }, - "name": { - "description": "The name of the test plan to be created.", - "type": "string" - }, - "project": { - "description": "The unique identifier (ID or name) of the Azure DevOps project where the test plan will be created.", - "type": "string" - }, - "startDate": { - "description": "The start date of the test plan", - "type": "string" - } - }, - "required": [ - "project", - "name", - "iteration" - ], - "type": "object" - } - }, - { - "name": "testplan_create_test_suite", - "description": "Creates a new test suite in a test plan.", - "input_schema": { - "$schema": 
"http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "name": { - "description": "Name of the child test suite", - "type": "string" - }, - "parentSuiteId": { - "description": "ID of the parent suite under which the new suite will be created, if not given by user this can be id of a root suite of the test plan", - "type": "number" - }, - "planId": { - "description": "ID of the test plan that contains the suites", - "type": "number" - }, - "project": { - "description": "Project ID or project name", - "type": "string" - } - }, - "required": [ - "project", - "planId", - "parentSuiteId", - "name" - ], - "type": "object" - } - }, - { - "name": "testplan_add_test_cases_to_suite", - "description": "Adds existing test cases to a test suite.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "planId": { - "description": "The ID of the test plan.", - "type": "number" - }, - "project": { - "description": "The unique identifier (ID or name) of the Azure DevOps project.", - "type": "string" - }, - "suiteId": { - "description": "The ID of the test suite.", - "type": "number" - }, - "testCaseIds": { - "anyOf": [ - { - "type": "string" - }, - { - "items": { - "type": "string" - }, - "type": "array" - } - ], - "description": "The ID(s) of the test case(s) to add. 
" - } - }, - "required": [ - "project", - "planId", - "suiteId", - "testCaseIds" - ], - "type": "object" - } - }, - { - "name": "testplan_create_test_case", - "description": "Creates a new test case work item.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "areaPath": { - "description": "The area path for the test case.", - "type": "string" - }, - "iterationPath": { - "description": "The iteration path for the test case.", - "type": "string" - }, - "priority": { - "description": "The priority of the test case.", - "type": "number" - }, - "project": { - "description": "The unique identifier (ID or name) of the Azure DevOps project.", - "type": "string" - }, - "steps": { - "description": "The steps to reproduce the test case. Make sure to format each step as '1. Step one|Expected result one\n2. Step two|Expected result two. USE '|' as the delimiter between step and expected result. DO NOT use '|' in the description of the step or expected result.", - "type": "string" - }, - "testsWorkItemId": { - "description": "Optional work item id that will be set as a Microsoft.VSTS.Common.TestedBy-Reverse link to the test case.", - "type": "number" - }, - "title": { - "description": "The title of the test case.", - "type": "string" - } - }, - "required": [ - "project", - "title" - ], - "type": "object" - } - }, - { - "name": "testplan_update_test_case_steps", - "description": "Update an existing test case work item.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "id": { - "description": "The ID of the test case work item to update.", - "type": "number" - }, - "steps": { - "description": "The steps to reproduce the test case. Make sure to format each step as '1. Step one|Expected result one\n2. Step two|Expected result two. USE '|' as the delimiter between step and expected result. 
DO NOT use '|' in the description of the step or expected result.", - "type": "string" - } - }, - "required": [ - "id", - "steps" - ], - "type": "object" - } - }, - { - "name": "testplan_list_test_cases", - "description": "Gets a list of test cases in the test plan.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "planid": { - "description": "The ID of the test plan.", - "type": "number" - }, - "project": { - "description": "The unique identifier (ID or name) of the Azure DevOps project.", - "type": "string" - }, - "suiteid": { - "description": "The ID of the test suite.", - "type": "number" - } - }, - "required": [ - "project", - "planid", - "suiteid" - ], - "type": "object" - } - }, - { - "name": "testplan_show_test_results_from_build_id", - "description": "Gets a list of test results for a given project and build ID.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "buildid": { - "description": "The ID of the build.", - "type": "number" - }, - "project": { - "description": "The unique identifier (ID or name) of the Azure DevOps project.", - "type": "string" - } - }, - "required": [ - "project", - "buildid" - ], - "type": "object" - } - }, - { - "name": "testplan_list_test_suites", - "description": "Retrieve a paginated list of test suites from an Azure DevOps project and Test Plan Id.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "continuationToken": { - "description": "Token to continue fetching test plans from a previous request.", - "type": "string" - }, - "planId": { - "description": "The ID of the test plan.", - "type": "number" - }, - "project": { - "description": "The unique identifier (ID or name) of the Azure DevOps project.", - "type": "string" - } - }, - "required": [ - "project", - "planId" - ], - "type": 
"object" - } - }, - { - "name": "search_code", - "description": "Search Azure DevOps Repositories for a given search text", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "branch": { - "description": "Filter by branches", - "items": { - "type": "string" - }, - "type": "array" - }, - "includeFacets": { - "default": false, - "description": "Include facets in the search results", - "type": "boolean" - }, - "path": { - "description": "Filter by paths", - "items": { - "type": "string" - }, - "type": "array" - }, - "project": { - "description": "Filter by projects", - "items": { - "type": "string" - }, - "type": "array" - }, - "repository": { - "description": "Filter by repositories", - "items": { - "type": "string" - }, - "type": "array" - }, - "searchText": { - "description": "Keywords to search for in code repositories", - "type": "string" - }, - "skip": { - "default": 0, - "description": "Number of results to skip", - "type": "number" - }, - "top": { - "default": 5, - "description": "Maximum number of results to return", - "type": "number" - } - }, - "required": [ - "searchText" - ], - "type": "object" - } - }, - { - "name": "search_wiki", - "description": "Search Azure DevOps Wiki for a given search text", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "includeFacets": { - "default": false, - "description": "Include facets in the search results", - "type": "boolean" - }, - "project": { - "description": "Filter by projects", - "items": { - "type": "string" - }, - "type": "array" - }, - "searchText": { - "description": "Keywords to search for wiki pages", - "type": "string" - }, - "skip": { - "default": 0, - "description": "Number of results to skip", - "type": "number" - }, - "top": { - "default": 10, - "description": "Maximum number of results to return", - "type": "number" - }, - "wiki": { - 
"description": "Filter by wiki names", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "required": [ - "searchText" - ], - "type": "object" - } - }, - { - "name": "search_workitem", - "description": "Get Azure DevOps Work Item search results for a given search text", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "areaPath": { - "description": "Filter by area paths", - "items": { - "type": "string" - }, - "type": "array" - }, - "assignedTo": { - "description": "Filter by assigned to users", - "items": { - "type": "string" - }, - "type": "array" - }, - "includeFacets": { - "default": false, - "description": "Include facets in the search results", - "type": "boolean" - }, - "project": { - "description": "Filter by projects", - "items": { - "type": "string" - }, - "type": "array" - }, - "searchText": { - "description": "Search text to find in work items", - "type": "string" - }, - "skip": { - "default": 0, - "description": "Number of results to skip for pagination", - "type": "number" - }, - "state": { - "description": "Filter by work item states", - "items": { - "type": "string" - }, - "type": "array" - }, - "top": { - "default": 10, - "description": "Number of results to return", - "type": "number" - }, - "workItemType": { - "description": "Filter by work item types", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "required": [ - "searchText" - ], - "type": "object" - } - }, - { - "name": "advsec_get_alerts", - "description": "Retrieve Advanced Security alerts for a repository.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "alertType": { - "description": "Filter alerts by type. 
If not specified, returns all alert types.", - "enum": [ - "Unknown", - "Dependency", - "Secret", - "Code", - "License" - ], - "type": "string" - }, - "confidenceLevels": { - "default": [ - "high", - "other" - ], - "description": "Filter alerts by confidence levels. Only applicable for secret alerts. Defaults to both 'high' and 'other'.", - "items": { - "enum": [ - "High", - "Other" - ], - "type": "string" - }, - "type": "array" - }, - "continuationToken": { - "description": "Continuation token for pagination.", - "type": "string" - }, - "onlyDefaultBranch": { - "default": true, - "description": "If true, only return alerts found on the default branch. Defaults to true.", - "type": "boolean" - }, - "orderBy": { - "default": "severity", - "description": "Order results by specified field. Defaults to 'severity'.", - "enum": [ - "id", - "firstSeen", - "lastSeen", - "fixedOn", - "severity" - ], - "type": "string" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "ref": { - "description": "Filter alerts by git reference (branch). If not provided and onlyDefaultBranch is true, only includes alerts from default branch.", - "type": "string" - }, - "repository": { - "description": "The name or ID of the repository to get alerts for.", - "type": "string" - }, - "ruleId": { - "description": "Filter alerts by rule ID.", - "type": "string" - }, - "ruleName": { - "description": "Filter alerts by rule name.", - "type": "string" - }, - "severities": { - "description": "Filter alerts by severity level. If not specified, returns alerts at any severity.", - "items": { - "enum": [ - "Low", - "Medium", - "High", - "Critical", - "Note", - "Warning", - "Error", - "Undefined" - ], - "type": "string" - }, - "type": "array" - }, - "states": { - "description": "Filter alerts by state. 
If not specified, returns alerts in any state.", - "items": { - "enum": [ - "Unknown", - "Active", - "Dismissed", - "Fixed", - "AutoDismissed" - ], - "type": "string" - }, - "type": "array" - }, - "toolName": { - "description": "Filter alerts by tool name.", - "type": "string" - }, - "top": { - "default": 100, - "description": "Maximum number of alerts to return. Defaults to 100.", - "type": "number" - }, - "validity": { - "description": "Filter alerts by validity status. Only applicable for secret alerts.", - "items": { - "enum": [ - "None", - "Unknown", - "Active", - "Inactive" - ], - "type": "string" - }, - "type": "array" - } - }, - "required": [ - "project", - "repository", - "confidenceLevels" - ], - "type": "object" - } - }, - { - "name": "advsec_get_alert_details", - "description": "Get detailed information about a specific Advanced Security alert.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "alertId": { - "description": "The ID of the alert to retrieve details for.", - "type": "number" - }, - "project": { - "description": "The name or ID of the Azure DevOps project.", - "type": "string" - }, - "ref": { - "description": "Git reference (branch) to filter the alert.", - "type": "string" - }, - "repository": { - "description": "The name or ID of the repository containing the alert.", - "type": "string" - } - }, - "required": [ - "project", - "repository", - "alertId" - ], - "type": "object" - } - } - ], - "refreshed_at": "2026-01-29T15:08:50.565179300+00:00" - }, - "msft-learn": { - "name": "msft-learn", - "builtin": true, - "tools": [ - { - "name": "microsoft_docs_search", - "description": "Search official Microsoft/Azure documentation to find the most relevant and trustworthy content for a user's query. This tool returns up to 10 high-quality content chunks (each max 500 tokens), extracted from Microsoft Learn and other official sources. 
Each result includes the article title, URL, and a self-contained content excerpt optimized for fast retrieval and reasoning. Always use this tool to quickly ground your answers in accurate, first-party Microsoft/Azure knowledge.\n\n## Follow-up Pattern\nTo ensure completeness, use microsoft_docs_fetch when high-value pages are identified by search. The fetch tool complements search by providing the full detail. This is a required step for comprehensive results.", - "input_schema": { - "properties": { - "query": { - "default": null, - "description": "a query or topic about Microsoft/Azure products, services, platforms, developer tools, frameworks, or APIs", - "type": "string" - } - }, - "type": "object" - } - }, - { - "name": "microsoft_code_sample_search", - "description": "Search for code snippets and examples in official Microsoft Learn documentation. This tool retrieves relevant code samples from Microsoft documentation pages providing developers with practical implementation examples and best practices for Microsoft/Azure products and services related coding tasks. This tool will help you use the **LATEST OFFICIAL** code snippets to empower coding capabilities.\n\n## When to Use This Tool\n- When you are going to provide sample Microsoft/Azure related code snippets in your answers.\n- When you are **generating any Microsoft/Azure related code**.\n\n## Usage Pattern\nInput a descriptive query, or SDK/class/method name to retrieve related code samples. The optional parameter `language` can help to filter results.\n\nEligible values for `language` parameter include: csharp javascript typescript python powershell azurecli al sql java kusto cpp go rust ruby php", - "input_schema": { - "properties": { - "language": { - "default": null, - "description": "Optional parameter specifying the programming language of code snippets to retrieve. Can significantly improve search quality if provided. 
Eligible values: csharp javascript typescript python powershell azurecli al sql java kusto cpp go rust ruby php", - "type": "string" - }, - "query": { - "description": "a descriptive query, SDK name, method name or code snippet related to Microsoft/Azure products, services, platforms, developer tools, frameworks, APIs or SDKs", - "type": "string" - } - }, - "required": [ - "query" - ], - "type": "object" - } - }, - { - "name": "microsoft_docs_fetch", - "description": "Fetch and convert a Microsoft Learn documentation page to markdown format. This tool retrieves the latest complete content of Microsoft documentation pages including Azure, .NET, Microsoft 365, and other Microsoft technologies.\n\n## When to Use This Tool\n- When search results provide incomplete information or truncated content\n- When you need complete step-by-step procedures or tutorials\n- When you need troubleshooting sections, prerequisites, or detailed explanations\n- When search results reference a specific page that seems highly relevant\n- For comprehensive guides that require full context\n\n## Usage Pattern\nUse this tool AFTER microsoft_docs_search when you identify specific high-value pages that need complete content. 
The search tool gives you an overview; this tool gives you the complete picture.\n\n## URL Requirements\n- The URL must be a valid link from the microsoft.com domain.\n\n## Output Format\nmarkdown with headings, code blocks, tables, and links preserved.", - "input_schema": { - "properties": { - "url": { - "description": "URL of the Microsoft documentation page to read", - "type": "string" - } - }, - "required": [ - "url" - ], - "type": "object" - } - } - ], - "refreshed_at": "2026-01-29T15:09:30.073716200+00:00" - }, - "bluebird": { - "name": "bluebird", - "builtin": true, - "tools": [ - { - "name": "engineering_copilot_dynamic_tool_invoker", - "description": "Dynamically invokes a tool from engineering_copilot using the tool name and input.The tool input should only comes from the parameter section of the tool from the engineer copilot instructions.", - "input_schema": { - "properties": { - "toolInput": { - "description": "The input to pass to the tool", - "type": "string" - }, - "toolName": { - "description": "The name of the engineering_copilot tool to invoke", - "type": "string" - } - }, - "required": [ - "toolName", - "toolInput" - ], - "type": "object" - } - }, - { - "name": "engineering_copilot", - "description": "Retrieves system instructions and session context for the Engineering Copilot MCP server. 
This tool initializes the session with tailored guidance for handling development tasks.\r\n\r\nWhen to use this tool:\r\n- Call this tool at the start of a new conversation before performing other actions such as searching the workspace, reading files, or analyzing code.\r\n- This tool provides the foundational context that informs how subsequent tools should be selected and used.\r\n- The returned instructions contain information about available capabilities, recommended workflows, and task-specific strategies.\r\n\r\nWhy call this tool first:\r\n- It ensures the agent operates with the correct rules and context for the current session.\r\n- It provides updated system prompts that may affect how other tools behave or should be invoked.\r\n- Skipping this step may result in suboptimal tool selection or missing important session-specific guidance.\r\n\r\nParameters:\r\n- original_user_question: The user's question or prompt, passed exactly as provided.\r\n- conversationHistory: The conversation history for context-aware responses.\r\n\r\nReturns specialized instructions tailored to the user's request and session context.", - "input_schema": { - "properties": { - "conversationHistory": { - "description": "Conversation history", - "type": "string" - }, - "original_user_question": { - "description": "The original user question or prompt. Copy paste it AS IS.", - "type": "string" - } - }, - "required": [ - "original_user_question", - "conversationHistory" - ], - "type": "object" - } - } - ], - "refreshed_at": "2026-01-29T15:08:55.397366500+00:00" - }, - "ado-ext": { - "name": "ado-ext", - "builtin": true, - "tools": [ - { - "name": "get_component_governance_instructions", - "description": "Get specific instructions on how to fix a component governance alert. 
Use this tool to get more specific instructions on how to fix a component governance security alert.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "description": "Request for Azure DevOps Component Governance Instructions", - "properties": { - "alert_id": { - "description": "Alert Id", - "format": "int32", - "type": "integer" - }, - "branch": { - "description": "Branch name (e.g., 'main', 'develop')", - "type": "string" - }, - "organization": { - "description": "Organization Name", - "type": "string" - }, - "project": { - "description": "Project Name or GUID", - "type": "string" - }, - "repository": { - "description": "Repository Id or Name", - "type": "string" - } - }, - "required": [ - "organization", - "project", - "repository", - "alert_id", - "branch" - ], - "title": "ComponentGovernanceInstructionsRequest", - "type": "object" - } - }, - { - "name": "get_component_governance_alert", - "description": "Get the component governance alert for the alert id. 
Use this tool when you need to get additional info for security vulnerabilities, license compliance issues, or other component governance alerts whenever an html link to azure dev ops containing componentGovernance substring in the user prompt.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "description": "Request for Azure DevOps Component Governance Alert", - "properties": { - "alert_id": { - "description": "Alert Id", - "format": "int32", - "type": "integer" - }, - "organization": { - "description": "Organization Name", - "type": "string" - }, - "project": { - "description": "Project Name or GUID", - "type": "string" - }, - "repository": { - "description": "Repository Id or Name", - "type": "string" - } - }, - "required": [ - "organization", - "project", - "repository", - "alert_id" - ], - "title": "ComponentGovernanceAlertRequest", - "type": "object" - } - }, - { - "name": "get_component_governance_alerts", - "description": "Get multiple component governance alerts for a repository. Use this tool to retrieve all alerts or filter by snapshot type or alert state. Alert state enum values: Unknown=0, Active=1, Dismissed=2, Fixed=4, AutoDismissed=8. 
Returns a list of security vulnerabilities, license compliance issues, and other component governance alerts.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "description": "Request for Azure DevOps Component Governance Alerts (multiple)", - "properties": { - "alert_state": { - "description": "Alert State (optional) - Enum integer values: Unknown=0, Active=1, Dismissed=2, Fixed=4, AutoDismissed=8", - "format": "int32", - "nullable": true, - "type": "integer" - }, - "organization": { - "description": "Organization Name", - "type": "string" - }, - "project": { - "description": "Project Name or GUID", - "type": "string" - }, - "repository": { - "description": "Repository Id or Name", - "type": "string" - }, - "snapshot_type_id": { - "description": "Snapshot Type Id (optional) - Filter by snapshot type", - "format": "int32", - "nullable": true, - "type": "integer" - } - }, - "required": [ - "organization", - "project", - "repository" - ], - "title": "ComponentGovernanceAlertsRequest", - "type": "object" - } - }, - { - "name": "get_component_governance_alert_from_link", - "description": "Get component governance alert from a component governance link. 
Example link: https://dev.azure.com/{organization}/{project}/_componentGovernance/{componentGovernanceId}?alertId={alertId}", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "description": "Request for Azure DevOps Component Governance Alert from Link", - "properties": { - "alert_id": { - "description": "Alert Id", - "format": "int32", - "type": "integer" - }, - "organization": { - "description": "Organization Name", - "type": "string" - }, - "project": { - "description": "Project Name or GUID", - "type": "string" - }, - "repository": { - "description": "Repository Id or Name", - "type": "string" - }, - "snapshot_type_id": { - "description": "Snapshot Type Id (optional)", - "nullable": true, - "type": "string" - } - }, - "required": [ - "organization", - "project", - "repository", - "alert_id" - ], - "title": "ComponentGovernanceAlertFromLinkRequest", - "type": "object" - } - } - ], - "refreshed_at": "2026-01-29T15:08:50.737177200+00:00" - }, - "kusto": { - "name": "kusto", - "builtin": true, - "tools": [], - "refreshed_at": "2026-01-29T15:08:50.779286300+00:00", - "error": "Invalid JSON response: " - }, - "icm": { - "name": "icm", - "builtin": true, - "tools": [], - "refreshed_at": "2026-01-29T15:09:25.402397200+00:00", - "error": "Timeout after 30s" - }, - "stack": { - "name": "stack", - "builtin": true, - "tools": [ - { - "name": "push_multiple", - "description": "Push multiple strings onto the stack in the provided order", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "properties": { - "values": { - "description": "Array of string values to push onto the stack", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "required": [ - "values" - ], - "title": "PushMultipleRequest", - "type": "object" - } - }, - { - "name": "peek", - "description": "Peek at the top value without removing it; returns value and remaining count", - "input_schema": { - "properties": {}, - "type": "object" - } - }, 
- { - "name": "pop_all", - "description": "Pop all values from the stack and return them in pop order (top first)", - "input_schema": { - "properties": {}, - "type": "object" - } - }, - { - "name": "clear", - "description": "Clear the stack without returning any values", - "input_schema": { - "properties": {}, - "type": "object" - } - }, - { - "name": "push", - "description": "Push a single string onto the stack", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "properties": { - "value": { - "description": "The string value to push onto the stack", - "type": "string" - } - }, - "required": [ - "value" - ], - "title": "PushRequest", - "type": "object" - } - }, - { - "name": "pop", - "description": "Pop the top value from the stack; returns value and remaining count", - "input_schema": { - "properties": {}, - "type": "object" - } - } - ], - "refreshed_at": "2026-01-29T15:08:44.417568900+00:00" - }, - "calculator": { - "name": "calculator", - "builtin": true, - "tools": [ - { - "name": "sub", - "description": "Calculate the difference of two numbers", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "properties": { - "a": { - "description": "the left hand side number", - "format": "int32", - "type": "integer" - }, - "b": { - "description": "the right hand side number", - "format": "int32", - "type": "integer" - } - }, - "required": [ - "a", - "b" - ], - "title": "SubRequest", - "type": "object" - } - }, - { - "name": "sum", - "description": "Calculate the sum of two numbers", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "properties": { - "a": { - "description": "the left hand side number", - "format": "int32", - "type": "integer" - }, - "b": { - "description": "the right hand side number", - "format": "int32", - "type": "integer" - } - }, - "required": [ - "a", - "b" - ], - "title": "SumRequest", - "type": "object" - } - } - ], - "refreshed_at": 
"2026-01-29T15:08:44.355387200+00:00" - }, - "es-chat": { - "name": "es-chat", - "builtin": true, - "tools": [ - { - "name": "es_resolve", - "description": "Attempts to resolve the identifier of an entity in engineering systems (aka engineering, engsys, es etc.) to information about the entity.", - "input_schema": { - "properties": { - "identifier": { - "description": "An entity identifier (preferably a URL or GUID, sometimes just a numeric ID) that is present in the conversation", - "type": "string" - }, - "question": { - "description": "A self-sufficient version of the question that prompted the call to this resolve tool. DO NOT qualify with 'engineering systems', 'engineering', 'engsys', 'es' etc. as the tool is already scoped to engineering and this will mislead it.", - "type": "string" - } - }, - "required": [ - "identifier", - "question" - ], - "type": "object" - } - }, - { - "name": "es_ask", - "description": "Asks ES Chat a question about engineering systems (aka engineering, engsys, es etc.) ES Chat is an AI-powered support assistant for Microsoft engineering systems that can provide information about engineering systems, assist with onboarding procedures, and help diagnose concerns regarding engineering assets. It searches internal knowledgebases such as the Engineering Hub (aka Eng Hub), ADO wikis and work items, IcM incidents and custom sources, and has a wealth of custom tools that can perform specific engineering tasks.", - "input_schema": { - "properties": { - "question": { - "description": "A self-sufficient version of the question that prompted the call to this search tool. DO NOT qualify with 'engineering systems', 'engineering', 'engsys', 'es' etc. as the tool is already scoped to engineering and this will mislead it.", - "type": "string" - } - }, - "required": [ - "question" - ], - "type": "object" - } - }, - { - "name": "es_search", - "description": "Searches for information across various engineering systems (aka engineering, engsys, es etc.) 
to address the current question.", - "input_schema": { - "properties": { - "keywords": { - "description": "Keywords representing concepts or problems underlying the current question. DO NOT qualify with 'engineering systems', 'engineering', 'engsys', 'es' etc. as the tool is already scoped to engineering and this will mislead it.", - "type": "string" - }, - "question": { - "description": "A self-sufficient version of the question that prompted the call to this search tool. DO NOT qualify with 'engineering systems', 'engineering', 'engsys', 'es' etc. as the tool is already scoped to engineering and this will mislead it.", - "type": "string" - } - }, - "required": [ - "keywords", - "question" - ], - "type": "object" - } - } - ], - "refreshed_at": "2026-01-29T15:09:29.426006+00:00" - }, - "asa": { - "name": "asa", - "builtin": true, - "tools": [ - { - "name": "report_progress", - "description": "Reports progress on the current task. Commits changes and generates a PR description.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "commit_message": { - "description": "Short single-line commit message", - "type": "string" - }, - "pr_description": { - "description": "Markdown checklist of completed/pending work", - "type": "string" - } - }, - "required": [ - "commit_message", - "pr_description" - ], - "type": "object" - } - }, - { - "name": "build_and_validate", - "description": "Builds and validates the codebase. 
Use after making code changes.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "project_type": { - "description": "Override auto-detected type: rust, typescript, dotnet", - "type": "string" - }, - "run_validation": { - "description": "Run linting/formatting checks (default: true)", - "type": "boolean" - }, - "target": { - "description": "Specific target/project to build", - "type": "string" - }, - "working_directory": { - "description": "Working directory (default: git root)", - "type": "string" - } - }, - "required": [], - "type": "object" - } - }, - { - "name": "reply_to_comment", - "description": "Replies to a comment thread on the current PR.", - "input_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "comment_thread_id": { - "description": "ID from in a block", - "type": "string" - }, - "content": { - "description": "Reply message content", - "type": "string" - } - }, - "required": [ - "comment_thread_id", - "content" - ], - "type": "object" - } - } - ], - "refreshed_at": "2026-02-05T00:00:00.000000+00:00" - } - } -} \ No newline at end of file diff --git a/src/allowed_hosts.rs b/src/allowed_hosts.rs index 417ac81..d5884eb 100644 --- a/src/allowed_hosts.rs +++ b/src/allowed_hosts.rs @@ -54,6 +54,9 @@ pub static CORE_ALLOWED_HOSTS: &[&str] = &[ // ===== Agency / Copilot configuration ===== "config.edge.skype.com", // Note: 168.63.129.16 (Azure DNS) is handled separately as it's an IP + // Note: host.docker.internal is NOT in CORE — it's always added by the + // standalone compiler in generate_allowed_domains (standalone always uses + // MCPG, which needs host access from the AWF container). ]; /// Hosts required by specific MCP servers. 
diff --git a/src/compile/common.rs b/src/compile/common.rs index 8f31404..914e98a 100644 --- a/src/compile/common.rs +++ b/src/compile/common.rs @@ -4,12 +4,12 @@ use anyhow::{Context, Result}; use super::types::{FrontMatter, McpConfig, Repository, TriggerConfig}; use crate::fuzzy_schedule; -use crate::mcp_metadata::McpMetadataFile; -/// Check if an MCP name is a built-in (launched via agency mcp) -pub fn is_builtin_mcp(name: &str) -> bool { - let metadata = McpMetadataFile::bundled(); - metadata.get(name).map(|m| m.builtin).unwrap_or(false) +/// Check if an MCP has a custom command (i.e., is not just a name-based reference). +/// All MCPs now require explicit command configuration — there are no built-in MCPs +/// in the copilot CLI. +pub fn is_custom_mcp(config: &McpConfig) -> bool { + matches!(config, McpConfig::WithOptions(opts) if opts.command.is_some()) } /// Parse the markdown file and extract front matter and body @@ -306,14 +306,9 @@ pub fn generate_copilot_params(front_matter: &FrontMatter) -> String { allowed_tools.push(format!("shell({})", cmd)); } - let metadata = McpMetadataFile::bundled(); - let mut disallowed_mcps: Vec<&str> = metadata.mcp_names(); - disallowed_mcps.sort(); - let mut params = Vec::new(); params.push(format!("--model {}", front_matter.engine.model())); - params.push("--disable-builtin-mcps".to_string()); params.push("--no-ask-user".to_string()); for tool in allowed_tools { @@ -326,26 +321,6 @@ pub fn generate_copilot_params(front_matter: &FrontMatter) -> String { } } - for mcp in disallowed_mcps { - params.push(format!("--disable-mcp-server {}", mcp)); - } - - for (name, config) in &front_matter.mcp_servers { - let is_custom = matches!(config, McpConfig::WithOptions(opts) if opts.command.is_some()); - if is_custom { - continue; - } - - let is_enabled = match config { - McpConfig::Enabled(enabled) => *enabled, - McpConfig::WithOptions(_) => true, - }; - - if is_enabled { - params.push(format!("--mcp {}", name)); - } - } - 
params.join(" ") } @@ -460,6 +435,15 @@ pub const DEFAULT_POOL: &str = "AZS-1ES-L-MMS-ubuntu-22.04"; /// See: https://github.com/github/gh-aw-firewall/releases pub const AWF_VERSION: &str = "0.23.1"; +/// Docker image and version for the MCP Gateway (gh-aw-mcpg). +/// Update this when upgrading to a new MCPG release. +/// See: https://github.com/github/gh-aw-mcpg/releases +pub const MCPG_VERSION: &str = "0.1.9"; +pub const MCPG_IMAGE: &str = "ghcr.io/github/gh-aw-mcpg"; + +/// Default port MCPG listens on inside the container (host network mode). +pub const MCPG_PORT: u16 = 80; + /// Generate source path for the execute command. /// /// Returns a path using `{{ workspace }}` as the base, which gets resolved @@ -604,7 +588,7 @@ mod tests { } #[test] - fn test_copilot_params_custom_mcp_not_added_with_mcp_flag() { + fn test_copilot_params_custom_mcp_not_in_params() { let mut fm = minimal_front_matter(); fm.mcp_servers.insert( "my-tool".to_string(), @@ -614,17 +598,20 @@ mod tests { }), ); let params = generate_copilot_params(&fm); - // Custom MCPs (with command) should NOT appear as --mcp flags - assert!(!params.contains("--mcp my-tool")); + // MCPs are handled by MCPG, not copilot CLI params + assert!(!params.contains("my-tool")); } #[test] - fn test_copilot_params_builtin_mcp_added_with_mcp_flag() { + fn test_copilot_params_no_mcp_flags() { let mut fm = minimal_front_matter(); fm.mcp_servers .insert("ado".to_string(), McpConfig::Enabled(true)); let params = generate_copilot_params(&fm); - assert!(params.contains("--mcp ado")); + // No --mcp or --disable-mcp-server flags — MCPs are handled by MCPG + assert!(!params.contains("--mcp")); + assert!(!params.contains("--disable-mcp-server")); + assert!(!params.contains("--disable-builtin-mcps")); } // ─── sanitize_filename ──────────────────────────────────────────────────── diff --git a/src/compile/onees.rs b/src/compile/onees.rs index b3f754c..bcbd867 100644 --- a/src/compile/onees.rs +++ b/src/compile/onees.rs @@ -21,7 
+21,7 @@ use super::common::{ generate_checkout_self, generate_checkout_steps, generate_ci_trigger, generate_pipeline_path, generate_pipeline_resources, generate_pr_trigger, generate_repositories, generate_schedule, generate_source_path, - generate_working_directory, replace_with_indent, + generate_working_directory, is_custom_mcp, replace_with_indent, }; use super::types::{FrontMatter, McpConfig}; @@ -177,24 +177,47 @@ fn generate_agent_context_root(effective_workspace: &str) -> String { } } -/// Generate MCP configuration for 1ES templates +/// Generate MCP configuration for 1ES templates. +/// +/// In 1ES, MCPs require service connections. Only MCPs with explicit +/// `service_connection` configuration or custom commands are included. fn generate_mcp_configuration(mcps: &HashMap) -> String { let mut mcp_entries: Vec<_> = mcps .iter() .filter_map(|(name, config)| { let (is_enabled, opts) = match config { McpConfig::Enabled(enabled) => (*enabled, None), - McpConfig::WithOptions(o) => (o.command.is_none(), Some(o)), // Custom MCPs not supported + McpConfig::WithOptions(o) => (true, Some(o)), }; - if !is_enabled || !common::is_builtin_mcp(name) { + if !is_enabled { return None; } - // Use explicit service connection or generate default + // Custom MCPs with command: not supported in 1ES (needs service connection) + if is_custom_mcp(config) { + log::warn!( + "MCP '{}' uses custom command — not supported in 1ES target (requires service connection)", + name + ); + return None; + } + + // Use explicit service connection or generate default. + // Warn when falling back to the naming convention — the generated + // service connection reference may not exist in the ADO project. 
let service_connection = opts .and_then(|o| o.service_connection.clone()) - .unwrap_or_else(|| format!("mcp-{}-service-connection", name)); + .unwrap_or_else(|| { + let default = format!("mcp-{}-service-connection", name); + log::warn!( + "MCP '{}' has no explicit service connection in 1ES target — \ + assuming '{}' exists", + name, + default, + ); + default + }); Some((name.clone(), service_connection)) }) diff --git a/src/compile/standalone.rs b/src/compile/standalone.rs index 21dec18..2a5194c 100644 --- a/src/compile/standalone.rs +++ b/src/compile/standalone.rs @@ -14,15 +14,15 @@ use std::path::Path; use super::Compiler; use super::common::{ - self, AWF_VERSION, DEFAULT_POOL, compute_effective_workspace, generate_copilot_params, - generate_cancel_previous_builds, generate_checkout_self, generate_checkout_steps, - generate_ci_trigger, generate_pipeline_path, generate_pipeline_resources, generate_pr_trigger, - generate_repositories, generate_schedule, generate_source_path, generate_working_directory, - replace_with_indent, sanitize_filename, + self, AWF_VERSION, DEFAULT_POOL, MCPG_IMAGE, MCPG_PORT, MCPG_VERSION, + compute_effective_workspace, generate_copilot_params, generate_cancel_previous_builds, + generate_checkout_self, generate_checkout_steps, generate_ci_trigger, generate_pipeline_path, + generate_pipeline_resources, generate_pr_trigger, generate_repositories, generate_schedule, + generate_source_path, generate_working_directory, replace_with_indent, sanitize_filename, }; use super::types::{FrontMatter, McpConfig}; use crate::allowed_hosts::{CORE_ALLOWED_HOSTS, mcp_required_hosts}; -use crate::mcp_firewall::{FirewallConfig, UpstreamConfig}; +use serde::Serialize; use std::collections::HashSet; /// Standalone pipeline compiler. 
@@ -124,6 +124,8 @@ impl Compiler for StandaloneCompiler { let replacements: Vec<(&str, &str)> = vec![ ("{{ compiler_version }}", compiler_version), ("{{ firewall_version }}", AWF_VERSION), + ("{{ mcpg_version }}", MCPG_VERSION), + ("{{ mcpg_image }}", MCPG_IMAGE), ("{{ pool }}", &pool), ("{{ setup_job }}", &setup_job), ("{{ teardown_job }}", &teardown_job), @@ -158,19 +160,16 @@ impl Compiler for StandaloneCompiler { replace_with_indent(&yaml, placeholder, replacement) }); - // Generate MCP firewall config JSON - let firewall_config_json = if !front_matter.mcp_servers.is_empty() { - let config = generate_firewall_config(front_matter); - serde_json::to_string_pretty(&config) - .unwrap_or_else(|_| r#"{"upstreams":{}}"#.to_string()) - } else { - r#"{"upstreams":{}}"#.to_string() - }; + // Always generate MCPG config — safeoutputs is always required regardless + // of whether additional mcp-servers are configured in front matter. + let config = generate_mcpg_config(front_matter); + let mcpg_config_json = serde_json::to_string_pretty(&config) + .context("Failed to serialize MCPG config")?; let pipeline_yaml = replace_with_indent( &pipeline_yaml, - "{{ firewall_config }}", - &firewall_config_json, + "{{ mcpg_config }}", + &mcpg_config_json, ); Ok(pipeline_yaml) @@ -215,6 +214,11 @@ fn generate_allowed_domains(front_matter: &FrontMatter) -> String { hosts.insert((*host).to_string()); } + // Add host.docker.internal — required for the AWF container to reach + // MCPG and SafeOutputs on the host. Only added for standalone pipelines + // that always use MCPG. 
+ hosts.insert("host.docker.internal".to_string()); + // Add MCP-specific hosts for mcp in &enabled_mcps { for host in mcp_required_hosts(mcp) { @@ -331,11 +335,86 @@ fn generate_agentic_depends_on(setup_steps: &[serde_yaml::Value]) -> String { } } -/// Generate MCP firewall configuration from front matter -pub fn generate_firewall_config(front_matter: &FrontMatter) -> FirewallConfig { - let mut upstreams = HashMap::new(); +/// MCPG server configuration for a single MCP upstream. +#[derive(Debug, Serialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct McpgServerConfig { + /// Server type: "stdio" for command-based, "http" for HTTP backends + #[serde(rename = "type")] + pub server_type: String, + /// Command to run (for stdio type) + #[serde(skip_serializing_if = "Option::is_none")] + pub command: Option, + /// Command arguments (for stdio type) + #[serde(skip_serializing_if = "Option::is_none")] + pub args: Option>, + /// URL for HTTP backends + #[serde(skip_serializing_if = "Option::is_none")] + pub url: Option, + /// HTTP headers (e.g., Authorization) + #[serde(skip_serializing_if = "Option::is_none")] + pub headers: Option>, + /// Environment variables for the server process + #[serde(skip_serializing_if = "Option::is_none")] + pub env: Option>, + /// Tool allow-list (if empty or absent, all tools are allowed) + #[serde(skip_serializing_if = "Option::is_none")] + pub tools: Option>, +} + +/// MCPG gateway configuration. +#[derive(Debug, Serialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct McpgGatewayConfig { + pub port: u16, + pub domain: String, + pub api_key: String, + pub payload_dir: String, +} + +/// Top-level MCPG configuration. +#[derive(Debug, Serialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct McpgConfig { + pub mcp_servers: HashMap, + pub gateway: McpgGatewayConfig, +} + +/// Generate MCPG configuration from front matter. +/// +/// Converts the front matter `mcp-servers` definitions into MCPG-compatible JSON. 
+/// SafeOutputs is always included as an HTTP backend. Custom MCPs with explicit +/// `command:` are included as stdio servers. +pub fn generate_mcpg_config(front_matter: &FrontMatter) -> McpgConfig { + let mut mcp_servers = HashMap::new(); + + // SafeOutputs is always included as an HTTP backend. + // The actual URL/key are replaced at runtime by the pipeline template. + mcp_servers.insert( + "safeoutputs".to_string(), + McpgServerConfig { + server_type: "http".to_string(), + command: None, + args: None, + url: Some("http://host.docker.internal:${SAFE_OUTPUTS_PORT}/mcp".to_string()), + headers: Some(HashMap::from([( + "Authorization".to_string(), + "Bearer ${SAFE_OUTPUTS_API_KEY}".to_string(), + )])), + env: None, + tools: None, + }, + ); for (name, config) in &front_matter.mcp_servers { + // Prevent user-defined MCPs from overwriting the reserved safeoutputs backend + if name == "safeoutputs" { + log::warn!( + "MCP name 'safeoutputs' is reserved for the safe outputs HTTP backend — skipping" + ); + continue; + } + let (is_enabled, options) = match config { McpConfig::Enabled(enabled) => (*enabled, None), McpConfig::WithOptions(opts) => (true, Some(opts)), @@ -345,66 +424,58 @@ pub fn generate_firewall_config(front_matter: &FrontMatter) -> FirewallConfig { continue; } - let upstream = if let Some(opts) = options { + if let Some(opts) = options { if let Some(command) = &opts.command { - // Custom MCP with explicit command - UpstreamConfig { - command: command.clone(), - args: opts.args.clone(), - env: opts.env.clone(), - allowed: if opts.allowed.is_empty() { - vec!["*".to_string()] - } else { - opts.allowed.clone() - }, - spawn_timeout_secs: 30, - } - } else if common::is_builtin_mcp(name) { - // Built-in MCP with options - let mut args = vec!["mcp".to_string(), name.clone()]; - args.extend(opts.args.clone()); - - UpstreamConfig { - command: "agency".to_string(), - args, - env: opts.env.clone(), - allowed: if opts.allowed.is_empty() { - vec!["*".to_string()] - } 
else { - opts.allowed.clone() + // Custom MCP with explicit command → stdio server + let args = if opts.args.is_empty() { + None + } else { + Some(opts.args.clone()) + }; + let env = if opts.env.is_empty() { + None + } else { + Some(opts.env.clone()) + }; + let tools = if opts.allowed.is_empty() { + None + } else { + Some(opts.allowed.clone()) + }; + mcp_servers.insert( + name.clone(), + McpgServerConfig { + server_type: "stdio".to_string(), + command: Some(command.clone()), + args, + url: None, + headers: None, + env, + tools, }, - spawn_timeout_secs: 30, - } + ); } else { log::warn!( - "MCP '{}' has no command and is not a built-in - skipping", + "MCP '{}' has no command — skipping (no built-in MCPs available)", name ); - continue; - } - } else if common::is_builtin_mcp(name) { - // Built-in MCP with simple enablement - UpstreamConfig { - command: "agency".to_string(), - args: vec!["mcp".to_string(), name.clone()], - env: HashMap::new(), - allowed: vec!["*".to_string()], - spawn_timeout_secs: 30, } } else { log::warn!( - "MCP '{}' is not a built-in and has no command - skipping", + "MCP '{}' has no command — skipping (no built-in MCPs available)", name ); - continue; - }; - - upstreams.insert(name.clone(), upstream); + } } - FirewallConfig { - upstreams, - metadata_path: None, + McpgConfig { + mcp_servers, + gateway: McpgGatewayConfig { + port: MCPG_PORT, + domain: "host.docker.internal".to_string(), + api_key: "${MCP_GATEWAY_API_KEY}".to_string(), + payload_dir: "/tmp/gh-aw/mcp-payloads".to_string(), + }, } } @@ -513,39 +584,16 @@ mod tests { } #[test] - fn test_generate_firewall_config_builtin_simple_enabled() { - let mut fm = minimal_front_matter(); - fm.mcp_servers - .insert("ado".to_string(), McpConfig::Enabled(true)); - let config = generate_firewall_config(&fm); - let upstream = config.upstreams.get("ado").unwrap(); - assert_eq!(upstream.command, "agency"); - assert_eq!(upstream.args, vec!["mcp", "ado"]); - assert_eq!(upstream.allowed, vec!["*"]); - } - - 
#[test] - fn test_generate_firewall_config_builtin_with_allowed_list() { - let mut fm = minimal_front_matter(); - fm.mcp_servers.insert( - "icm".to_string(), - McpConfig::WithOptions(McpOptions { - allowed: vec!["create_incident".to_string(), "get_incident".to_string()], - ..Default::default() - }), - ); - let config = generate_firewall_config(&fm); - let upstream = config.upstreams.get("icm").unwrap(); - assert_eq!(upstream.command, "agency"); - assert_eq!(upstream.args, vec!["mcp", "icm"]); - assert_eq!( - upstream.allowed, - vec!["create_incident".to_string(), "get_incident".to_string()] - ); + fn test_generate_mcpg_config_always_includes_safeoutputs() { + let fm = minimal_front_matter(); + let config = generate_mcpg_config(&fm); + let so = config.mcp_servers.get("safeoutputs").unwrap(); + assert_eq!(so.server_type, "http"); + assert!(so.url.as_ref().unwrap().contains("host.docker.internal")); } #[test] - fn test_generate_firewall_config_custom_mcp() { + fn test_generate_mcpg_config_custom_mcp() { let mut fm = minimal_front_matter(); fm.mcp_servers.insert( "my-tool".to_string(), @@ -556,52 +604,52 @@ mod tests { ..Default::default() }), ); - let config = generate_firewall_config(&fm); - let upstream = config.upstreams.get("my-tool").unwrap(); - assert_eq!(upstream.command, "node"); - assert_eq!(upstream.args, vec!["server.js"]); - assert_eq!(upstream.allowed, vec!["do_thing"]); - } - - #[test] - fn test_generate_firewall_config_custom_mcp_empty_allowed_defaults_to_wildcard() { - let mut fm = minimal_front_matter(); - fm.mcp_servers.insert( - "my-tool".to_string(), - McpConfig::WithOptions(McpOptions { - command: Some("python".to_string()), - allowed: vec![], - ..Default::default() - }), + let config = generate_mcpg_config(&fm); + let server = config.mcp_servers.get("my-tool").unwrap(); + assert_eq!(server.server_type, "stdio"); + assert_eq!(server.command.as_ref().unwrap(), "node"); + assert_eq!(server.args.as_ref().unwrap(), &vec!["server.js"]); + assert_eq!( + 
server.tools.as_ref().unwrap(), + &vec!["do_thing".to_string()] ); - let config = generate_firewall_config(&fm); - let upstream = config.upstreams.get("my-tool").unwrap(); - assert_eq!(upstream.allowed, vec!["*"]); } #[test] - fn test_generate_firewall_config_unknown_non_builtin_skipped() { - // An MCP that is neither built-in nor has a command should be skipped + fn test_generate_mcpg_config_mcp_without_command_skipped() { let mut fm = minimal_front_matter(); + // An MCP with no command should be skipped (no built-in MCPs) fm.mcp_servers .insert("phantom".to_string(), McpConfig::Enabled(true)); - let config = generate_firewall_config(&fm); - assert!(!config.upstreams.contains_key("phantom")); + let config = generate_mcpg_config(&fm); + assert!(!config.mcp_servers.contains_key("phantom")); + // safeoutputs is always present + assert!(config.mcp_servers.contains_key("safeoutputs")); } #[test] - fn test_generate_firewall_config_disabled_mcp_skipped() { + fn test_generate_mcpg_config_disabled_mcp_skipped() { let mut fm = minimal_front_matter(); fm.mcp_servers - .insert("ado".to_string(), McpConfig::Enabled(false)); - let config = generate_firewall_config(&fm); - assert!(!config.upstreams.contains_key("ado")); + .insert("my-tool".to_string(), McpConfig::Enabled(false)); + let config = generate_mcpg_config(&fm); + assert!(!config.mcp_servers.contains_key("my-tool")); + } + + #[test] + fn test_generate_mcpg_config_empty_mcp_servers() { + let fm = minimal_front_matter(); + let config = generate_mcpg_config(&fm); + // Only safeoutputs should be present + assert_eq!(config.mcp_servers.len(), 1); + assert!(config.mcp_servers.contains_key("safeoutputs")); } #[test] - fn test_generate_firewall_config_empty_mcp_servers() { + fn test_generate_mcpg_config_gateway_defaults() { let fm = minimal_front_matter(); - let config = generate_firewall_config(&fm); - assert!(config.upstreams.is_empty()); + let config = generate_mcpg_config(&fm); + assert_eq!(config.gateway.port, 80); + 
assert_eq!(config.gateway.domain, "host.docker.internal"); } } diff --git a/src/create.rs b/src/create.rs index 3cdfd75..67e6aa8 100644 --- a/src/create.rs +++ b/src/create.rs @@ -5,7 +5,6 @@ use std::fmt; use std::path::PathBuf; use crate::compile::sanitize_filename; -use crate::mcp_metadata::McpMetadataFile; /// Available AI models for agent configuration const AVAILABLE_MODELS: &[&str] = &[ @@ -132,7 +131,6 @@ pub async fn create_agent(output_dir: Option) -> Result<()> { let mut config = AgentConfig::default(); let mut step = WizardStep::Name; - let mcp_metadata = McpMetadataFile::bundled(); loop { match step { @@ -265,7 +263,7 @@ pub async fn create_agent(output_dir: Option) -> Result<()> { } WizardStep::Mcps => { - match prompt_mcps_with_back(&mcp_metadata, &mut step)? { + match prompt_mcps_with_back(&mut step)? { Some(mcps) => { config.mcps = mcps; step = step.next(); @@ -566,110 +564,64 @@ fn prompt_schedule_with_back(step: &mut WizardStep) -> Result Result>> { - use std::collections::{HashMap, HashSet}; - use terminal_size::{Height, Width, terminal_size}; - - // Get terminal dimensions for dynamic sizing - let (term_width, term_height) = terminal_size() - .map(|(Width(w), Height(h))| (w as usize, h as usize)) - .unwrap_or((80, 24)); - - let page_size = term_height.saturating_sub(10).max(5).min(30); - - let builtin_mcps = metadata.builtin_mcp_names(); - - let mut all_tools: Vec = Vec::new(); - for mcp_name in &builtin_mcps { - if let Some(mcp) = metadata.get(mcp_name) { - for tool in &mcp.tools { - all_tools.push(McpToolOption { - mcp_name: mcp_name.to_string(), - tool_name: tool.name.clone(), - description: tool.description.clone().unwrap_or_default(), - max_width: term_width, - }); - } +/// Prompt for MCPs with back navigation. +/// +/// There are no built-in MCPs — all MCPs require explicit command configuration. +/// The wizard collects custom MCP names; command/args are configured in the +/// generated markdown front matter. 
+fn prompt_mcps_with_back(step: &mut WizardStep) -> Result>> { + println!("\n🔧 MCP Server Configuration"); + println!("Add custom MCP servers. Each requires a command and args in the front matter."); + println!("You can add MCP servers later by editing the generated markdown file.\n"); + + let add_mcps = match Confirm::new("Would you like to add any custom MCP servers?") + .with_default(false) + .prompt() + { + Ok(val) => val, + Err(InquireError::OperationCanceled) => { + *step = step.prev(); + return Ok(None); } - } - - all_tools.sort_by(|a, b| a.full_name().cmp(&b.full_name())); - - let total_tools = all_tools.len(); - let total_mcps = builtin_mcps.len(); - - println!("\n🔧 MCP Tool Selection"); - println!("Select tools to enable. Tools are shown as mcp:tool_name."); - println!("Type to search/filter, Space to toggle, Enter to confirm, Esc to go back."); - println!("({} tools across {} MCPs)\n", total_tools, total_mcps); - - let prompt = MultiSelect::new("Select tools to enable:", all_tools) - .with_help_message( - "Type to filter (e.g., 'ado:' or 'work_item'), Space to toggle, Enter to confirm", - ) - .with_page_size(page_size) - .prompt(); + Err(InquireError::OperationInterrupted) => { + anyhow::bail!("Wizard interrupted"); + } + Err(e) => return Err(e).context("Failed to prompt for MCPs"), + }; - match prompt { - Ok(selected) => { - if selected.is_empty() { - return Ok(Some(Vec::new())); - } + if !add_mcps { + return Ok(Some(Vec::new())); + } - let mut mcp_tools: HashMap> = HashMap::new(); - for tool in selected { - mcp_tools - .entry(tool.mcp_name.clone()) - .or_default() - .insert(tool.tool_name); + let mut selections = Vec::new(); + loop { + let name = match Text::new("MCP server name (or empty to finish):") + .with_help_message("e.g., my-custom-tool") + .prompt() + { + Ok(name) if name.trim().is_empty() => break, + Ok(name) => name.trim().to_string(), + Err(InquireError::OperationCanceled) => break, + Err(InquireError::OperationInterrupted) => { + 
anyhow::bail!("Wizard interrupted"); } + Err(e) => return Err(e).context("Failed to read MCP name"), + }; - let mut mcp_selections: Vec = mcp_tools - .into_iter() - .map(|(mcp_name, selected_tools)| { - let total_for_mcp = metadata.get(&mcp_name).map(|m| m.tools.len()).unwrap_or(0); - - if selected_tools.len() == total_for_mcp { - McpSelection { - name: mcp_name, - allowed_tools: None, - } - } else { - let mut tools: Vec = selected_tools.into_iter().collect(); - tools.sort(); - McpSelection { - name: mcp_name, - allowed_tools: Some(tools), - } - } - }) - .collect(); - - mcp_selections.sort_by(|a, b| a.name.cmp(&b.name)); - - println!("\n📋 Selected {} MCPs:", mcp_selections.len()); - for mcp in &mcp_selections { - match &mcp.allowed_tools { - None => println!(" {} (all tools)", mcp.name), - Some(tools) => println!(" {} ({} tools)", mcp.name, tools.len()), - } - } + selections.push(McpSelection { + name, + allowed_tools: None, + }); + } - Ok(Some(mcp_selections)) - } - Err(InquireError::OperationCanceled) => { - *step = step.prev(); - Ok(None) + if !selections.is_empty() { + println!("\n📋 Added {} custom MCP(s):", selections.len()); + for mcp in &selections { + println!(" {} (configure command/args in front matter)", mcp.name); } - Err(InquireError::OperationInterrupted) => { - anyhow::bail!("Wizard interrupted"); - } - Err(e) => Err(e).context("Failed to select tools"), } + + Ok(Some(selections)) } /// Workspace option for display @@ -905,49 +857,6 @@ fn prompt_custom_schedule() -> Result> { } } -/// MCP tool option for flat list display (mcp:tool format) -struct McpToolOption { - mcp_name: String, - tool_name: String, - description: String, - /// Maximum width for the display (set based on terminal width) - max_width: usize, -} - -impl McpToolOption { - fn full_name(&self) -> String { - format!("{}:{}", self.mcp_name, self.tool_name) - } -} - -impl fmt::Display for McpToolOption { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let full_name = 
self.full_name(); - if self.description.is_empty() { - write!(f, "{}", full_name) - } else { - // Calculate available space for description - // Format is: "mcp:tool_name: description" - // Account for ": " separator (2 chars) and margin for inquire's UI - let prefix_len = full_name.len() + 2; - let margin = 6; // Space for checkbox, cursor, and padding - let available = self.max_width.saturating_sub(prefix_len + margin); - - if available < 10 { - // Not enough space for description, just show the name - write!(f, "{}", full_name) - } else { - let desc = if self.description.len() > available { - format!("{}...", &self.description[..available.saturating_sub(3)]) - } else { - self.description.clone() - }; - write!(f, "{}: {}", full_name, desc) - } - } - } -} - /// Generate the markdown file content from the configuration fn generate_markdown(config: &AgentConfig) -> String { let mut yaml_parts = Vec::new(); diff --git a/src/main.rs b/src/main.rs index 3c39a58..b1442e6 100644 --- a/src/main.rs +++ b/src/main.rs @@ -5,8 +5,6 @@ mod execute; mod fuzzy_schedule; mod logging; mod mcp; -mod mcp_firewall; -mod mcp_metadata; mod ndjson; mod proxy; pub mod sanitize; @@ -73,11 +71,18 @@ enum Commands { #[arg(long = "allow")] allowed_hosts: Vec, }, - /// Start an MCP firewall server that proxies and filters tool calls to upstream MCPs - McpFirewall { - /// Path to the firewall configuration JSON file - #[arg(short, long)] - config: PathBuf, + /// Run SafeOutputs MCP server over HTTP (for MCPG integration) + McpHttp { + /// Port to listen on + #[arg(long, default_value = "8100")] + port: u16, + /// API key for authentication (if not provided, one is generated) + #[arg(long)] + api_key: Option, + /// Directory for safe output files + output_directory: String, + /// Guard against directory traversal attacks + bounding_directory: String, }, } @@ -106,7 +111,7 @@ async fn main() -> Result<()> { Some(Commands::Mcp { .. }) => "mcp", Some(Commands::Execute { .. 
}) => "execute", Some(Commands::Proxy { .. }) => "proxy", - Some(Commands::McpFirewall { .. }) => "mcp-firewall", + Some(Commands::McpHttp { .. }) => "mcp-http", None => "ado-aw", }; @@ -228,8 +233,14 @@ async fn main() -> Result<()> { #[cfg(windows)] std::future::pending::<()>().await; } - Commands::McpFirewall { config } => { - mcp_firewall::run(&config).await?; + Commands::McpHttp { + port, + api_key, + output_directory, + bounding_directory, + } => { + mcp::run_http(&output_directory, &bounding_directory, port, api_key.as_deref()) + .await?; } } } else { diff --git a/src/mcp.rs b/src/mcp.rs index 4cf4525..30a19fd 100644 --- a/src/mcp.rs +++ b/src/mcp.rs @@ -434,6 +434,123 @@ pub async fn run(output_directory: &str, bounding_directory: &str) -> Result<()> Ok(()) } +/// Run SafeOutputs MCP server over HTTP using the Streamable HTTP protocol. +/// +/// This is used for MCPG integration: the gateway connects to this server as an +/// HTTP backend and proxies tool calls from the agent. +pub async fn run_http( + output_directory: &str, + bounding_directory: &str, + port: u16, + api_key: Option<&str>, +) -> Result<()> { + use axum::Router; + use rmcp::transport::streamable_http_server::{ + StreamableHttpServerConfig, StreamableHttpService, + session::local::LocalSessionManager, + }; + use std::sync::Arc; + + let bounding = bounding_directory.to_string(); + let output = output_directory.to_string(); + + // Generate or use provided API key. + // In production the pipeline always passes --api-key with a cryptographically + // random value; this fallback covers dev/test invocations. 
+    let api_key = api_key
+        .map(|k| k.to_string())
+        .unwrap_or_else(|| {
+            let mut buf = [0u8; 32];
+            std::fs::File::open("/dev/urandom")
+                .and_then(|mut f| {
+                    use std::io::Read;
+                    f.read_exact(&mut buf)
+                })
+                .unwrap_or_else(|_| {
+                    // Last-resort fallback if /dev/urandom is unavailable
+                    // (e.g. non-Unix hosts). NOTE(review): this timestamp-based
+                    // seed is NOT cryptographically secure; acceptable only
+                    // because the pipeline always supplies --api-key in
+                    // production.
+                    use std::time::{SystemTime, UNIX_EPOCH};
+                    let seed = SystemTime::now()
+                        .duration_since(UNIX_EPOCH)
+                        .unwrap_or_default()
+                        .as_nanos();
+                    buf[..16].copy_from_slice(&seed.to_le_bytes());
+                    buf[16..].copy_from_slice(&seed.wrapping_mul(0x517cc1b727220a95).to_le_bytes());
+                });
+            buf.iter().map(|b| format!("{:02x}", b)).collect()
+        });
+
+    info!("Starting SafeOutputs HTTP server on port {}", port);
+
+    let config = StreamableHttpServerConfig {
+        sse_keep_alive: Some(std::time::Duration::from_secs(15)),
+        stateful_mode: true,
+    };
+
+    let session_manager = Arc::new(LocalSessionManager::default());
+
+    // Pre-initialize SafeOutputs once and share via clone.
+    // The factory closure runs on a Tokio worker thread, so we cannot
+    // use block_on() inside it — that would panic with "Cannot start
+    // a runtime from within a runtime".
+    // NOTE(review): confirm SafeOutputs::new's parameter order — the call
+    // below passes (&bounding, &output) while the CLI args and run() use
+    // (output_directory, bounding_directory); verify the arguments are not
+    // swapped before merging.
+ let safe_outputs_template = SafeOutputs::new(&bounding, &output).await?; + let mcp_service = StreamableHttpService::new( + move || Ok(safe_outputs_template.clone()), + session_manager, + config, + ); + + // Wrap with API key auth middleware + let expected_key = api_key.clone(); + let app = Router::new() + .route("/health", axum::routing::get(|| async { "ok" })) + .route( + "/mcp", + axum::routing::post(axum::routing::any_service(mcp_service.clone())) + .get(axum::routing::any_service(mcp_service.clone())) + .delete(axum::routing::any_service(mcp_service)), + ) + .layer(axum::middleware::from_fn(move |req: axum::extract::Request, next: axum::middleware::Next| { + let expected = expected_key.clone(); + async move { + // Skip auth for health endpoint + if req.uri().path() == "/health" { + return next.run(req).await; + } + + // Check Bearer token + if let Some(auth) = req.headers().get("authorization") { + if let Ok(auth_str) = auth.to_str() { + if auth_str == format!("Bearer {}", expected) { + return next.run(req).await; + } + } + } + + axum::response::Response::builder() + .status(401) + .body(axum::body::Body::from("Unauthorized")) + .unwrap() + } + })); + + let addr = std::net::SocketAddr::from(([0, 0, 0, 0], port)); + let listener = tokio::net::TcpListener::bind(addr).await?; + info!("SafeOutputs HTTP server listening on {}", addr); + + // Print port for pipeline capture (key is already known by the caller) + println!("SAFE_OUTPUTS_PORT={}", port); + log::debug!("SafeOutputs API key configured (not printed for security)"); + + axum::serve(listener, app) + .with_graceful_shutdown(async { + tokio::signal::ctrl_c().await.ok(); + info!("SafeOutputs HTTP server shutting down"); + }) + .await?; + + Ok(()) +} + #[cfg(test)] mod tests { use super::*; diff --git a/src/mcp_firewall.rs b/src/mcp_firewall.rs deleted file mode 100644 index dee4327..0000000 --- a/src/mcp_firewall.rs +++ /dev/null @@ -1,776 +0,0 @@ -//! 
MCP Firewall - A filtering proxy for Model Context Protocol servers -//! -//! The firewall acts as a single MCP server that: -//! 1. Loads tool definitions from pre-generated metadata (mcp-metadata.json) -//! 2. Exposes only allowed tools (namespaced as `upstream:tool_name`) -//! 3. Spawns upstream MCP servers lazily when tools are called -//! 4. Routes tool calls to the appropriate upstream -//! 5. Logs all tool call attempts for auditing - -use anyhow::{Context, Result}; -use log::{debug, error, info, warn}; -use rmcp::{ - ErrorData as McpError, RoleServer, ServerHandler, ServiceExt, model::*, - service::RequestContext, transport::stdio, -}; -use serde::{Deserialize, Serialize}; -use std::borrow::Cow; -use std::collections::HashMap; -use std::path::PathBuf; -use std::process::Stdio; -use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader}; -use tokio::process::{Child, Command}; -use tokio::sync::RwLock; - -use crate::mcp_metadata::{McpMetadataFile, ToolMetadata}; - -// ============================================================================ -// Configuration -// ============================================================================ - -/// Configuration for a single upstream MCP server -#[derive(Debug, Clone, Deserialize, Serialize)] -pub struct UpstreamConfig { - /// Command to spawn the MCP server - pub command: String, - /// Arguments to pass to the command - #[serde(default)] - pub args: Vec, - /// Environment variables for the MCP server process - #[serde(default)] - pub env: HashMap, - /// List of allowed tool names (without namespace prefix) - /// Use ["*"] to allow all tools - pub allowed: Vec, - /// Timeout in seconds for spawning and initializing the upstream MCP server - /// Defaults to 30 seconds if not specified - #[serde(default = "default_spawn_timeout")] - pub spawn_timeout_secs: u64, -} - -fn default_spawn_timeout() -> u64 { - 30 -} - -impl UpstreamConfig { - /// Check if a tool name is allowed by this upstream's policy - pub fn 
is_tool_allowed(&self, tool_name: &str) -> bool { - self.allowed.iter().any(|pattern| { - if pattern == "*" { - true - } else if pattern.ends_with('*') { - // Prefix wildcard: "get_*" matches "get_incident", "get_user" - let prefix = &pattern[..pattern.len() - 1]; - tool_name.starts_with(prefix) - } else { - pattern == tool_name - } - }) - } -} - -/// Full firewall configuration -#[derive(Debug, Clone, Deserialize, Serialize)] -pub struct FirewallConfig { - /// Map of upstream name to configuration - pub upstreams: HashMap, - /// Path to MCP metadata file (optional, uses bundled metadata if not provided) - #[serde(skip_serializing_if = "Option::is_none")] - pub metadata_path: Option, -} - -impl FirewallConfig { - /// Load configuration from a JSON file - pub fn from_file(path: &PathBuf) -> Result { - let content = std::fs::read_to_string(path) - .with_context(|| format!("Failed to read config file: {}", path.display()))?; - serde_json::from_str(&content) - .with_context(|| format!("Failed to parse config file: {}", path.display())) - } - - /// Load MCP metadata (from file if specified, otherwise bundled) - pub fn load_metadata(&self) -> Result { - if let Some(ref path) = self.metadata_path { - let content = std::fs::read_to_string(path) - .with_context(|| format!("Failed to read metadata file: {}", path.display()))?; - serde_json::from_str(&content) - .with_context(|| format!("Failed to parse metadata file: {}", path.display())) - } else { - Ok(McpMetadataFile::bundled()) - } - } - - /// Create a new empty configuration - pub fn new() -> Self { - Self { - upstreams: HashMap::new(), - metadata_path: None, - } - } -} - -impl Default for FirewallConfig { - fn default() -> Self { - Self::new() - } -} - -// ============================================================================ -// Upstream MCP Client -// ============================================================================ - -/// A connection to an upstream MCP server (spawned lazily) -struct 
UpstreamConnection { - name: String, - #[allow(dead_code)] - child: Child, - stdin: tokio::process::ChildStdin, - stdout_reader: BufReader, - request_id: u64, -} - -impl UpstreamConnection { - /// Spawn and initialize an upstream MCP server with timeout - async fn spawn(name: String, config: &UpstreamConfig) -> Result { - let timeout_duration = std::time::Duration::from_secs(config.spawn_timeout_secs); - let start_time = std::time::Instant::now(); - - info!( - "[{}] Spawning upstream MCP server (timeout: {}s)", - name, config.spawn_timeout_secs - ); - - // Wrap the entire spawn+initialize sequence in a timeout - let result = tokio::time::timeout(timeout_duration, async { - let mut cmd = Command::new(&config.command); - cmd.args(&config.args); - - for (key, value) in &config.env { - cmd.env(key, value); - } - - cmd.stdin(Stdio::piped()); - cmd.stdout(Stdio::piped()); - cmd.stderr(Stdio::inherit()); // Let upstream errors flow to our stderr - - let mut child = cmd.spawn().with_context(|| { - format!("Failed to spawn upstream '{}': {}", name, config.command) - })?; - - let stdin = child.stdin.take().ok_or_else(|| { - anyhow::anyhow!("Failed to capture stdin for upstream '{}'", name) - })?; - let stdout = child.stdout.take().ok_or_else(|| { - anyhow::anyhow!("Failed to capture stdout for upstream '{}'", name) - })?; - - let mut conn = Self { - name: name.clone(), - child, - stdin, - stdout_reader: BufReader::new(stdout), - request_id: 0, - }; - - // Initialize the MCP connection - conn.initialize().await?; - - Ok::(conn) - }) - .await; - - let duration = start_time.elapsed(); - - match result { - Ok(Ok(conn)) => { - info!( - "[{}] Successfully spawned and initialized in {:.2}s", - name, - duration.as_secs_f64() - ); - Ok(conn) - } - Ok(Err(e)) => { - error!( - "[{}] Failed to spawn/initialize after {:.2}s: {}", - name, - duration.as_secs_f64(), - e - ); - Err(e) - } - Err(_) => { - error!( - "[{}] Spawn timeout after {:.2}s (limit: {}s)", - name, - 
duration.as_secs_f64(), - config.spawn_timeout_secs - ); - anyhow::bail!( - "Timeout spawning upstream '{}' after {}s. The MCP server may be hanging during initialization. \ - Check that the command '{}' is responsive and properly configured.", - name, - config.spawn_timeout_secs, - config.command - ) - } - } - } - - /// Send a JSON-RPC request and wait for response - async fn send_request( - &mut self, - method: &str, - params: Option, - ) -> Result { - self.request_id += 1; - let id = self.request_id; - - let request = if let Some(p) = params { - serde_json::json!({ - "jsonrpc": "2.0", - "id": id, - "method": method, - "params": p - }) - } else { - serde_json::json!({ - "jsonrpc": "2.0", - "id": id, - "method": method - }) - }; - - let request_str = serde_json::to_string(&request)?; - debug!("[{}] Sending: {}", self.name, request_str); - - self.stdin.write_all(request_str.as_bytes()).await?; - self.stdin.write_all(b"\n").await?; - self.stdin.flush().await?; - - // Read response - let mut line = String::new(); - self.stdout_reader - .read_line(&mut line) - .await - .with_context(|| format!("Failed to read response from upstream '{}'", self.name))?; - - debug!("[{}] Received: {}", self.name, line.trim()); - - let response: serde_json::Value = serde_json::from_str(&line).with_context(|| { - format!( - "Failed to parse response from upstream '{}': {}", - self.name, line - ) - })?; - - // Check for error - if let Some(error) = response.get("error") { - anyhow::bail!("Upstream '{}' returned error: {}", self.name, error); - } - - Ok(response - .get("result") - .cloned() - .unwrap_or(serde_json::Value::Null)) - } - - /// Initialize the MCP connection - async fn initialize(&mut self) -> Result<()> { - let params = serde_json::json!({ - "protocolVersion": "2024-11-05", - "capabilities": { - "tools": {} - }, - "clientInfo": { - "name": "mcp-firewall", - "version": env!("CARGO_PKG_VERSION") - } - }); - - let result = self.send_request("initialize", Some(params)).await?; - 
info!( - "[{}] Initialized: {:?}", - self.name, - result.get("serverInfo") - ); - - // Send initialized notification - let notification = serde_json::json!({ - "jsonrpc": "2.0", - "method": "notifications/initialized" - }); - let notification_str = serde_json::to_string(¬ification)?; - self.stdin.write_all(notification_str.as_bytes()).await?; - self.stdin.write_all(b"\n").await?; - self.stdin.flush().await?; - - Ok(()) - } - - /// Call a tool on this upstream - async fn call_tool( - &mut self, - tool_name: &str, - arguments: Option>, - ) -> Result { - let params = serde_json::json!({ - "name": tool_name, - "arguments": arguments.unwrap_or_default() - }); - - let result = self.send_request("tools/call", Some(params)).await?; - - // Parse the result into CallToolResult - let content: Vec = - if let Some(content_array) = result.get("content").and_then(|c| c.as_array()) { - content_array - .iter() - .filter_map(|c| serde_json::from_value(c.clone()).ok()) - .collect() - } else { - vec![] - }; - - let is_error = result - .get("isError") - .and_then(|e| e.as_bool()) - .unwrap_or(false); - - let mut result = CallToolResult::success(content); - result.is_error = Some(is_error); - Ok(result) - } -} - -// ============================================================================ -// MCP Firewall Server -// ============================================================================ - -/// Policy for the MCP firewall -#[derive(Debug, Clone)] -pub struct FirewallPolicy { - pub config: FirewallConfig, -} - -/// The MCP Firewall server -pub struct McpFirewall { - /// Upstream configs (for lazy spawning) - upstream_configs: HashMap, - /// Lazily spawned upstream connections - upstreams: RwLock>, - /// Combined list of all allowed tools (namespaced) - tools: Vec, -} - -impl McpFirewall { - /// Create the firewall from policy and metadata - pub fn new(policy: FirewallPolicy) -> Result { - // Load metadata - let metadata = policy.config.load_metadata()?; - let mut all_tools = 
Vec::new(); - - // Build tool list from metadata (filtered by allowed list) - for (upstream_name, upstream_config) in &policy.config.upstreams { - if let Some(mcp_meta) = metadata.get(upstream_name) { - for tool_meta in &mcp_meta.tools { - // Check if this tool is allowed - if upstream_config.is_tool_allowed(&tool_meta.name) { - all_tools.push(Self::metadata_to_tool(upstream_name, tool_meta)); - } - } - info!( - "[{}] Loaded {} tools from metadata ({} allowed)", - upstream_name, - mcp_meta.tools.len(), - all_tools - .iter() - .filter(|t| t.name.starts_with(&format!("{}:", upstream_name))) - .count() - ); - } else { - warn!( - "[{}] No metadata found - tools will be unavailable", - upstream_name - ); - } - } - - info!( - "MCP Firewall initialized with {} upstreams, {} total tools (lazy spawning enabled)", - policy.config.upstreams.len(), - all_tools.len() - ); - - Ok(Self { - upstream_configs: policy.config.upstreams.clone(), - upstreams: RwLock::new(HashMap::new()), - tools: all_tools, - }) - } - - /// Convert ToolMetadata to rmcp Tool with namespace prefix - fn metadata_to_tool(upstream_name: &str, meta: &ToolMetadata) -> Tool { - Tool { - name: Cow::Owned(format!("{}:{}", upstream_name, meta.name)), - description: meta.description.clone().map(Cow::Owned), - input_schema: meta - .input_schema - .clone() - .and_then(|v| serde_json::from_value(v).ok()) - .unwrap_or_default(), - annotations: None, - icons: None, - output_schema: None, - title: None, - } - } - - /// Get or spawn an upstream connection - async fn get_or_spawn_upstream(&self, upstream_name: &str) -> Result<(), McpError> { - // Fast path: check if already spawned with read lock - { - let upstreams = self.upstreams.read().await; - if upstreams.contains_key(upstream_name) { - return Ok(()); - } - } - - // Need to spawn - get config first - let config = self.upstream_configs.get(upstream_name).ok_or_else(|| { - McpError::invalid_params(format!("Unknown upstream: '{}'", upstream_name), None) - })?; - - 
info!("[{}] Spawning upstream MCP server (lazy)", upstream_name); - - // Spawn the connection outside of any locks (this is the expensive operation) - let conn = UpstreamConnection::spawn(upstream_name.to_string(), config) - .await - .map_err(|e| { - McpError::internal_error(format!("Failed to spawn upstream: {}", e), None) - })?; - - // Acquire write lock and check again (double-check pattern) - // Another task might have spawned and inserted while we were spawning above - let mut upstreams = self.upstreams.write().await; - if !upstreams.contains_key(upstream_name) { - upstreams.insert(upstream_name.to_string(), conn); - } - // If another task already inserted, our `conn` is dropped here, which terminates - // the child process via Drop. This prevents duplicate upstream connections. - - Ok(()) - } - - /// Log a message via centralized logging - fn log(&self, message: &str) { - info!(target: "firewall", "{}", message); - } - - /// Parse a namespaced tool name into (upstream, tool_name) - fn parse_tool_name(namespaced: &str) -> Option<(&str, &str)> { - namespaced.split_once(':') - } - - /// Check if a tool is allowed by upstream config - fn is_tool_allowed(&self, upstream_name: &str, tool_name: &str) -> bool { - self.upstream_configs - .get(upstream_name) - .map(|c| c.is_tool_allowed(tool_name)) - .unwrap_or(false) - } -} - -impl ServerHandler for McpFirewall { - fn get_info(&self) -> ServerInfo { - ServerInfo { - instructions: Some( - "MCP Firewall - A secure proxy for accessing multiple MCP servers with policy-based filtering.".into() - ), - capabilities: ServerCapabilities::builder().enable_tools().build(), - ..Default::default() - } - } - - async fn list_tools( - &self, - _request: Option, - _context: RequestContext, - ) -> Result { - Ok(ListToolsResult { - tools: self.tools.clone(), - next_cursor: None, - }) - } - - async fn call_tool( - &self, - request: CallToolRequestParam, - _context: RequestContext, - ) -> Result { - let tool_name = &request.name; - - // 
Parse namespaced tool name - let (upstream_name, local_tool_name) = match Self::parse_tool_name(tool_name) { - Some((u, t)) => (u, t), - None => { - self.log(&format!( - "BLOCKED {} (invalid format, expected 'upstream:tool')", - tool_name - )); - return Err(McpError::invalid_params( - format!( - "Invalid tool name format. Expected 'upstream:tool', got '{}'", - tool_name - ), - None, - )); - } - }; - - // Check if upstream exists in config - if !self.upstream_configs.contains_key(upstream_name) { - self.log(&format!( - "BLOCKED {} (unknown upstream '{}')", - tool_name, upstream_name - )); - return Err(McpError::invalid_params( - format!("Unknown upstream: '{}'", upstream_name), - None, - )); - } - - // Check if tool is allowed - if !self.is_tool_allowed(upstream_name, local_tool_name) { - self.log(&format!("BLOCKED {} (not in allowlist)", tool_name)); - return Err(McpError::invalid_params( - format!("Tool '{}' is not allowed by firewall policy", tool_name), - None, - )); - } - - // Ensure upstream is spawned (lazy initialization) - self.get_or_spawn_upstream(upstream_name).await?; - - // Log the allowed call - let args_summary = request - .arguments - .as_ref() - .map(|a| { - let s = serde_json::to_string(a).unwrap_or_default(); - if s.len() > 100 { - format!("{}...", &s[..100]) - } else { - s - } - }) - .unwrap_or_default(); - self.log(&format!("ALLOWED {} (args: {})", tool_name, args_summary)); - - // Forward the call to upstream - let mut upstreams = self.upstreams.write().await; - let conn = upstreams.get_mut(upstream_name).ok_or_else(|| { - McpError::internal_error("Upstream connection lost after spawn", None) - })?; - - match conn.call_tool(local_tool_name, request.arguments).await { - Ok(result) => Ok(result), - Err(e) => { - warn!( - "Upstream '{}' error calling '{}': {}", - upstream_name, local_tool_name, e - ); - Err(McpError::internal_error(e.to_string(), None)) - } - } - } -} - -// 
============================================================================ -// Entry Point -// ============================================================================ - -/// Start the MCP firewall server -pub async fn run(config_path: &PathBuf) -> Result<()> { - let config = FirewallConfig::from_file(config_path)?; - - let policy = FirewallPolicy { config }; - - let firewall = McpFirewall::new(policy)?; - - firewall.log("MCP Firewall started"); - firewall.log(&format!("Upstreams ({}):", firewall.upstream_configs.len())); - for (name, config) in &firewall.upstream_configs { - firewall.log(&format!(" [{}] command: {}", name, config.command)); - firewall.log(&format!(" [{}] allowed: {:?}", name, config.allowed)); - } - firewall.log(&format!("Total tools exposed: {}", firewall.tools.len())); - - // Run as MCP server on stdio - let service = firewall.serve(stdio()).await.inspect_err(|e| { - error!("Error starting MCP firewall: {}", e); - })?; - - service - .waiting() - .await - .map_err(|e| anyhow::anyhow!("MCP firewall exited with error: {:?}", e))?; - - Ok(()) -} - -// ============================================================================ -// Tests -// ============================================================================ - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_upstream_config_is_tool_allowed_exact() { - let config = UpstreamConfig { - command: "test".to_string(), - args: vec![], - env: HashMap::new(), - allowed: vec!["create_incident".to_string(), "get_incident".to_string()], - spawn_timeout_secs: 30, - }; - - assert!(config.is_tool_allowed("create_incident")); - assert!(config.is_tool_allowed("get_incident")); - assert!(!config.is_tool_allowed("delete_incident")); - assert!(!config.is_tool_allowed("list_incidents")); - } - - #[test] - fn test_upstream_config_is_tool_allowed_wildcard() { - let config = UpstreamConfig { - command: "test".to_string(), - args: vec![], - env: HashMap::new(), - allowed: vec!["*".to_string()], 
- spawn_timeout_secs: 30, - }; - - assert!(config.is_tool_allowed("anything")); - assert!(config.is_tool_allowed("create_incident")); - assert!(config.is_tool_allowed("dangerous_delete_all")); - } - - #[test] - fn test_upstream_config_is_tool_allowed_prefix_wildcard() { - let config = UpstreamConfig { - command: "test".to_string(), - args: vec![], - env: HashMap::new(), - allowed: vec!["get_*".to_string(), "list_*".to_string()], - spawn_timeout_secs: 30, - }; - - assert!(config.is_tool_allowed("get_incident")); - assert!(config.is_tool_allowed("get_user")); - assert!(config.is_tool_allowed("list_incidents")); - assert!(!config.is_tool_allowed("create_incident")); - assert!(!config.is_tool_allowed("delete_all")); - } - - #[test] - fn test_firewall_config_from_json() { - let json = r#"{ - "upstreams": { - "icm": { - "command": "icm-mcp", - "args": ["--verbose"], - "allowed": ["create_incident", "get_incident"] - }, - "kusto": { - "command": "kusto-mcp", - "allowed": ["query"] - } - } - }"#; - - let config: FirewallConfig = serde_json::from_str(json).unwrap(); - - assert_eq!(config.upstreams.len(), 2); - assert!(config.upstreams.contains_key("icm")); - assert!(config.upstreams.contains_key("kusto")); - - let icm = &config.upstreams["icm"]; - assert_eq!(icm.command, "icm-mcp"); - assert_eq!(icm.args, vec!["--verbose"]); - assert_eq!(icm.allowed, vec!["create_incident", "get_incident"]); - assert_eq!(icm.spawn_timeout_secs, 30, "Should default to 30 seconds"); - - let kusto = &config.upstreams["kusto"]; - assert_eq!(kusto.command, "kusto-mcp"); - assert!(kusto.args.is_empty()); - assert_eq!(kusto.allowed, vec!["query"]); - assert_eq!(kusto.spawn_timeout_secs, 30, "Should default to 30 seconds"); - } - - #[test] - fn test_parse_tool_name() { - assert_eq!( - McpFirewall::parse_tool_name("icm:create_incident"), - Some(("icm", "create_incident")) - ); - assert_eq!( - McpFirewall::parse_tool_name("kusto:query"), - Some(("kusto", "query")) - ); - 
assert_eq!(McpFirewall::parse_tool_name("no_colon"), None); - assert_eq!( - McpFirewall::parse_tool_name("multiple:colons:here"), - Some(("multiple", "colons:here")) - ); - } - - #[test] - fn test_firewall_config_default() { - let config = FirewallConfig::default(); - assert!(config.upstreams.is_empty()); - } - - #[test] - fn test_upstream_config_timeout_custom() { - let json = r#"{ - "upstreams": { - "slow-service": { - "command": "slow-mcp", - "allowed": ["*"], - "spawn_timeout_secs": 60 - } - } - }"#; - - let config: FirewallConfig = serde_json::from_str(json).unwrap(); - let slow_service = &config.upstreams["slow-service"]; - - assert_eq!( - slow_service.spawn_timeout_secs, 60, - "Should use custom timeout of 60 seconds" - ); - } - - #[test] - fn test_upstream_config_timeout_default() { - let json = r#"{ - "upstreams": { - "normal-service": { - "command": "normal-mcp", - "allowed": ["*"] - } - } - }"#; - - let config: FirewallConfig = serde_json::from_str(json).unwrap(); - let normal_service = &config.upstreams["normal-service"]; - - assert_eq!( - normal_service.spawn_timeout_secs, 30, - "Should default to 30 seconds when not specified" - ); - } -} diff --git a/src/mcp_metadata.rs b/src/mcp_metadata.rs deleted file mode 100644 index f917c8d..0000000 --- a/src/mcp_metadata.rs +++ /dev/null @@ -1,146 +0,0 @@ -//! MCP Metadata - Bundled tool definitions for agency MCPs -//! -//! This module provides access to pre-discovered MCP tool metadata that is -//! embedded at compile time. The metadata is refreshed by running: -//! -//! ```bash -//! # On Windows -//! ./refresh-mcp-metadata.ps1 -//! -//! # On Linux/macOS -//! ./refresh-mcp-metadata.sh -//! ``` -//! -//! The scripts query each built-in agency MCP and save the tool definitions -//! to `mcp-metadata.json`, which is then embedded into the binary. 
- -use serde::{Deserialize, Serialize}; -use std::collections::HashMap; - -/// Bundled MCP metadata (embedded at compile time) -const BUNDLED_METADATA: &str = include_str!("../mcp-metadata.json"); - -/// Metadata for a single tool -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ToolMetadata { - /// Tool name (without namespace prefix) - pub name: String, - /// Human-readable description - #[serde(skip_serializing_if = "Option::is_none")] - pub description: Option, - /// JSON schema for input parameters - #[serde(skip_serializing_if = "Option::is_none")] - pub input_schema: Option, -} - -/// Metadata for an MCP server -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct McpMetadata { - /// Server name/identifier - pub name: String, - /// Whether this is a built-in agency MCP - #[serde(default)] - pub builtin: bool, - /// Available tools - #[serde(default)] - pub tools: Vec, - /// When this metadata was last refreshed (ISO 8601) - #[serde(skip_serializing_if = "Option::is_none")] - pub refreshed_at: Option, - /// Error message if discovery failed - #[serde(skip_serializing_if = "Option::is_none")] - pub error: Option, -} - -/// Collection of MCP metadata -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct McpMetadataFile { - /// Schema version for forward compatibility - pub version: String, - /// When this file was generated - pub generated_at: String, - /// Metadata for each MCP server - pub mcps: HashMap, -} - -impl McpMetadataFile { - /// Load the bundled metadata (embedded at compile time) - pub fn bundled() -> Self { - serde_json::from_str(BUNDLED_METADATA) - .expect("Bundled mcp-metadata.json should be valid JSON") - } - - /// Get metadata for a specific MCP - pub fn get(&self, mcp_name: &str) -> Option<&McpMetadata> { - self.mcps.get(mcp_name) - } - - /// Get tools for a specific MCP - pub fn get_tools(&self, mcp_name: &str) -> Option<&[ToolMetadata]> { - self.mcps.get(mcp_name).map(|m| m.tools.as_slice()) - } - - /// Check if a 
tool exists for an MCP - pub fn has_tool(&self, mcp_name: &str, tool_name: &str) -> bool { - self.mcps - .get(mcp_name) - .map(|m| m.tools.iter().any(|t| t.name == tool_name)) - .unwrap_or(false) - } - - /// Get all known MCP names - pub fn mcp_names(&self) -> Vec<&str> { - self.mcps.keys().map(|s| s.as_str()).collect() - } - - /// Get all built-in MCP names (sorted alphabetically) - pub fn builtin_mcp_names(&self) -> Vec<&str> { - let mut names: Vec<&str> = self - .mcps - .iter() - .filter(|(_, m)| m.builtin) - .map(|(k, _)| k.as_str()) - .collect(); - names.sort(); - names - } - - /// Get all tool names for an MCP (useful for validation) - pub fn tool_names(&self, mcp_name: &str) -> Vec<&str> { - self.mcps - .get(mcp_name) - .map(|m| m.tools.iter().map(|t| t.name.as_str()).collect()) - .unwrap_or_default() - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_bundled_metadata_loads() { - let metadata = McpMetadataFile::bundled(); - assert_eq!(metadata.version, "1.0"); - // Should have the known built-in MCPs - assert!(metadata.mcps.contains_key("ado")); - assert!(metadata.mcps.contains_key("icm")); - assert!(metadata.mcps.contains_key("kusto")); - } - - #[test] - fn test_get_mcp() { - let metadata = McpMetadataFile::bundled(); - let ado = metadata.get("ado"); - assert!(ado.is_some()); - assert!(ado.unwrap().builtin); - } - - #[test] - fn test_mcp_names() { - let metadata = McpMetadataFile::bundled(); - let names = metadata.mcp_names(); - assert!(names.contains(&"ado")); - assert!(names.contains(&"icm")); - } -} diff --git a/templates/base.yml b/templates/base.yml index b913366..4b8929b 100644 --- a/templates/base.yml +++ b/templates/base.yml @@ -79,17 +79,17 @@ jobs: - bash: | mkdir -p "$(Agent.TempDirectory)/staging" - # Write MCP firewall configuration to a file - cat > "$(Agent.TempDirectory)/staging/mcp-firewall-config.json" << 'MCP_FIREWALL_EOF' - {{ firewall_config }} - MCP_FIREWALL_EOF + # Write MCPG (MCP Gateway) configuration to a 
file + cat > "$(Agent.TempDirectory)/staging/mcpg-config.json" << 'MCPG_CONFIG_EOF' + {{ mcpg_config }} + MCPG_CONFIG_EOF - echo "MCP firewall config:" - cat "$(Agent.TempDirectory)/staging/mcp-firewall-config.json" + echo "MCPG config:" + cat "$(Agent.TempDirectory)/staging/mcpg-config.json" # Validate JSON - python3 -m json.tool "$(Agent.TempDirectory)/staging/mcp-firewall-config.json" > /dev/null && echo "JSON is valid" - displayName: "Prepare MCP firewall config" + python3 -m json.tool "$(Agent.TempDirectory)/staging/mcpg-config.json" > /dev/null && echo "JSON is valid" + displayName: "Prepare MCPG config" - bash: | mkdir -p "$HOME/.copilot" @@ -110,28 +110,17 @@ jobs: cp "$AGENTIC_PIPELINES_PATH" /tmp/awf-tools/ado-aw chmod +x /tmp/awf-tools/ado-aw - # Copy MCP firewall config to /tmp - cp "$(Agent.TempDirectory)/staging/mcp-firewall-config.json" /tmp/awf-tools/staging/mcp-firewall-config.json + # Copy MCPG config to /tmp + cp "$(Agent.TempDirectory)/staging/mcpg-config.json" /tmp/awf-tools/staging/mcpg-config.json - # Generate MCP config pointing to /tmp paths (accessible inside AWF container) + # Generate MCP config for copilot CLI pointing to MCPG gateway on host + # The agent inside AWF reaches MCPG via host.docker.internal cat > /tmp/awf-tools/mcp-config.json << EOF { "mcpServers": { - "safeoutputs": { - "type": "stdio", - "tools": [ - "*" - ], - "command": "/tmp/awf-tools/ado-aw", - "args": ["mcp", "/tmp/awf-tools/staging", "{{ working_directory }}"] - }, - "mcp-firewall": { - "type": "stdio", - "tools": [ - "*" - ], - "command": "/tmp/awf-tools/ado-aw", - "args": ["mcp-firewall", "--config", "/tmp/awf-tools/staging/mcp-firewall-config.json"] + "mcpg": { + "type": "http", + "url": "http://host.docker.internal:80/mcp" } } } @@ -195,10 +184,92 @@ jobs: - bash: | docker pull ghcr.io/github/gh-aw-firewall/squid:latest docker pull ghcr.io/github/gh-aw-firewall/agent:latest - displayName: "Pre-pull AWF container images" + docker pull {{ mcpg_image }}:v{{ 
mcpg_version }}
+    displayName: "Pre-pull AWF and MCPG container images"
 
 {{ prepare_steps }}
 
+  # Start SafeOutputs HTTP server on host (MCPG proxies to it)
+  - bash: |
+      mkdir -p "$(Agent.TempDirectory)/staging/logs"
+      SAFE_OUTPUTS_PORT=8100
+      SAFE_OUTPUTS_API_KEY=$(openssl rand -base64 45 | tr -d '/+=')
+      echo "##vso[task.setvariable variable=SAFE_OUTPUTS_PORT]$SAFE_OUTPUTS_PORT"
+      echo "##vso[task.setvariable variable=SAFE_OUTPUTS_API_KEY;issecret=true]$SAFE_OUTPUTS_API_KEY"
+      # Start SafeOutputs as HTTP server in the background
+      nohup /tmp/awf-tools/ado-aw mcp-http \
+        --port "$SAFE_OUTPUTS_PORT" \
+        --api-key "$SAFE_OUTPUTS_API_KEY" \
+        "/tmp/awf-tools/staging" \
+        "{{ working_directory }}" \
+        > "$(Agent.TempDirectory)/staging/logs/safeoutputs.log" 2>&1 &
+      SAFE_OUTPUTS_PID=$!
+      echo "##vso[task.setvariable variable=SAFE_OUTPUTS_PID]$SAFE_OUTPUTS_PID"
+      echo "SafeOutputs HTTP server started on port $SAFE_OUTPUTS_PORT (PID: $SAFE_OUTPUTS_PID)"
+
+      # Wait for server to be ready
+      READY=false
+      for i in $(seq 1 30); do
+        if curl -sf "http://localhost:$SAFE_OUTPUTS_PORT/health" > /dev/null 2>&1; then
+          echo "SafeOutputs HTTP server is ready"
+          READY=true
+          break
+        fi
+        sleep 1
+      done
+      if [ "$READY" != "true" ]; then
+        echo "##vso[task.complete result=Failed]SafeOutputs HTTP server did not become ready within 30s"
+        exit 1
+      fi
+    displayName: "Start SafeOutputs HTTP server"
+
+  # Start MCP Gateway (MCPG) on host
+  - bash: |
+      MCP_GATEWAY_API_KEY=$(openssl rand -base64 45 | tr -d '/+=')
+      echo "##vso[task.setvariable variable=MCP_GATEWAY_API_KEY;issecret=true]$MCP_GATEWAY_API_KEY"
+
+      # Substitute runtime values into MCPG config
+      MCPG_CONFIG=$(cat /tmp/awf-tools/staging/mcpg-config.json \
+        | sed "s|\${SAFE_OUTPUTS_PORT}|$(SAFE_OUTPUTS_PORT)|g" \
+        | sed "s|\${SAFE_OUTPUTS_API_KEY}|$(SAFE_OUTPUTS_API_KEY)|g" \
+        | sed "s|\${MCP_GATEWAY_API_KEY}|$MCP_GATEWAY_API_KEY|g")
+
+      # Log the template config (before API key substitution) for debugging.
+ # Logging after substitution would leak MCP_GATEWAY_API_KEY since it's a + # local bash variable — ADO's secret masking only applies in subsequent steps. + echo "Starting MCPG with config template:" + cat /tmp/awf-tools/staging/mcpg-config.json | python3 -m json.tool + + # Start MCPG Docker container on host network. + # The Docker socket mount is required because MCPG spawns stdio-based MCP + # servers as sibling containers. This grants significant host access — acceptable + # here because the pipeline agent is already trusted and network-isolated by AWF. + echo "$MCPG_CONFIG" | docker run -i --rm \ + --name mcpg \ + --network host \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -e MCP_GATEWAY_API_KEY="$MCP_GATEWAY_API_KEY" \ + {{ mcpg_image }}:v{{ mcpg_version }} & + MCPG_PID=$! + echo "##vso[task.setvariable variable=MCPG_PID]$MCPG_PID" + echo "MCPG started (PID: $MCPG_PID)" + + # Wait for MCPG to be ready + READY=false + for i in $(seq 1 30); do + if curl -sf "http://localhost:80/health" > /dev/null 2>&1; then + echo "MCPG is ready" + READY=true + break + fi + sleep 1 + done + if [ "$READY" != "true" ]; then + echo "##vso[task.complete result=Failed]MCPG did not become ready within 30s" + exit 1 + fi + displayName: "Start MCP Gateway (MCPG)" + # Network isolation via AWF (Agentic Workflow Firewall) - bash: | set -o pipefail @@ -210,12 +281,15 @@ jobs: echo "Allowed domains: {{ allowed_domains }}" # AWF provides L7 domain whitelisting via Squid proxy + Docker containers. + # --enable-host-access allows the AWF container to reach host services + # (MCPG and SafeOutputs) via host.docker.internal. # AWF auto-mounts /tmp:/tmp:rw into the container, so copilot binary, # agent prompt, and MCP config are placed under /tmp/awf-tools/. 
sudo -E "$(Pipeline.Workspace)/awf/awf" \ --allow-domains {{ allowed_domains }} \ --skip-pull \ --env-all \ + --enable-host-access \ --container-workdir "{{ working_directory }}" \ --log-level info \ --proxy-logs-dir "$(Agent.TempDirectory)/staging/logs/firewall" \ @@ -251,6 +325,21 @@ jobs: displayName: "Collect safe outputs from AWF container" condition: always() + - bash: | + # Stop MCPG container + echo "Stopping MCPG..." + docker stop mcpg 2>/dev/null || true + echo "MCPG stopped" + + # Stop SafeOutputs HTTP server + if [ -n "$(SAFE_OUTPUTS_PID)" ]; then + echo "Stopping SafeOutputs (PID: $(SAFE_OUTPUTS_PID))..." + kill "$(SAFE_OUTPUTS_PID)" 2>/dev/null || true + echo "SafeOutputs stopped" + fi + displayName: "Stop MCPG and SafeOutputs" + condition: always() + {{ finalize_steps }} - bash: | diff --git a/tests/compiler_tests.rs b/tests/compiler_tests.rs index 5416c21..3538841 100644 --- a/tests/compiler_tests.rs +++ b/tests/compiler_tests.rs @@ -166,6 +166,34 @@ fn test_compiled_yaml_structure() { template_content.contains("{{ firewall_version }}"), "Template should contain firewall_version marker" ); + + // Verify MCPG integration + assert!( + template_content.contains("{{ mcpg_config }}"), + "Template should contain mcpg_config marker" + ); + assert!( + template_content.contains("{{ mcpg_image }}"), + "Template should contain mcpg_image marker" + ); + assert!( + template_content.contains("{{ mcpg_version }}"), + "Template should contain mcpg_version marker" + ); + assert!( + template_content.contains("--enable-host-access"), + "Template should include --enable-host-access for MCPG" + ); + + // Verify no legacy mcp-firewall references in template + assert!( + !template_content.contains("mcp-firewall-config"), + "Template should not reference legacy mcp-firewall config" + ); + assert!( + !template_content.contains("MCP_FIREWALL_EOF"), + "Template should not contain legacy firewall heredoc" + ); } /// Test that the example file is valid and can be parsed @@ 
-313,8 +341,7 @@ fn test_fixture_complete_agent() { "Should have mcp-servers" ); - // Verify it has both built-in and custom MCPs - assert!(content.contains("ado: true"), "Should have built-in MCP"); + // Verify it has MCP configuration and custom MCPs assert!(content.contains("command:"), "Should have custom MCP"); } @@ -375,5 +402,29 @@ fn test_compiled_output_no_unreplaced_markers() { "Compiled output should reference GitHub Releases for AWF" ); + // Verify MCPG references + assert!( + compiled.contains("ghcr.io/github/gh-aw-mcpg"), + "Compiled output should reference MCPG Docker image" + ); + assert!( + compiled.contains("host.docker.internal"), + "Compiled output should reference host.docker.internal for MCPG" + ); + assert!( + compiled.contains("--enable-host-access"), + "Compiled output should include --enable-host-access for AWF" + ); + + // Verify no legacy MCP firewall references + assert!( + !compiled.contains("mcp-firewall"), + "Compiled output should not reference legacy mcp-firewall" + ); + assert!( + !compiled.contains("mcp_firewall"), + "Compiled output should not reference legacy mcp_firewall" + ); + let _ = fs::remove_dir_all(&temp_dir); } diff --git a/tests/mcp_firewall_tests.rs b/tests/mcp_firewall_tests.rs deleted file mode 100644 index b65f025..0000000 --- a/tests/mcp_firewall_tests.rs +++ /dev/null @@ -1,281 +0,0 @@ -use std::io::{BufRead, Write}; -use std::process::{Child, Command, Stdio}; -use std::time::Duration; - -/// Guard that kills the child process on drop (even on panic) -struct FirewallGuard { - child: Child, - #[allow(dead_code)] - stderr_thread: Option>, -} - -impl Drop for FirewallGuard { - fn drop(&mut self) { - self.child.kill().ok(); - self.child.wait().ok(); - } -} - -/// Helper to create a temporary config file -fn create_config_file(config: &str) -> (tempfile::TempDir, std::path::PathBuf) { - let temp_dir = tempfile::tempdir().unwrap(); - let config_path = temp_dir.path().join("firewall-config.json"); - 
std::fs::write(&config_path, config).unwrap(); - (temp_dir, config_path) -} - -/// Helper to start the firewall with a config file -fn start_firewall(config_path: &std::path::PathBuf) -> FirewallGuard { - let binary_path = env!("CARGO_BIN_EXE_ado-aw"); - - let mut cmd = Command::new(binary_path); - cmd.arg("mcp-firewall"); - cmd.arg("--config").arg(config_path); - - cmd.stdin(Stdio::piped()); - cmd.stdout(Stdio::piped()); - cmd.stderr(Stdio::piped()); - - let mut child = cmd.spawn().expect("Failed to start firewall"); - - // Spawn thread to consume stderr - let stderr = child.stderr.take().expect("Failed to capture stderr"); - let stderr_thread = std::thread::spawn(move || { - let reader = std::io::BufReader::new(stderr); - for line in reader.lines() { - if let Ok(line) = line { - eprintln!("[firewall stderr] {}", line); - } else { - break; - } - } - }); - - // Give the firewall a moment to start - std::thread::sleep(Duration::from_millis(200)); - - FirewallGuard { - child, - stderr_thread: Some(stderr_thread), - } -} - -/// Send a JSON-RPC request and get response -fn send_jsonrpc( - child: &mut Child, - method: &str, - params: Option, -) -> serde_json::Value { - static REQUEST_ID: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(1); - let id = REQUEST_ID.fetch_add(1, std::sync::atomic::Ordering::SeqCst); - - let request = if let Some(p) = params { - serde_json::json!({ - "jsonrpc": "2.0", - "id": id, - "method": method, - "params": p - }) - } else { - serde_json::json!({ - "jsonrpc": "2.0", - "id": id, - "method": method - }) - }; - - let stdin = child.stdin.as_mut().expect("Failed to get stdin"); - let stdout = child.stdout.as_mut().expect("Failed to get stdout"); - - writeln!(stdin, "{}", serde_json::to_string(&request).unwrap()).unwrap(); - stdin.flush().unwrap(); - - let mut reader = std::io::BufReader::new(stdout); - let mut response_line = String::new(); - reader.read_line(&mut response_line).unwrap(); - - 
serde_json::from_str(&response_line).unwrap() -} - -#[test] -fn test_firewall_starts_with_empty_config() { - let config = r#"{"upstreams": {}}"#; - let (_temp_dir, config_path) = create_config_file(config); - - let mut guard = start_firewall(&config_path); - - // Initialize the MCP connection - let init_params = serde_json::json!({ - "protocolVersion": "2024-11-05", - "capabilities": {}, - "clientInfo": { - "name": "test-client", - "version": "1.0" - } - }); - - let response = send_jsonrpc(&mut guard.child, "initialize", Some(init_params)); - - assert!( - response.get("result").is_some(), - "Should get initialize result" - ); - assert!( - response["result"]["serverInfo"]["name"].as_str().is_some(), - "Should have server info" - ); -} - -#[test] -fn test_firewall_lists_no_tools_with_empty_config() { - let config = r#"{"upstreams": {}}"#; - let (_temp_dir, config_path) = create_config_file(config); - - let mut guard = start_firewall(&config_path); - - // Initialize first - let init_params = serde_json::json!({ - "protocolVersion": "2024-11-05", - "capabilities": {}, - "clientInfo": { "name": "test", "version": "1.0" } - }); - send_jsonrpc(&mut guard.child, "initialize", Some(init_params)); - - // Send initialized notification (no response expected, but we need to send it) - let stdin = guard.child.stdin.as_mut().unwrap(); - writeln!( - stdin, - r#"{{"jsonrpc":"2.0","method":"notifications/initialized"}}"# - ) - .unwrap(); - stdin.flush().unwrap(); - - // List tools - let response = send_jsonrpc(&mut guard.child, "tools/list", None); - - assert!( - response.get("result").is_some(), - "Should get tools/list result" - ); - let tools = response["result"]["tools"] - .as_array() - .expect("tools should be array"); - assert!(tools.is_empty(), "Should have no tools with empty config"); -} - -#[test] -fn test_firewall_rejects_unknown_tool() { - let config = r#"{"upstreams": {}}"#; - let (_temp_dir, config_path) = create_config_file(config); - - let mut guard = 
start_firewall(&config_path); - - // Initialize - let init_params = serde_json::json!({ - "protocolVersion": "2024-11-05", - "capabilities": {}, - "clientInfo": { "name": "test", "version": "1.0" } - }); - send_jsonrpc(&mut guard.child, "initialize", Some(init_params)); - - let stdin = guard.child.stdin.as_mut().unwrap(); - writeln!( - stdin, - r#"{{"jsonrpc":"2.0","method":"notifications/initialized"}}"# - ) - .unwrap(); - stdin.flush().unwrap(); - - // Try to call a tool that doesn't exist - let call_params = serde_json::json!({ - "name": "unknown:tool", - "arguments": {} - }); - let response = send_jsonrpc(&mut guard.child, "tools/call", Some(call_params)); - - assert!( - response.get("error").is_some(), - "Should get error for unknown tool" - ); - let error = &response["error"]; - assert!( - error["message"] - .as_str() - .unwrap_or("") - .contains("Unknown upstream"), - "Error should mention unknown upstream, got: {:?}", - error - ); -} - -#[test] -fn test_firewall_rejects_invalid_tool_format() { - let config = r#"{"upstreams": {}}"#; - let (_temp_dir, config_path) = create_config_file(config); - - let mut guard = start_firewall(&config_path); - - // Initialize - let init_params = serde_json::json!({ - "protocolVersion": "2024-11-05", - "capabilities": {}, - "clientInfo": { "name": "test", "version": "1.0" } - }); - send_jsonrpc(&mut guard.child, "initialize", Some(init_params)); - - let stdin = guard.child.stdin.as_mut().unwrap(); - writeln!( - stdin, - r#"{{"jsonrpc":"2.0","method":"notifications/initialized"}}"# - ) - .unwrap(); - stdin.flush().unwrap(); - - // Try to call a tool without namespace - let call_params = serde_json::json!({ - "name": "no_colon_here", - "arguments": {} - }); - let response = send_jsonrpc(&mut guard.child, "tools/call", Some(call_params)); - - assert!( - response.get("error").is_some(), - "Should get error for invalid format" - ); - let error = &response["error"]; - assert!( - error["message"] - .as_str() - .unwrap_or("") - 
.contains("Invalid tool name format"), - "Error should mention invalid format, got: {:?}", - error - ); -} - -#[test] -fn test_config_parsing() { - // Test that we can parse a realistic config - let config = r#"{ - "upstreams": { - "icm": { - "command": "icm-mcp", - "args": ["--verbose"], - "env": {"ICM_TOKEN": "secret"}, - "allowed": ["create_incident", "get_*"] - }, - "kusto": { - "command": "kusto-mcp", - "allowed": ["query"] - } - } - }"#; - - let parsed: serde_json::Value = serde_json::from_str(config).unwrap(); - - assert_eq!(parsed["upstreams"]["icm"]["command"], "icm-mcp"); - assert_eq!(parsed["upstreams"]["icm"]["args"][0], "--verbose"); - assert_eq!(parsed["upstreams"]["icm"]["allowed"][0], "create_incident"); - assert_eq!(parsed["upstreams"]["icm"]["allowed"][1], "get_*"); - assert_eq!(parsed["upstreams"]["kusto"]["allowed"][0], "query"); -}