diff --git a/README.md b/README.md index 1ef7869e..03a71283 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ A Model Context Protocol (MCP) server that provides tools for interacting with C - **JWE Authentication**: Optional JWE-based authentication with encryption for secure database access - **TLS Support**: Full TLS encryption support for both ClickHouse® connections and MCP server endpoints - **Comprehensive Tools**: Built-in tools for listing tables, describing schemas, and executing queries -- **Dynamic Tools**: Automatically generate MCP tools from ClickHouse® views (see [Dynamic Tools Documentation](docs/dynamic_tools.md)) +- **Dynamic Tools**: Automatically generate MCP tools from ClickHouse® views and tables (see [Tools Documentation](docs/tools.md)) - **Resource Templates**: Dynamic resource discovery for database schemas and table information - **Query Prompts**: AI-assisted query building and optimization prompts - **Configuration Management**: Flexible configuration via files, environment variables, or CLI flags @@ -203,7 +203,7 @@ logging: level: "info" ``` -> **Note**: For detailed information about dynamic tools configuration, see the [Dynamic Tools Documentation](docs/dynamic_tools.md). +> **Note**: For detailed information about dynamic tools configuration, see the [Tools Documentation](docs/tools.md). Use the configuration file: @@ -247,7 +247,7 @@ View `COMMENT` supports either: - A plain string description. - A strict JSON object with top-level `title`, `description`, and `annotations`. -See [Dynamic Tools Documentation](docs/dynamic_tools.md) for examples and supported metadata. +See [Tools Documentation](docs/tools.md) for examples and supported metadata. 
## Available Resources diff --git a/docs/dynamic_tools.md b/docs/dynamic_tools.md deleted file mode 100644 index bdee80fe..00000000 --- a/docs/dynamic_tools.md +++ /dev/null @@ -1,300 +0,0 @@ -# Dynamic Tools - -Dynamic tools in Altinity MCP Server allow you to automatically generate MCP tools from ClickHouse views. This feature enables you to expose parameterized views as callable tools through the MCP interface. - -## Overview - -Dynamic tools are configured in the `server.dynamic_tools` section of the configuration file. The server will query `system.tables` for views and create corresponding MCP tools based on the rules you define. - -## Configuration - -Each dynamic tool rule consists of three optional fields: - -- **`name`** (optional): The explicit name for the generated tool. When specified, the regexp must match exactly one view from `system.tables`. If not specified, the tool name is generated from the matched view name. -- **`regexp`** (required): A regular expression pattern to match against view names in the format `database.view_name`. -- **`prefix`** (optional): A prefix to prepend to the tool name (applied before name generation or to the explicit name). 
- -### Basic Configuration - -```yaml -server: - dynamic_tools: - - regexp: "mydb\\..*" - prefix: "db_" -``` - -This configuration will: -- Match all views in the `mydb` database -- Generate tool names like `db_mydb_my_view` for each matched view - -### Named Tool Configuration - -```yaml -server: - dynamic_tools: - - name: "get_user_data" - regexp: "users\\.user_info_view" - prefix: "api_" -``` - -This configuration will: -- Match exactly one view: `users.user_info_view` -- Create a tool named `api_get_user_data` -- Log an error if the regexp matches zero or multiple views - -### Mixed Configuration - -You can combine both approaches: - -```yaml -server: - dynamic_tools: - # Generate tools for all views in analytics database - - regexp: "analytics\\..*" - prefix: "analytics_" - - # Specific tool with custom name - - name: "get_current_metrics" - regexp: "monitoring\\.current_metrics_view" - - # Another specific tool with prefix - - name: "user_report" - regexp: "reports\\.user_activity" - prefix: "report_" -``` - -## View Parameters - -Dynamic tools automatically detect parameters in your ClickHouse view definitions. Parameters are defined using the `{parameter_name: type}` syntax in the view's SQL. - -### Example View with Parameters - -```sql -CREATE VIEW analytics.user_sessions AS -SELECT - user_id, - session_start, - session_end, - duration_seconds -FROM sessions -WHERE user_id = {user_id: UInt64} - AND session_start >= {start_date: Date} - AND session_start < {end_date: Date} -COMMENT 'Get user session data for a specific date range' -``` - -This view will generate a tool with three parameters: -- `user_id` (number/integer) -- `start_date` (string/date) -- `end_date` (string/date) - -The view's `COMMENT` can be either: -- A plain string, which becomes the tool description. -- A strict JSON object with top-level tool metadata. 
- -If no comment is provided, Altinity MCP generates a default human-readable title, a default read-only description, and read-only MCP annotations. - -## Tool Metadata in `COMMENT` - -Altinity MCP supports strict JSON metadata in a view `COMMENT` for these top-level fields: - -- `title` -- `description` -- `annotations.openWorldHint` - -Example: - -```sql -CREATE OR REPLACE VIEW mcp.search AS -SELECT number, title -FROM github_events -WHERE title ILIKE '%' || {query: String} || '%' -COMMENT '{"title":"GitHub Search","description":"Returns issues with matching titles.","annotations":{"openWorldHint":true}}' -``` - -Notes: - -- Dynamic view-backed tools are always exposed with: - - `readOnlyHint=true` - - `destructiveHint=false` -- `annotations.openWorldHint` is optional and may be set to `true` when the tool interacts with arbitrary external targets. -- If the `COMMENT` is not valid JSON, it is treated as a plain description string. -- Parameter-level metadata is not supported in `COMMENT` yet. - -## MCP Safety Hints - -Dynamic tools generated from ClickHouse views are exposed as read-only MCP tools. This matches the OpenAI Apps SDK guidance for tool annotations and reduces unnecessary confirmation prompts in compatible clients. 
- -Relevant references: - -- [Apps SDK Reference](https://developers.openai.com/apps-sdk/reference) -- [Define tools](https://developers.openai.com/apps-sdk/plan/tools) -- [Build your MCP server](https://developers.openai.com/apps-sdk/build/mcp-server) -- [MCP concepts / server docs](https://developers.openai.com/apps-sdk/concepts/mcp-server) - -## Type Mapping - -ClickHouse types are automatically mapped to JSON Schema types for the MCP tool interface: - -| ClickHouse Type | JSON Type | JSON Format | -|----------------|-----------|-------------| -| Int*, UInt* | integer | int64 | -| Float*, Decimal* | number | double | -| Bool, UInt8 | boolean | - | -| Date, Date32 | string | date | -| DateTime* | string | date-time | -| UUID | string | uuid | -| Other types | string | - | - -## Tool Name Generation - -Tool names are generated using the following rules: - -1. If `name` is specified: `snake_case(prefix + name)` -2. If `name` is not specified: `snake_case(prefix + database.view_name)` - -The `snake_case` function converts the name to lowercase and replaces non-alphanumeric characters with underscores. - -### Examples - -| Name | Regexp | Prefix | Matched View | Generated Tool Name | -|------|--------|--------|--------------|---------------------| -| - | `mydb\\.my_view` | `api_` | `mydb.my_view` | `api_mydb_my_view` | -| `get_data` | `mydb\\.my_view` | `api_` | `mydb.my_view` | `api_get_data` | -| `get_data` | `mydb\\.my_view` | - | `mydb.my_view` | `get_data` | -| - | `mydb\\..*` | - | `mydb.users` | `mydb_users` | - -## Validation Rules - -The dynamic tools system enforces several validation rules: - -1. **Regexp Validity**: All regular expressions must be valid. Invalid patterns are logged as errors and skipped. - -2. **No Overlaps**: Each view can only be matched by one rule. If a view matches multiple rules, it will be logged as an error and skipped. - -3. 
**Named Rules Must Match Exactly Once**: When a rule specifies a `name`, the `regexp` must match exactly one view from `system.tables`. The system will log an error if: - - The regexp matches zero views - - The regexp matches more than one view - -4. **View Requirements**: Only views with `engine='View'` are considered for dynamic tool generation. - -## Error Handling - -The dynamic tools system logs errors in the following cases: - -- **Invalid regexp**: `dynamic_tools: invalid regexp, skipping rule` -- **Overlap detected**: `dynamic_tools: overlap between rules detected for view` -- **Named rule matches zero views**: `dynamic_tools: named rule matched no views` -- **Named rule matches multiple views**: `dynamic_tools: named rule matched multiple views, expected exactly one` - -These errors are logged but do not prevent the server from starting. Valid rules will still be processed. - -## Complete Example - -```yaml -clickhouse: - host: localhost - port: 8123 - database: default - username: default - password: "" - protocol: http - -server: - transport: http - address: 0.0.0.0 - port: 8080 - openapi: - enabled: true - dynamic_tools: - # Match all views in analytics database - - regexp: "analytics\\..*" - prefix: "analytics_" - - # Specific tool for user data - - name: "get_user_info" - regexp: "users\\.user_info_view" - prefix: "api_" - - # Specific tool for metrics (no prefix) - - name: "current_metrics" - regexp: "monitoring\\.metrics_view" - -logging: - level: info -``` - -## OpenAPI Integration - -Dynamic tools are automatically exposed through the OpenAPI endpoints when `server.openapi.enabled` is set to `true`. Each tool gets: - -- A POST endpoint at `/{jwe_token}/openapi/{tool_name}` -- Request body schema based on the view parameters -- Response schema for the query results - -## Best Practices - -1. **Use descriptive names**: When using the `name` field, choose clear, descriptive names that indicate the tool's purpose. - -2. 
**Add comments to views**: Use the `COMMENT` clause in your view definitions to provide meaningful descriptions for the generated tools. - Use strict JSON if you want to set `title` or explicit MCP annotations. - -3. **Use specific regexps**: For named tools, use specific regular expressions to ensure only one view matches. - -4. **Organize by database**: Group related views in the same database and use regexp patterns to generate tools for entire databases. - -5. **Test your regexps**: Before deploying, test your regular expressions to ensure they match the intended views. - -6. **Monitor logs**: Check the server logs during startup to catch any validation errors or misconfigurations. - -## Troubleshooting - -### Tool not being generated - -1. Check that the view exists in `system.tables` with `engine='View'` -2. Verify the regexp pattern matches the view name in the format `database.view_name` -3. Check for overlap errors in the logs - -### Named tool reports "matched no views" - -1. Verify the view name format is `database.view_name` -2. Check that the view exists in ClickHouse -3. Ensure the regexp pattern correctly escapes special characters (e.g., `\\.` for dots) - -### Named tool reports "matched multiple views" - -1. Make the regexp more specific to match only one view -2. Consider using a different approach or splitting into multiple rules - -### Dynamic tools not loading when JWE or OAuth forward mode is enabled - -When JWE authentication is enabled, ClickHouse credentials are normally embedded in the per-user JWE token. In OAuth forward mode, static credentials are automatically cleared for user requests and each request uses the forwarded bearer token instead. However, dynamic tool discovery runs at the server level — it queries `system.tables` once at startup, before any user request arrives, so there is no per-user token in context. - -The server handles this by falling back to the static `clickhouse` credentials from the config file for discovery. 
This means **you must configure static ClickHouse credentials** (even minimal read-only ones) so the server can reach ClickHouse for view discovery at startup: - -```yaml -clickhouse: - host: my-clickhouse.internal - port: 8123 - username: "mcp_discovery" # needs SELECT on system.tables only - password: "secret" - -server: - jwe: - enabled: true - jwe_secret_key: "..." - dynamic_tools: - - regexp: "mydb\\..*" -``` - -Without these credentials the server logs: -``` -WRN Failed to ensure dynamic tools error="dynamic_tools: failed to get ClickHouse client: missing JWE token" -``` -and retries on every request until credentials are provided. - -### Parameters not detected - -1. Ensure parameters are defined using the correct syntax: `{param_name: Type}` -2. Check that the parameter names are valid identifiers (letters, numbers, underscores) -3. Verify the view's `CREATE` statement is accessible in `system.tables.create_table_query` diff --git a/docs/howto_integrate.md b/docs/howto_integrate.md index f99f0e35..6beed88f 100644 --- a/docs/howto_integrate.md +++ b/docs/howto_integrate.md @@ -71,7 +71,7 @@ To integrate Altinity MCP with Claude.ai: ![Claude Web Integration Step 2](screenshots/claude_web_connectors_1.jpg) -* Enter a name for your connector (e.g., `altinity-mcp-jwe`) and the server URL, which should include the JWE token. For example: `https://host/token/http`. +* Enter a name for your connector (e.g., `altinity-mcp-jwe`) and the server URL, which should include the JWE token. For example: `https://host/token`. 
![Claude Web Integration Step 3](screenshots/claude_web_connectors_2.jpg) @@ -148,7 +148,7 @@ To integrate Altinity MCP with Claude Desktop: "mcpServers": { "altinity-mcp": { "command": "npx", - "args": ["-y", "mcp-remote", "http{s}://your-mcp-host:port/generated_jwe_token/http"] + "args": ["-y", "mcp-remote", "http{s}://your-mcp-host:port/generated_jwe_token"] } } } @@ -162,10 +162,10 @@ Download and install Claude Code https://www.anthropic.com/claude-code To integrate Altinity MCP with Claude Code, you can use the `claude mcp add` command for HTTP transport: ```bash -claude mcp add --transport http altinity-mcp https://your-mcp-host:port/generated_jwe_token/http +claude mcp add --transport http altinity-mcp https://your-mcp-host:port/generated_jwe_token ``` -Replace `https://your-mcp-host:port/generated_jwe_token/http` with the actual URL of your Altinity MCP server, including the JWE token and `/http` suffix. +Replace `https://your-mcp-host:port/generated_jwe_token` with the actual URL of your Altinity MCP server, including the JWE token. This command will configure Claude Code to use your Altinity MCP server as a tool provider. @@ -180,7 +180,7 @@ Pasting the following configuration into your Cursor `~/.cursor/mcp.json` file i { "mcpServers": { "altinity-mcp": { - "url": "https://your-mcp-server-url:port/default/http", + "url": "https://your-mcp-server-url:port/default", "headers": { "Authorization": "Bearer your-jwe-token" } @@ -200,7 +200,7 @@ Add this to your Windsurf MCP config file. See [Windsurf MCP docs](https://docs. { "mcpServers": { "context7": { - "serverUrl": "https://your-mcp-server-url:port/default/http", + "serverUrl": "https://your-mcp-server-url:port/default", "headers": { "Authorization": "Bearer your-jwe-token" } @@ -215,7 +215,7 @@ Add this to your Windsurf MCP config file. See [Windsurf MCP docs](https://docs. If you encounter issues during integration: 1. Verify your Altinity MCP server is running and accessible -2. 
Ensure your JWE token is valid and not (use `curl -vvv https://your-mcp-host/your_jwe_token/http`) +2. Ensure your JWE token is valid (use `curl -vvv https://your-mcp-host/your_jwe_token`) 3. Check that the server configuration matches the integration settings 4. Confirm network connectivity between the AI tool and your Altinity MCP server 5. Review server logs for authentication or connection errors diff --git a/docs/oauth_authorization.md b/docs/oauth_authorization.md index 07e33eb2..d9d397c1 100644 --- a/docs/oauth_authorization.md +++ b/docs/oauth_authorization.md @@ -248,7 +248,7 @@ For direct bearer-token use, a plain reverse proxy is usually enough. For browser-based MCP login, the frontend must expose two public URL spaces: -- the protected resource, for example `https://PUBLIC_HOST.example.com/http` +- the protected resource, for example `https://PUBLIC_HOST.example.com/` - the OAuth authorization server, for example `https://PUBLIC_HOST.example.com/oauth` The proxy must: @@ -263,7 +263,7 @@ The proxy must: Example nginx configuration: ```nginx -location ^~ /http { +location / { proxy_http_version 1.1; proxy_set_header Host $host; proxy_set_header Authorization $http_authorization; @@ -289,8 +289,8 @@ server: mode: "forward" gating_secret_key: "CHANGE_ME_TO_A_RANDOM_SECRET" issuer: "https://accounts.google.com" - audience: "https://PUBLIC_HOST.example.com/http" - public_resource_url: "https://PUBLIC_HOST.example.com/http" + audience: "https://PUBLIC_HOST.example.com/" + public_resource_url: "https://PUBLIC_HOST.example.com/" public_auth_server_url: "https://PUBLIC_HOST.example.com/oauth" client_id: "YOUR_GOOGLE_WEB_CLIENT.apps.googleusercontent.com" client_secret: "YOUR_GOOGLE_CLIENT_SECRET" diff --git a/docs/tools.md b/docs/tools.md new file mode 100644 index 00000000..35f94374 --- /dev/null +++ b/docs/tools.md @@ -0,0 +1,372 @@ +# Tools + +Altinity MCP Server exposes ClickHouse functionality to MCP clients through **tools**. 
There are two categories: + +- **Static tools** — fixed, registered at startup, always available regardless of ClickHouse state. +- **Dynamic tools** — discovered lazily from ClickHouse (views and tables) on the first authenticated request and surfaced to clients via `notifications/tools/list_changed`. + +Both kinds are configured under a single unified key: `server.tools`. + +--- + +## Static tools + +Two static tools are built into the server: + +| Tool | Description | readOnlyHint | destructiveHint | +|------|-------------|--------------|-----------------| +| `execute_query` | Read-only SQL execution. Rejects any statement that is not `SELECT`, `WITH`, `SHOW`, `DESCRIBE`, `EXISTS`, or `EXPLAIN`. | `true` | `false` | +| `write_query` | Arbitrary SQL (including DDL/DML). Only registered when `clickhouse.read_only: false`. | `false` | `true` | + +`execute_query` accepts: + +- `query` (string, required) — the SQL to run. +- `limit` (integer, optional) — caps returned rows. +- `settings` (object, optional) — ClickHouse query settings forwarded with the request. + +`write_query` accepts the same `query` and `settings` parameters and executes the statement as-is. + +--- + +## Read dynamic tools (Views) + +Parameters are declared with ClickHouse's `{name: Type}` placeholder syntax in the view body: + +```sql +CREATE VIEW analytics.user_sessions AS +SELECT user_id, session_start, duration_seconds +FROM sessions +WHERE user_id = {user_id: UInt64} + AND session_start >= {start_date: Date} + AND session_start < {end_date: Date} +COMMENT 'Get user sessions for a given date range'; +``` + +The resulting tool exposes `user_id` (integer), `start_date` (string/date), `end_date` (string/date). + +### Tool metadata via `COMMENT` + +A view's `COMMENT` becomes the tool description. Two forms are supported: + +1. **Plain string** — used directly as the description. +2. 
**Strict JSON object** with any of: + - `title` + - `description` + - `annotations.openWorldHint` + +Example: + +```sql +CREATE OR REPLACE VIEW mcp.search AS +SELECT number, title +FROM github_events +WHERE title ILIKE '%' || {query: String} || '%' +COMMENT '{"title":"GitHub Search","description":"Returns issues with matching titles.","annotations":{"openWorldHint":true}}'; +``` + +Notes: + +- If the comment is not valid JSON, it is treated as a plain description. +- Read dynamic tools are always exposed with `readOnlyHint=true`, `destructiveHint=false`. The JSON form only influences `title`, `description`, and `openWorldHint`. +- Per-parameter metadata in `COMMENT` is not supported yet. +- If no comment is provided, a default title and description are generated. + +--- + +## Write dynamic tools (Tables) + +If you need to insert single row to the table, constructing INSERT statement would burn some amount of tokens. Having a tool is more economical. + +For `type: write` with `mode: insert`, the tool accepts one row at a time. Its parameters are built from `system.columns`: + +- Included: columns with `default_kind = ''` (no DEFAULT, MATERIALIZED, EPHEMERAL, or ALIAS). +- Excluded: columns with any default expression. + +The tool inserts a single row using the provided values. Bulk or streaming inserts are not supported through this mode. + +--- + +## Parameter descriptions + +Every dynamic-tool parameter carries a human-readable `description` in its JSON Schema. It is resolved in this order: + +1. **Tool-level JSON `COMMENT` with a `params` map** — works for both views and tables. Highest priority. +2. **Column-level `COMMENT` from `system.columns`** — applies to write tools (tables); each column's comment becomes its parameter description. +3. **ClickHouse type string** — final fallback (e.g. `"UInt64"`, `"DateTime"`). 
+ +View parameters (`{name: Type}` slots in the SELECT body) aren't real columns, so level 2 doesn't apply to them — use the JSON `COMMENT` `params` map to describe them. + +### View example (JSON `params` map is the only source) + +```sql +CREATE VIEW analytics.user_sessions AS +SELECT user_id, session_start +FROM sessions +WHERE user_id = {user_id: UInt64} + AND session_start >= {since: Date} +COMMENT '{ + "description": "Get user sessions for a given date range", + "params": { + "user_id": "User ID to fetch", + "since": "Start of date range (inclusive)" + } +}'; +``` + +### Table example (column comments + optional JSON override) + +```sql +CREATE TABLE events.clicks ( + user_id UInt64 COMMENT 'User who clicked', + target String COMMENT 'URL clicked' +) ENGINE = Log +COMMENT '{ + "params": { + "target": "Full target URL including query string" + } +}'; +``` + +Here `user_id` uses its column comment (`"User who clicked"`) and `target` is overridden by the tool-level JSON (`"Full target URL including query string"`). Columns without comments fall back to the ClickHouse type string. + +--- + +## Default behavior (no `tools` config) + +If both `server.tools` and the legacy `server.dynamic_tools` are empty, the server registers: + +- `execute_query` (always) +- `write_query` (only if `clickhouse.read_only: false`) + +No dynamic discovery runs. You only get dynamic tools by adding a `regexp` entry under `server.tools`. + +```yaml +clickhouse: + host: localhost + port: 8123 + username: default + password: "" +# No server.tools block — execute_query (and write_query if not read-only) are auto-registered. +``` + +--- + +## Configuring `server.tools` + +`server.tools` is a list. Each entry describes one tool (static) or one discovery rule (dynamic): + +| Field | Required | Meaning | +|-------|----------|---------| +| `type` | yes | `"read"` or `"write"`. | +| `name` | static only | `"execute_query"` or `"write_query"`. 
| +| `regexp` | dynamic only | Regex matched against `database.table_or_view_name`. | +| `prefix` | no | Prepended to auto-generated tool names (dynamic only). | +| `mode` | dynamic write only | Must be `"insert"`. No other value is accepted. | + +Rules enforced at config load: + +- An entry must have **either** `name` **or** `regexp`, never both. +- A `type: write` entry with `regexp` **must** set `mode: insert`. +- Any `mode` value other than `insert` is rejected with an error. +- Invalid regexps are reported at load time and the rule is skipped. +- If a view or table matches multiple rules, that overlap is logged and the later match is skipped. + +--- + +## Example configurations + +### Minimal — static tools only + +```yaml +clickhouse: + host: localhost + port: 8123 + username: default + password: "" +# server.tools omitted => execute_query (+ write_query if not read-only) registered by default. +``` + +### Read-only server with view-backed dynamic tools + +```yaml +clickhouse: + host: localhost + port: 8123 + read_only: true + +server: + tools: + - type: read + name: execute_query + - type: read + regexp: "analytics\\..*_view" + prefix: "analytics_" +``` + +Every view in the `analytics` database whose name ends in `_view` is exposed as a read tool named `analytics__`. + +### Mixed read + write dynamic tools + +```yaml +server: + tools: + - type: read + name: execute_query + - type: read + regexp: "analytics\\..*_view" + prefix: "ro_" + - type: write + regexp: "events\\..*" + prefix: "log_" + mode: insert +``` + +Views under `analytics` become read tools, tables under `events` become insert-only write tools. + +### Legacy `dynamic_tools` (deprecated, still works) + +```yaml +server: + dynamic_tools: + - regexp: "mydb\\..*" + prefix: "db_" +``` + +The server accepts this form but logs a deprecation warning at startup. Prefer `server.tools`. + +--- + +## Dynamic discovery + +Discovery is **lazy** — it runs on the first authenticated tool call, not at startup. 
This matters for OAuth forward mode, where no ClickHouse credentials exist until a user request arrives. + +After a successful discovery pass, the server emits `notifications/tools/list_changed` so compatible MCP clients refresh their tool list. + +If discovery fails (e.g. credential error, network issue), static tools remain available and discovery is retried on the next authenticated call. + +### What gets discovered + +- **Read tools** — rows in `system.tables` with `engine = 'View'`. Parameters are parsed from `create_table_query` using the `{param_name: Type}` syntax. +- **Write tools** (`mode: insert`) — rows in `system.tables` of any engine. Column metadata comes from `system.columns`; only columns where `default_kind = ''` (no default) are surfaced as required parameters. Columns with defaults are omitted from the tool schema. + +### Credentials used for discovery + +| Auth mode | Credentials used | +|-----------|------------------| +| JWE | The per-request JWE token from the triggering call. | +| OAuth forward | The forwarded Bearer token from the triggering call. | +| Plain / no auth | Static `clickhouse.username` / `clickhouse.password` from config, if set. | + +Static credentials are no longer **required** for discovery in JWE or OAuth-forward setups — whichever token arrives with the first authenticated call is used to probe `system.tables`. 
+ +--- + +## Type mapping + +ClickHouse types are mapped to JSON Schema as follows: + +| ClickHouse type | JSON type | JSON format | +|-----------------|-----------|-------------| +| `Int*`, `UInt*` | `integer` | `int64` | +| `Float*`, `Decimal*` | `number` | `double` | +| `Bool`, `UInt8` (boolean-ish) | `boolean` | — | +| `Date`, `Date32` | `string` | `date` | +| `DateTime*` | `string` | `date-time` | +| `UUID` | `string` | `uuid` | +| Anything else | `string` | — | + +--- + +## Tool name generation + +For dynamic entries (have `regexp`, no `name`), the tool name is: + +``` +snake_case(prefix + database + "_" + view_or_table_name) +``` + +`snake_case` lowercases the input and replaces any run of non-alphanumerics with `_`. + +Examples: + +| Prefix | Matched object | Generated tool name | +|--------|----------------|---------------------| +| `ro_` | `analytics.user_sessions` | `ro_analytics_user_sessions` | +| `log_` | `events.clicks` | `log_events_clicks` | +| _(none)_ | `mydb.users` | `mydb_users` | + +--- + +## MCP safety hints + +| Tool kind | `readOnlyHint` | `destructiveHint` | +|-----------|----------------|-------------------| +| `execute_query` | `true` | `false` | +| Dynamic read (view) | `true` | `false` | +| `write_query` | `false` | `true` | +| Dynamic write (`insert`) | `false` | `false` | + +These hints follow the OpenAI Apps SDK tool-annotation guidance and reduce unnecessary confirmation prompts in compatible clients. 
+ +References: + +- [Apps SDK Reference](https://developers.openai.com/apps-sdk/reference) +- [Define tools](https://developers.openai.com/apps-sdk/plan/tools) +- [Build your MCP server](https://developers.openai.com/apps-sdk/build/mcp-server) +- [MCP concepts / server docs](https://developers.openai.com/apps-sdk/concepts/mcp-server) + +--- + +## OpenAPI integration + +When `server.openapi.enabled: true`, every registered tool — static and dynamic — also gets: + +- A `POST` endpoint at `/{jwe_token}/openapi/{tool_name}` (or the non-JWE variant in other auth modes). +- A request body schema derived from the tool's parameters. +- A response schema matching the query result shape. + +Because dynamic discovery is lazy, dynamic tools only appear in the OpenAPI document **after** the first authenticated call has triggered discovery. + +--- + +## Troubleshooting + +### Dynamic tools don't show up immediately after the server starts + +Expected. Discovery runs on the first authenticated tool call, not at startup. Make one call (e.g. `execute_query`) and clients that honor `notifications/tools/list_changed` will refresh to show the new dynamic tools. Clients that don't honor the notification need to re-list tools manually. + +### A view or table isn't being exposed + +1. Confirm the object exists in `system.tables` (with `engine = 'View'` for read tools). +2. Check that your `regexp` matches `database.name` — remember to escape the dot (`\\.`). +3. Look for overlap warnings: if multiple rules match the same object, the later one is skipped. +4. For write tools, verify you set `mode: insert` and `type: write`. + +### Parameters aren't detected on a view + +1. Use the exact syntax `{param_name: Type}` inside the view body. +2. Parameter names must be valid identifiers. +3. Confirm the server can read `system.tables.create_table_query` for that view. 
+ +### Write tool is missing expected columns + +Columns with defaults (`DEFAULT`, `MATERIALIZED`, `EPHEMERAL`, `ALIAS`) are intentionally omitted from the tool schema — they are filled in by ClickHouse. Only columns with `default_kind = ''` appear as parameters. + +### Discovery keeps failing + +Each failed attempt is logged and retried on the next authenticated call. Common causes: + +- The token (JWE or Bearer) carries credentials that can't read `system.tables` / `system.columns`. +- The configured `clickhouse.host` / `port` aren't reachable from the server. +- A `regexp` is invalid — check startup logs for `invalid regexp, skipping rule`. + +Static tools (`execute_query`, `write_query`) continue to work even while discovery is failing. + +### `mode` validation error at startup + +``` +server.tools: write tool with regexp requires mode: insert +``` + +Add `mode: insert` to the offending write entry. Any other `mode` value is rejected. +test diff --git a/pkg/config/config.go b/pkg/config/config.go index c6381831..1f087db4 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -209,11 +209,15 @@ type ServerConfig struct { JWE JWEConfig `json:"jwe" yaml:"jwe"` OAuth OAuthConfig `json:"oauth" yaml:"oauth"` OpenAPI OpenAPIConfig `json:"openapi" yaml:"openapi" desc:"OpenAPI endpoints configuration"` - CORSOrigin string `json:"cors_origin" yaml:"cors_origin" flag:"cors-origin" desc:"CORS origin for HTTP/SSE transports (default: *)"` - ToolInputSettings []string `json:"tool_input_settings" yaml:"tool_input_settings" desc:"Allowed ClickHouse settings that can be passed via tool arguments"` - BlockedQueryClauses []string `json:"blocked_query_clauses" yaml:"blocked_query_clauses" desc:"AST clause kinds to block: SQL-style names derived from clickhouse-sql-parser types (e.g. 
WHERE, SETTINGS, FORMAT, SET, EXPLAIN) or full type stems (WHERECLAUSE); INTO OUTFILE is a special form"` - // DynamicTools defines rules for generating tools from ClickHouse views - DynamicTools []DynamicToolRule `json:"dynamic_tools" yaml:"dynamic_tools"` + CORSOrigin string `json:"cors_origin" yaml:"cors_origin" flag:"cors-origin" desc:"CORS origin for HTTP/SSE transports (default: *)"` + ToolInputSettings []string `json:"tool_input_settings" yaml:"tool_input_settings" desc:"Allowed ClickHouse settings that can be passed via tool arguments"` + BlockedQueryClauses []string `json:"blocked_query_clauses" yaml:"blocked_query_clauses" desc:"AST clause kinds to block: SQL-style names derived from clickhouse-sql-parser types (e.g. WHERE, SETTINGS, FORMAT, SET, EXPLAIN) or full type stems (WHERECLAUSE); INTO OUTFILE is a special form"` + // Tools is the unified tool configuration (static + dynamic in one array). + // Static tools: type + name. Dynamic tools: type + regexp + prefix + mode. + Tools []ToolDefinition `json:"tools" yaml:"tools" desc:"Tool definitions (static and dynamic)"` + // DynamicTools is the legacy rule list for generating tools from ClickHouse views. + // DEPRECATED: use Tools instead. Retained for backwards compatibility. + DynamicTools []DynamicToolRule `json:"dynamic_tools" yaml:"dynamic_tools" desc:"(Deprecated: use tools instead) Rules for generating tools from ClickHouse views"` } // OpenAPIConfig defines OpenAPI endpoints configuration @@ -222,11 +226,31 @@ type OpenAPIConfig struct { TLS bool `json:"tls" yaml:"tls" desc:"Use TLS (https) for OpenAPI endpoints"` } -// DynamicToolRule describes a rule to create dynamic tools from views +// ToolDefinition describes a tool in the unified tools configuration. +// +// - Static tool: Type + Name (no Regexp). Currently supported names: +// "execute_query" (read), "write_query" (write). +// - Dynamic tool: Type + Regexp (+ optional Name/Prefix). 
Type "read" +// discovers views; type "write" discovers tables. Dynamic write tools +// require Mode (currently only "insert" is implemented). +type ToolDefinition struct { + Type string `json:"type" yaml:"type"` // "read" or "write" + Name string `json:"name" yaml:"name"` // static tool name, or label for dynamic rule + Regexp string `json:"regexp" yaml:"regexp"` // dynamic discovery pattern (matched against db.name) + Prefix string `json:"prefix" yaml:"prefix"` // tool-name prefix for discovered tools + Mode string `json:"mode" yaml:"mode"` // "insert" (required for dynamic write tools) +} + +// DynamicToolRule describes a rule to create dynamic tools from views. +// DEPRECATED: use ToolDefinition instead. Retained for backwards compatibility. type DynamicToolRule struct { Name string `json:"name" yaml:"name"` Regexp string `json:"regexp" yaml:"regexp"` Prefix string `json:"prefix" yaml:"prefix"` + // Type and Mode are accepted so DynamicToolRule can round-trip through + // the new unified Tools path without losing information. 
+ Type string `json:"type" yaml:"type"` // "read" or "write" + Mode string `json:"mode" yaml:"mode"` // "insert" for write tools } // LogLevel defines the logging level diff --git a/pkg/server/server.go b/pkg/server/server.go index 85a011e8..cfa80313 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -6,6 +6,7 @@ import ( "errors" "fmt" "io" + "math" "net/http" "reflect" "regexp" @@ -89,15 +90,16 @@ type ClickHouseJWEServer struct { oidcConfigCacheURL string oidcConfigMu sync.RWMutex oidcConfigTime time.Time - blockedClauses map[string]bool + blockedClauses map[string]bool } type dynamicToolParam struct { - Name string - CHType string - JSONType string - JSONFormat string - Required bool + Name string + CHType string + JSONType string + JSONFormat string + Required bool + Description string // resolved from column COMMENT or JSON COMMENT params; empty → falls back to CHType } type dynamicToolMeta struct { @@ -108,12 +110,15 @@ type dynamicToolMeta struct { Description string Annotations *mcp.ToolAnnotations Params []dynamicToolParam + ToolType string // "read" (view) or "write" (table) + WriteMode string // "insert" for write tools; empty for read tools } type dynamicToolCommentMetadata struct { Title string `json:"title"` Description string `json:"description"` Annotations *dynamicToolCommentAnnotations `json:"annotations"` + Params map[string]string `json:"params"` } type dynamicToolCommentAnnotations struct { @@ -153,15 +158,18 @@ func NewClickHouseMCPServer(cfg config.Config, version string) *ClickHouseJWESer }, opts) chJweServer := &ClickHouseJWEServer{ - MCPServer: srv, - Config: cfg, - Version: version, - dynamicTools: make(map[string]dynamicToolMeta), + MCPServer: srv, + Config: cfg, + Version: version, + dynamicTools: make(map[string]dynamicToolMeta), blockedClauses: NormalizeBlockedClauses(cfg.Server.BlockedQueryClauses), } - // Register tools, resources, and prompts - RegisterTools(chJweServer, cfg) + // Register tools, resources, and 
prompts. + // Pass pointer to the server's Config so RegisterTools can store converted + // dynamic-tool rules back into Config.Server.DynamicTools for EnsureDynamicTools + // to consume later. + RegisterTools(chJweServer, &chJweServer.Config) // dynamic tools registered lazily via EnsureDynamicTools RegisterResources(chJweServer) RegisterPrompts(chJweServer) @@ -1078,39 +1086,228 @@ func (s *ClickHouseJWEServer) GetClickHouseClientWithOAuth(ctx context.Context, // look details in https://github.com/Altinity/altinity-mcp/issues/19 var ErrJSONEscaper = strings.NewReplacer("'", "\u0027", "`", "\u0060") -// RegisterTools adds the ClickHouse tools to the MCP server. -// When cfg.Server.ToolInputSettings is non-empty, a "settings" property is -// added to every query-executing tool's schema. -func RegisterTools(srv AltinityMCPServer, cfg config.Config) { +// RegisterTools adds ClickHouse tools to the MCP server. It accepts either +// the new unified Tools configuration or the legacy DynamicTools form +// (deprecated; converted automatically with a warning). With no config, +// it registers execute_query (read-only) and write_query (destructive) +// as defaults. +// +// cfg is a pointer because converted dynamic-tool rules are stored back +// into cfg.Server.DynamicTools so EnsureDynamicTools can discover them +// later on the first authenticated request. +func RegisterTools(srv AltinityMCPServer, cfg *config.Config) { + toolsToRegister := resolveToolDefinitions(cfg) + + staticToolCount := 0 + dynamicRules := make([]config.ToolDefinition, 0, len(toolsToRegister)) + + for _, td := range toolsToRegister { + if td.Type != "read" && td.Type != "write" { + log.Error().Str("type", td.Type).Msg("Invalid tool type, must be 'read' or 'write'") + continue + } + + switch { + case td.Name != "" && td.Regexp == "": + // Static tool: bound to a known name. 
+ if registerStaticTool(srv, td, &cfg.Server, cfg.ClickHouse.ReadOnly) { + staticToolCount++ + } + case td.Regexp != "": + // Dynamic tool: discovered from ClickHouse metadata at first use. + if td.Type == "write" { + if td.Mode == "" { + log.Error().Str("regexp", td.Regexp).Msg("Write tool must specify mode (only 'insert' is supported)") + continue + } + if td.Mode != "insert" { + log.Error().Str("regexp", td.Regexp).Str("mode", td.Mode).Msg("Write tool mode not supported (only 'insert' is implemented); skipping") + continue + } + } + dynamicRules = append(dynamicRules, td) + default: + log.Error().Str("name", td.Name).Str("regexp", td.Regexp).Msg("Tool definition must have either name (static) or regexp (dynamic)") + } + } + + // Stash dynamic rules in the legacy slice that EnsureDynamicTools reads. + cfg.Server.DynamicTools = convertToDynamicToolRules(dynamicRules) + + log.Info(). + Int("static_tool_count", staticToolCount). + Int("dynamic_tool_rules", len(dynamicRules)). + Msg("ClickHouse tools registered") +} + +// resolveToolDefinitions picks the source of tool definitions from config: +// the new unified Tools array, the legacy DynamicTools slice (with a +// deprecation warning), or a sensible default (execute_query + write_query). +func resolveToolDefinitions(cfg *config.Config) []config.ToolDefinition { + if len(cfg.Server.Tools) > 0 { + return cfg.Server.Tools + } + if len(cfg.Server.DynamicTools) > 0 { + log.Warn().Msg("dynamic_tools config is deprecated, use tools instead") + out := make([]config.ToolDefinition, 0, len(cfg.Server.DynamicTools)) + for _, old := range cfg.Server.DynamicTools { + td := config.ToolDefinition{ + Type: old.Type, + Name: old.Name, + Regexp: old.Regexp, + Prefix: old.Prefix, + Mode: old.Mode, + } + // Legacy DynamicToolRule entries had no Type; they described view-based read tools. 
+ if td.Type == "" && td.Regexp != "" { + td.Type = "read" + } + out = append(out, td) + } + return out + } + return []config.ToolDefinition{ + {Type: "read", Name: "execute_query"}, + {Type: "write", Name: "write_query"}, + } +} + +// registerStaticTool registers one of the supported static tools ("execute_query" +// or "write_query"). Returns true if the tool was actually added to srv. +func registerStaticTool(srv AltinityMCPServer, td config.ToolDefinition, srvCfg *config.ServerConfig, readOnly bool) bool { + switch td.Type { + case "read": + if td.Name == "execute_query" { + srv.AddTool(buildExecuteQueryTool(srvCfg), HandleReadOnlyQuery) + log.Info().Str("tool", "execute_query").Msg("Static read tool registered") + return true + } + log.Warn().Str("tool_name", td.Name).Msg("Unknown static read tool name") + return false + + case "write": + if td.Name == "write_query" { + if readOnly { + log.Info().Str("tool", "write_query").Msg("Write tool skipped (read-only mode)") + return false + } + srv.AddTool(buildWriteQueryTool(srvCfg), HandleExecuteQuery) + log.Info().Str("tool", "write_query").Msg("Static write tool registered") + return true + } + log.Warn().Str("tool_name", td.Name).Msg("Unknown static write tool name") + return false + + default: + log.Error().Str("type", td.Type).Msg("Unknown static tool type") + return false + } +} + +// buildExecuteQueryTool builds the execute_query tool definition. execute_query +// is ALWAYS read-only (regardless of the server's read-only flag); it rejects +// non-SELECT statements at call time via HandleReadOnlyQuery. +func buildExecuteQueryTool(srvCfg *config.ServerConfig) *mcp.Tool { properties := map[string]any{ "query": map[string]any{ "type": "string", - "description": "SQL query to execute. 
In read-only mode, only SELECT/WITH/SHOW/DESC/EXISTS/EXPLAIN are allowed.", + "description": "Read-only SQL query (SELECT, WITH, SHOW, DESCRIBE, EXISTS, EXPLAIN).", }, "limit": map[string]any{ "type": "number", "description": "Maximum number of rows to return (default: 100000)", }, } - if settingsSchema := buildToolInputSettingsSchema(cfg.Server.ToolInputSettings); settingsSchema != nil { + if settingsSchema := buildToolInputSettingsSchema(srvCfg.ToolInputSettings); settingsSchema != nil { properties["settings"] = settingsSchema } - - executeQueryTool := &mcp.Tool{ + return &mcp.Tool{ Name: "execute_query", Title: "Execute SQL Query", - Description: "Executes a SQL query against ClickHouse and returns the results", - Annotations: makeExecuteQueryAnnotations(cfg.ClickHouse.ReadOnly), + Description: "Executes a read-only SQL query against ClickHouse and returns the results. Only SELECT, WITH, SHOW, DESCRIBE, EXISTS, and EXPLAIN statements are allowed — write operations are rejected; use write_query for those.", + Annotations: &mcp.ToolAnnotations{ + ReadOnlyHint: true, + DestructiveHint: boolPtr(false), + OpenWorldHint: boolPtr(false), + }, + InputSchema: map[string]any{ + "type": "object", + "properties": properties, + "required": []string{"query"}, + }, + } +} + +// buildWriteQueryTool builds the write_query tool definition. write_query +// accepts arbitrary SQL (INSERT, UPDATE, DELETE, ALTER, CREATE, DROP, ...). +// It is not registered at all when the server runs in read-only mode. 
+func buildWriteQueryTool(srvCfg *config.ServerConfig) *mcp.Tool { + properties := map[string]any{ + "query": map[string]any{ + "type": "string", + "description": "SQL write query (INSERT, UPDATE, DELETE, ALTER, CREATE, DROP, TRUNCATE).", + }, + "limit": map[string]any{ + "type": "number", + "description": "Maximum number of rows to return for queries that produce result sets", + }, + } + if settingsSchema := buildToolInputSettingsSchema(srvCfg.ToolInputSettings); settingsSchema != nil { + properties["settings"] = settingsSchema + } + return &mcp.Tool{ + Name: "write_query", + Title: "Execute Write Query", + Description: "Executes a write query (INSERT, UPDATE, DELETE, ALTER, CREATE, DROP, TRUNCATE) against ClickHouse. Not registered when the server runs in read-only mode.", + Annotations: &mcp.ToolAnnotations{ + ReadOnlyHint: false, + DestructiveHint: boolPtr(true), + OpenWorldHint: boolPtr(false), + }, InputSchema: map[string]any{ "type": "object", "properties": properties, "required": []string{"query"}, }, } +} - srv.AddTool(executeQueryTool, HandleExecuteQuery) +// convertToDynamicToolRules packs unified ToolDefinition entries back into the +// legacy DynamicToolRule shape so EnsureDynamicTools can consume them. +func convertToDynamicToolRules(defs []config.ToolDefinition) []config.DynamicToolRule { + rules := make([]config.DynamicToolRule, len(defs)) + for i, td := range defs { + rules[i] = config.DynamicToolRule{ + Name: td.Name, + Regexp: td.Regexp, + Prefix: td.Prefix, + Type: td.Type, + Mode: td.Mode, + } + } + return rules +} - log.Info().Int("tool_count", 1).Msg("ClickHouse tools registered") +// HandleReadOnlyQuery wraps HandleExecuteQuery with a SELECT-only guard. +// Write-family statements are rejected with a clear error that points the +// client at write_query. 
+func HandleReadOnlyQuery(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + arguments, err := getArgumentsMap(req) + if err != nil { + return NewToolResultError(err.Error()), nil + } + queryArg, ok := arguments["query"] + if !ok { + return NewToolResultError("query parameter is required"), nil + } + query, ok := queryArg.(string) + if !ok || query == "" { + return NewToolResultError("query parameter must be a non-empty string"), nil + } + if !isSelectQuery(query) { + return NewToolResultError("execute_query only accepts read-only statements (SELECT, WITH, SHOW, DESCRIBE, EXISTS, EXPLAIN). Use write_query for write operations."), nil + } + return HandleExecuteQuery(ctx, req) } // RegisterResources adds ClickHouse resources to the MCP server @@ -1265,11 +1462,38 @@ func RegisterPrompts(srv AltinityMCPServer) { log.Info().Int("prompt_count", 0).Msg("ClickHouse prompts registered") } -// EnsureDynamicTools discovers ClickHouse views and registers MCP/OpenAPI tools +// EnsureDynamicTools discovers dynamic tools (views for reads, tables for writes) +// from ClickHouse and registers them with the MCP server. It's safe to call on +// every request: the fast path short-circuits once init completes. +// +// Discovery is deferred until the caller has usable credentials. In OAuth +// forward mode the Bearer token only arrives on tools/call, not tools/list — +// so the first tools/list just returns static tools, and the first authenticated +// tools/call triggers discovery. The MCP SDK's AddTool automatically fires +// notifications/tools/list_changed, prompting the client to re-fetch. +// +// Concurrency: discovery does CH round-trips which can be slow. We hold the +// write lock only while discovery is in progress. If another goroutine is +// already discovering we return immediately without blocking — concurrent +// tools/list calls see the current (static-only) tool set and get updated +// when the in-flight discovery notifies. 
func (s *ClickHouseJWEServer) EnsureDynamicTools(ctx context.Context) error { - s.dynamicToolsMu.Lock() + // Fast path: already initialized. + s.dynamicToolsMu.RLock() + if s.dynamicToolsInit { + s.dynamicToolsMu.RUnlock() + return nil + } + s.dynamicToolsMu.RUnlock() + + // Try the write lock; skip if another goroutine is already discovering. + if !s.dynamicToolsMu.TryLock() { + return nil + } defer s.dynamicToolsMu.Unlock() + // Double-check under the write lock — another goroutine may have finished + // between our RUnlock and TryLock. if s.dynamicToolsInit { return nil } @@ -1279,64 +1503,113 @@ func (s *ClickHouseJWEServer) EnsureDynamicTools(ctx context.Context) error { return nil } - // Get ClickHouse client for view discovery. - // Try with the token from context first; if JWE is enabled but no token is present, - // fall back to the static config (e.g. open ClickHouse used for tool discovery). - token := s.ExtractTokenFromCtx(ctx) - chClient, err := s.GetClickHouseClient(ctx, token) + // In forward-OAuth mode with blank static credentials, the OAuth bearer + // isn't in context on the tools/list handshake. Don't mark dynamicToolsInit + // true here — let the next request retry with a real token. + if !s.hasDiscoveryCredentials(ctx) { + log.Debug().Msg("dynamic_tools: no credentials available yet; deferring discovery") + return nil + } + + readTools, err := s.discoverReadTools(ctx) + if err != nil { + return err + } + writeTools, err := s.discoverWriteTools(ctx) if err != nil { - if errors.Is(err, jwe_auth.ErrMissingToken) && s.Config.Server.JWE.Enabled { - // No per-user token available; use static ClickHouse config for discovery. - chClient, err = clickhouse.NewClient(ctx, s.Config.ClickHouse) + return err + } + + s.registerDynamicTools(readTools, writeTools) + s.dynamicToolsInit = true + return nil +} + +// hasDiscoveryCredentials reports whether the current context has any form +// of credentials that can be used to query ClickHouse for tool discovery. 
+func (s *ClickHouseJWEServer) hasDiscoveryCredentials(ctx context.Context) bool { + if s.ExtractTokenFromCtx(ctx) != "" { + return true + } + if s.ExtractOAuthTokenFromCtx(ctx) != "" { + return true + } + if s.Config.ClickHouse.Username != "" { + return true + } + return false +} + +// getDiscoveryClient returns a ClickHouse client that honors whichever kind +// of credential is available on ctx (JWE token, OAuth bearer, or static +// fallback). Callers must Close() the returned client. +func (s *ClickHouseJWEServer) getDiscoveryClient(ctx context.Context) (*clickhouse.Client, error) { + return s.GetClickHouseClientFromCtx(ctx) +} + +// filterRulesByType returns only the rules matching the requested "read" +// or "write" type. Legacy rules without an explicit Type default to "read" +// when they carry a Regexp. +func filterRulesByType(rules []config.DynamicToolRule, toolType string) []config.DynamicToolRule { + filtered := make([]config.DynamicToolRule, 0, len(rules)) + for _, rule := range rules { + ruleType := rule.Type + if ruleType == "" && rule.Regexp != "" { + ruleType = "read" } - if err != nil { - return fmt.Errorf("dynamic_tools: failed to get ClickHouse client: %w", err) + if ruleType == toolType { + filtered = append(filtered, rule) } } + return filtered +} + +// discoverReadTools scans system.tables for views and produces dynamic read-tool +// metadata for every view that matches a configured read rule. 
+func (s *ClickHouseJWEServer) discoverReadTools(ctx context.Context) (map[string]dynamicToolMeta, error) { + readRules := filterRulesByType(s.Config.Server.DynamicTools, "read") + if len(readRules) == 0 { + return map[string]dynamicToolMeta{}, nil + } + + chClient, err := s.getDiscoveryClient(ctx) + if err != nil { + return nil, fmt.Errorf("dynamic_tools: failed to get ClickHouse client: %w", err) + } defer func() { if closeErr := chClient.Close(); closeErr != nil { log.Error().Err(closeErr).Msg("dynamic_tools: can't close clickhouse") } }() - // fetch views - q := "SELECT database, name, create_table_query, comment FROM system.tables WHERE engine='View'" - result, err := chClient.ExecuteQuery(ctx, q) + result, err := chClient.ExecuteQuery(ctx, "SELECT database, name, create_table_query, comment FROM system.tables WHERE engine='View'") if err != nil { - return fmt.Errorf("dynamic_tools: failed to list views: %w", err) + return nil, fmt.Errorf("dynamic_tools: failed to list views: %w", err) } - // compile regex rules - type ruleCompiled struct { + type compiledRule struct { r *regexp.Regexp prefix string name string } - rules := make([]ruleCompiled, 0, len(s.Config.Server.DynamicTools)) - for _, rule := range s.Config.Server.DynamicTools { + rules := make([]compiledRule, 0, len(readRules)) + namedMatches := make(map[int][]string) + for i, rule := range readRules { if rule.Regexp == "" { continue } compiled, compErr := regexp.Compile(rule.Regexp) if compErr != nil { - log.Error().Err(compErr).Str("regexp", rule.Regexp).Msg("dynamic_tools: invalid regexp, skipping rule") + log.Error().Err(compErr).Str("regexp", rule.Regexp).Msg("dynamic_tools: invalid read regexp, skipping rule") continue } - rules = append(rules, ruleCompiled{r: compiled, prefix: rule.Prefix, name: rule.Name}) - } - - // detect overlaps: map view -> matched rule indexes - overlaps := false - dynamicCount := 0 - - // Track matches for rules with name field to ensure they match exactly once - 
namedRuleMatches := make(map[int][]string) // rule index -> matched views - for i, rc := range rules { - if rc.name != "" { - namedRuleMatches[i] = make([]string, 0) + rules = append(rules, compiledRule{r: compiled, prefix: rule.Prefix, name: rule.Name}) + if rule.Name != "" { + namedMatches[i] = nil } } + tools := make(map[string]dynamicToolMeta) for _, row := range result.Rows { if len(row) < 4 { continue @@ -1351,9 +1624,8 @@ func (s *ClickHouseJWEServer) EnsureDynamicTools(ctx context.Context) error { for i, rc := range rules { if rc.r.MatchString(full) { matched = append(matched, i) - // Track named rule matches if rc.name != "" { - namedRuleMatches[i] = append(namedRuleMatches[i], full) + namedMatches[i] = append(namedMatches[i], full) } } } @@ -1361,42 +1633,231 @@ func (s *ClickHouseJWEServer) EnsureDynamicTools(ctx context.Context) error { continue } if len(matched) > 1 { - log.Error().Str("view", full).Msg("dynamic_tools: overlap between rules detected for view") - overlaps = true + log.Error().Str("view", full).Msg("dynamic_tools: overlap between read rules, skipping view") continue } - // single rule match -> register tool rc := rules[matched[0]] - - // Determine tool name var toolName string if rc.name != "" { - // Use explicit name if provided toolName = snakeCase(rc.prefix + rc.name) } else { - // Generate from view name toolName = snakeCase(rc.prefix + full) } params := parseViewParams(create) meta := buildDynamicToolMeta(toolName, db, name, comment, params) - s.dynamicTools[toolName] = meta + meta.ToolType = "read" + tools[toolName] = meta + } - // create MCP tool with parameters using map[string]any for InputSchema - props := make(map[string]any) - for _, p := range meta.Params { - prop := map[string]any{ - "type": p.JSONType, - "description": p.CHType, + // Warn on named rules that matched zero or more than one view. 
	// Tail of discoverReadTools: named rules are expected to match exactly
	// one view; warn loudly on zero or multiple matches.
	for i, matches := range namedMatches {
		rc := rules[i]
		switch {
		case len(matches) == 0:
			log.Error().Str("name", rc.name).Str("regexp", rc.r.String()).Msg("dynamic_tools: named read rule matched no views")
		case len(matches) > 1:
			log.Error().Str("name", rc.name).Str("regexp", rc.r.String()).Strs("matched_views", matches).Msg("dynamic_tools: named read rule matched multiple views, expected exactly one")
		}
	}

	log.Info().Int("tool_count", len(tools)).Msg("Dynamic read tools discovered")
	return tools, nil
}

// discoverWriteTools scans system.tables for writable tables (not Views /
// MaterializedViews / Aliases, and not in system databases) and produces
// dynamic write-tool metadata for every table that matches a configured
// write rule. Skipped entirely when the server is in read-only mode.
func (s *ClickHouseJWEServer) discoverWriteTools(ctx context.Context) (map[string]dynamicToolMeta, error) {
	if s.Config.ClickHouse.ReadOnly {
		log.Info().Msg("dynamic_tools: write tools disabled in read-only mode")
		return map[string]dynamicToolMeta{}, nil
	}

	writeRules := filterRulesByType(s.Config.Server.DynamicTools, "write")
	if len(writeRules) == 0 {
		return map[string]dynamicToolMeta{}, nil
	}

	chClient, err := s.getDiscoveryClient(ctx)
	if err != nil {
		return nil, fmt.Errorf("dynamic_tools: failed to get ClickHouse client: %w", err)
	}
	defer func() {
		if closeErr := chClient.Close(); closeErr != nil {
			log.Error().Err(closeErr).Msg("dynamic_tools: can't close clickhouse")
		}
	}()

	// Only real tables are write targets: views/aliases and system databases
	// are excluded up front so the match loop never sees them.
	const q = "SELECT database, name, comment FROM system.tables " +
		"WHERE engine NOT IN ('View', 'MaterializedView', 'Alias') " +
		"AND database NOT IN ('system', 'INFORMATION_SCHEMA')"
	result, err := chClient.ExecuteQuery(ctx, q)
	if err != nil {
		return nil, fmt.Errorf("dynamic_tools: failed to list tables: %w", err)
	}

	// Pre-compile the configured patterns; a bad regexp disables only its
	// own rule, never the whole discovery pass.
	type compiledRule struct {
		r      *regexp.Regexp
		prefix string
		name   string
		mode   string
	}
	rules := make([]compiledRule, 0, len(writeRules))
	for _, rule := range writeRules {
		if rule.Regexp == "" {
			continue
		}
		compiled, compErr := regexp.Compile(rule.Regexp)
		if compErr != nil {
			log.Error().Err(compErr).Str("regexp", rule.Regexp).Msg("dynamic_tools: invalid write regexp, skipping rule")
			continue
		}
		rules = append(rules, compiledRule{r: compiled, prefix: rule.Prefix, name: rule.Name, mode: rule.Mode})
	}

	tools := make(map[string]dynamicToolMeta)
	for _, row := range result.Rows {
		if len(row) < 3 {
			continue
		}
		db, _ := row[0].(string)
		name, _ := row[1].(string)
		comment, _ := row[2].(string)
		full := db + "." + name

		matched := make([]int, 0)
		for i, rc := range rules {
			if rc.r.MatchString(full) {
				matched = append(matched, i)
			}
		}
		if len(matched) == 0 {
			continue
		}
		// Policy: a table claimed by more than one rule is skipped rather than
		// arbitrarily assigned — same as the read-tool overlap policy.
		if len(matched) > 1 {
			log.Error().Str("table", full).Msg("dynamic_tools: overlap between write rules, skipping table")
			continue
		}

		rc := rules[matched[0]]
		cols, colErr := s.getTableColumnsForMode(ctx, chClient, db, name)
		if colErr != nil {
			log.Warn().Err(colErr).Str("table", full).Msg("dynamic_tools: failed to get columns for write tool, skipping")
			continue
		}
		// A JSON table COMMENT may override per-parameter descriptions.
		if metadata, ok := parseDynamicToolComment(comment); ok {
			applyCommentParamOverrides(cols, metadata)
		}

		var toolName string
		if rc.name != "" {
			toolName = snakeCase(rc.prefix + rc.name)
		} else {
			toolName = snakeCase(rc.prefix + full)
		}

		tools[toolName] = dynamicToolMeta{
			ToolName:    toolName,
			Title:       humanizeToolName(toolName),
			Database:    db,
			Table:       name,
			Description: buildWriteToolDescription(comment, db, name, rc.mode),
			Annotations: &mcp.ToolAnnotations{
				ReadOnlyHint:    false,
				DestructiveHint: boolPtr(true),
				OpenWorldHint:   boolPtr(false),
			},
			Params:    cols,
			ToolType:  "write",
			WriteMode: rc.mode,
		}
	}

	log.Info().Int("tool_count", len(tools)).Msg("Dynamic write tools discovered")
	return tools, nil
}
getTableColumnsForMode loads columns for a given table and filters out those +// that can't be populated by a client (MATERIALIZED and ALIAS). +// +// Note: we intentionally select only fields that exist across all supported +// ClickHouse versions. Some older versions (e.g., 26.1.x Altinity Antalya) +// do not expose a `column_type` column, so we rely on `default_kind` alone +// which carries the same information for our purposes. +func (s *ClickHouseJWEServer) getTableColumnsForMode(ctx context.Context, chClient *clickhouse.Client, db, table string) ([]dynamicToolParam, error) { + q := fmt.Sprintf( + "SELECT name, type, default_kind, comment FROM system.columns WHERE database='%s' AND table='%s' ORDER BY position", + db, table, + ) + result, err := chClient.ExecuteQuery(ctx, q) + if err != nil { + return nil, err + } + + params := make([]dynamicToolParam, 0, len(result.Rows)) + for _, row := range result.Rows { + if len(row) < 4 { + continue + } + name, _ := row[0].(string) + chType, _ := row[1].(string) + defaultKind, _ := row[2].(string) + comment, _ := row[3].(string) + + // MATERIALIZED and ALIAS columns are computed server-side; clients must not + // supply values for them. All remaining columns are optional in INSERT: + // omitted fields are filled by ClickHouse from DEFAULT expressions or the + // type's zero/default value. + if defaultKind == "MATERIALIZED" || defaultKind == "ALIAS" { + continue + } + + jsonType, jsonFmt := mapCHType(chType) + params = append(params, dynamicToolParam{ + Name: name, + CHType: chType, + JSONType: jsonType, + JSONFormat: jsonFmt, + Required: false, + Description: strings.TrimSpace(comment), + }) + } + return params, nil +} + +// buildWriteToolDescription renders a human-readable description for a +// discovered write tool. Falls back to a mode-specific default when the +// table has no COMMENT. 
// buildWriteToolDescription produces the human-readable description for a
// discovered dynamic write tool. A non-blank table COMMENT wins verbatim;
// otherwise a mode-specific default of the form "<action> in db.table" is
// generated ("insert" and any unknown mode fall back to "Insert data").
func buildWriteToolDescription(comment, db, table, mode string) string {
	if strings.TrimSpace(comment) != "" {
		return comment
	}
	var action string
	switch mode {
	case "update":
		action = "Update data"
	case "upsert":
		action = "Insert or update data"
	default:
		action = "Insert data"
	}
	return action + " in " + db + "." + table
}
buildToolInputSettingsSchema(s.Config.Server.ToolInputSettings); settingsSchema != nil { + props["settings"] = settingsSchema + } + schema := map[string]any{ + "type": "object", + "properties": props, + } + s.AddTool(&mcp.Tool{ + Name: toolName, + Title: meta.Title, + Description: meta.Description, + Annotations: meta.Annotations, + InputSchema: schema, + }, s.makeDynamicWriteToolHandler(meta)) } - if overlaps { - log.Error().Msg("dynamic_tools: overlaps detected; conflicting views were skipped as per policy 'error on overlap'") - } - log.Info().Int("tool_count", dynamicCount).Msg("Dynamic ClickHouse view tools registered") - - s.dynamicToolsInit = true - return nil + log.Info(). + Int("read_tools", len(readTools)). + Int("write_tools", len(writeTools)). + Msg("Dynamic tools registered") } func makeDynamicToolHandler(meta dynamicToolMeta) ToolHandlerFunc { @@ -1436,7 +1904,10 @@ func makeDynamicToolHandler(meta dynamicToolMeta) ToolHandlerFunc { return nil, fmt.Errorf("can't get JWEServer from context") } - arguments := getArgumentsMap(req) + arguments, err := getArgumentsMap(req) + if err != nil { + return NewToolResultError(err.Error()), nil + } if len(chJweServer.Config.Server.ToolInputSettings) > 0 { var errResult *mcp.CallToolResult @@ -1462,7 +1933,10 @@ func makeDynamicToolHandler(meta dynamicToolMeta) ToolHandlerFunc { for _, p := range meta.Params { if v, ok := arguments[p.Name]; ok { // encode to SQL literal based on expected type - literal := sqlLiteral(p.JSONType, v) + literal, err := sqlLiteralChecked(p.JSONType, v) + if err != nil { + return NewToolResultError(fmt.Sprintf("Invalid argument %q: %v", p.Name, err)), nil + } args = append(args, fmt.Sprintf("%s=%s", p.Name, literal)) } } @@ -1485,22 +1959,137 @@ func makeDynamicToolHandler(meta dynamicToolMeta) ToolHandlerFunc { } } -// getArgumentsMap extracts arguments from a CallToolRequest as a map -func getArgumentsMap(req *mcp.CallToolRequest) map[string]any { - if req.Params.Arguments == nil { - 
return make(map[string]any) +// makeDynamicWriteToolHandler returns a handler for a discovered dynamic write +// tool. The handler enforces read-only mode, respects tool_input_settings and +// blocked_query_clauses, validates required parameters, and dispatches to the +// mode-specific query builder (currently only "insert"). +func (s *ClickHouseJWEServer) makeDynamicWriteToolHandler(meta dynamicToolMeta) ToolHandlerFunc { + return func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + chJweServer := GetClickHouseJWEServerFromContext(ctx) + if chJweServer == nil { + return nil, fmt.Errorf("can't get JWEServer from context") + } + // Belt-and-suspenders: discoverWriteTools already skips registration in + // read-only mode, but a config reload could toggle the flag at runtime. + if chJweServer.Config.ClickHouse.ReadOnly { + return NewToolResultError("write operations disabled in read-only mode"), nil + } + + arguments, err := getArgumentsMap(req) + if err != nil { + log.Error().Err(err).Str("tool", meta.ToolName).Msg("dynamic_tools: invalid arguments") + return NewToolResultError(err.Error()), nil + } + + if len(chJweServer.Config.Server.ToolInputSettings) > 0 { + var errResult *mcp.CallToolResult + ctx, errResult = applyToolInputSettings(ctx, arguments, chJweServer.Config.Server.ToolInputSettings) + if errResult != nil { + return errResult, nil + } + } + + query, err := buildDynamicWriteQuery(meta, arguments) + if err != nil { + log.Error().Err(err).Str("tool", meta.ToolName).Msg("dynamic_tools: failed to build write query") + return NewToolResultError(fmt.Sprintf("Failed to build query: %v", err)), nil + } + + if clause, clauseErr := checkBlockedClauses(query, chJweServer.blockedClauses); clauseErr != nil { + return NewToolResultError(fmt.Sprintf("Query rejected: %v", clauseErr)), nil + } else if clause != "" { + return NewToolResultError(fmt.Sprintf("Query rejected: %s clause is not allowed", clause)), nil + } + + chClient, err := 
chJweServer.GetClickHouseClientFromCtx(ctx) + if err != nil { + log.Error().Err(err).Str("tool", meta.ToolName).Msg("dynamic_tools: GetClickHouseClient failed") + return NewToolResultError(fmt.Sprintf("Failed to get ClickHouse client: %v", err)), nil + } + defer func() { + if closeErr := chClient.Close(); closeErr != nil { + log.Error().Err(closeErr).Str("tool", meta.ToolName).Msg("dynamic_tools: close client failed") + } + }() + + log.Debug().Str("tool", meta.ToolName).Str("query", query).Msg("dynamic_tools: executing write query") + if _, err := chClient.ExecuteQuery(ctx, query); err != nil { + log.Error().Err(err).Str("tool", meta.ToolName).Str("query", query).Msg("dynamic_tools: write query failed") + return NewToolResultError(fmt.Sprintf("Query failed: %v", ErrJSONEscaper.Replace(err.Error()))), nil + } + return NewToolResultText(fmt.Sprintf("Successfully executed %s", meta.ToolName)), nil } +} - // Arguments is json.RawMessage, unmarshal it +// buildDynamicWriteQuery dispatches to the mode-specific SQL builder. +// Unsupported modes are rejected at tool-registration time (see RegisterTools), +// so reaching them here is a bug. +func buildDynamicWriteQuery(meta dynamicToolMeta, args map[string]interface{}) (string, error) { + switch meta.WriteMode { + case "insert": + return buildInsertQuery(meta, args) + default: + return "", fmt.Errorf("unsupported write mode %q (only 'insert' is implemented)", meta.WriteMode) + } +} + +// buildInsertQuery renders INSERT INTO db.table (cols...) VALUES (vals...) +// from a dynamic tool's metadata and the client-supplied arguments. Required +// parameters must be present; everything else is optional (columns with DEFAULT +// expressions are simply omitted when not provided). 
+func buildInsertQuery(meta dynamicToolMeta, args map[string]interface{}) (string, error) { + cols := make([]string, 0, len(meta.Params)) + vals := make([]string, 0, len(meta.Params)) + for _, p := range meta.Params { + v, ok := args[p.Name] + if ok { + cols = append(cols, p.Name) + literal, err := sqlLiteralChecked(p.JSONType, v) + if err != nil { + return "", fmt.Errorf("invalid parameter %s: %w", p.Name, err) + } + vals = append(vals, literal) + continue + } + if p.Required { + return "", fmt.Errorf("required parameter missing: %s", p.Name) + } + } + if len(cols) == 0 { + return "", fmt.Errorf("no columns provided") + } + return fmt.Sprintf( + "INSERT INTO %s.%s (%s) VALUES (%s)", + meta.Database, meta.Table, + strings.Join(cols, ", "), + strings.Join(vals, ", "), + ), nil +} + +// getArgumentsMap extracts the arguments object from an MCP tool call. +// Returns an error when the arguments are present but cannot be parsed as JSON +// — handlers should propagate that error to the client instead of proceeding +// with empty arguments (which produces confusing downstream errors). +func getArgumentsMap(req *mcp.CallToolRequest) (map[string]any, error) { + if req.Params.Arguments == nil { + return make(map[string]any), nil + } var args map[string]any if err := json.Unmarshal(req.Params.Arguments, &args); err != nil { - return make(map[string]any) + return nil, fmt.Errorf("failed to parse tool arguments: %w", err) + } + if args == nil { + // Valid JSON "null" — treat as empty. 
+ return make(map[string]any), nil } - return args + return args, nil } func buildDynamicToolMeta(toolName, db, table, comment string, params []dynamicToolParam) dynamicToolMeta { title, description, annotations := buildToolPresentation(toolName, db, table, comment) + if metadata, ok := parseDynamicToolComment(comment); ok { + applyCommentParamOverrides(params, metadata) + } return dynamicToolMeta{ ToolName: toolName, @@ -1537,6 +2126,41 @@ func parseDynamicToolComment(comment string) (dynamicToolCommentMetadata, bool) return metadata, true } +// applyCommentParamOverrides sets param.Description from the tool-level JSON +// COMMENT's "params" map when present. Called after any column-level comment +// has been applied, so JSON overrides win. +func applyCommentParamOverrides(params []dynamicToolParam, meta dynamicToolCommentMetadata) { + if len(meta.Params) == 0 { + return + } + for i, p := range params { + if desc, ok := meta.Params[p.Name]; ok { + if trimmed := strings.TrimSpace(desc); trimmed != "" { + params[i].Description = trimmed + } + } + } +} + +// buildParamSchema returns the JSON Schema fragment for a single dynamic tool +// parameter. Description resolves to the param's own Description (from a +// column COMMENT or the tool's JSON COMMENT "params" map) and falls back to +// the ClickHouse type string when none was set. 
+func buildParamSchema(p dynamicToolParam) map[string]any { + desc := p.Description + if desc == "" { + desc = p.CHType + } + schema := map[string]any{ + "type": p.JSONType, + "description": desc, + } + if p.JSONFormat != "" { + schema["format"] = p.JSONFormat + } + return schema +} + func buildTitle(toolName, title string) string { if strings.TrimSpace(title) != "" { return strings.TrimSpace(title) @@ -1575,22 +2199,6 @@ func buildDynamicToolAnnotations(commentAnnotations *dynamicToolCommentAnnotatio return annotations } -func makeExecuteQueryAnnotations(readOnly bool) *mcp.ToolAnnotations { - if readOnly { - return &mcp.ToolAnnotations{ - ReadOnlyHint: true, - DestructiveHint: boolPtr(false), - OpenWorldHint: boolPtr(false), - } - } - - return &mcp.ToolAnnotations{ - ReadOnlyHint: false, - DestructiveHint: boolPtr(true), - OpenWorldHint: boolPtr(false), - } -} - func boolPtr(v bool) *bool { return &v } @@ -1654,46 +2262,69 @@ func mapCHType(chType string) (jsonType, jsonFormat string) { } func sqlLiteral(jsonType string, v interface{}) string { + literal, err := sqlLiteralChecked(jsonType, v) + if err != nil { + switch jsonType { + case "integer", "number", "boolean": + return "0" + default: + return "''" + } + } + return literal +} + +func sqlLiteralChecked(jsonType string, v interface{}) (string, error) { switch jsonType { case "integer": switch n := v.(type) { case float64: - return strconv.FormatInt(int64(n), 10) + if math.Trunc(n) != n { + return "", fmt.Errorf("expected integer, got non-integer number %v", n) + } + return strconv.FormatInt(int64(n), 10), nil case int64: - return strconv.FormatInt(n, 10) + return strconv.FormatInt(n, 10), nil case int: - return strconv.Itoa(n) + return strconv.Itoa(n), nil default: - return "0" + return "", fmt.Errorf("expected integer, got %T", v) } case "number": switch n := v.(type) { case float64: - return strconv.FormatFloat(n, 'f', -1, 64) + return strconv.FormatFloat(n, 'f', -1, 64), nil + case int64: + return 
strconv.FormatInt(n, 10), nil + case int: + return strconv.Itoa(n), nil default: - return "0" + return "", fmt.Errorf("expected number, got %T", v) } case "boolean": if b, ok := v.(bool); ok { if b { - return "1" + return "1", nil } - return "0" + return "0", nil } - return "0" + return "", fmt.Errorf("expected boolean, got %T", v) default: // string s := "" switch x := v.(type) { case string: s = x default: - b, _ := json.Marshal(v) + b, err := json.Marshal(v) + if err != nil { + return "", fmt.Errorf("failed to encode string value: %w", err) + } s = string(b) } // ClickHouse single-quoted string literal escaping: escape backslashes then single quotes s = strings.ReplaceAll(s, "\\", "\\\\") s = strings.ReplaceAll(s, "'", "\\'") - return "'" + s + "'" + return "'" + s + "'", nil } } @@ -1743,7 +2374,10 @@ func NewToolResultError(errMsg string) *mcp.CallToolResult { // HandleExecuteQuery implements the execute_query tool handler func HandleExecuteQuery(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { // Get arguments from request - arguments := getArgumentsMap(req) + arguments, err := getArgumentsMap(req) + if err != nil { + return NewToolResultError(err.Error()), nil + } queryArg, ok := arguments["query"] if !ok { @@ -2479,8 +3113,8 @@ func buildToolInputSettingsSchema(settings []string) map[string]any { } return map[string]any{ "type": "object", - "description": fmt.Sprintf("Optional ClickHouse settings to apply to this query. Allowed: %s", strings.Join(settings, ", ")), - "properties": props, + "description": fmt.Sprintf("Optional ClickHouse settings to apply to this query. 
Allowed: %s", strings.Join(settings, ", ")), + "properties": props, "additionalProperties": false, } } diff --git a/pkg/server/server_test.go b/pkg/server/server_test.go index 544d98de..e49d4c9f 100644 --- a/pkg/server/server_test.go +++ b/pkg/server/server_test.go @@ -840,14 +840,16 @@ func TestDynamicToolCommentMetadata(t *testing.T) { func TestRegisterTools_Annotations(t *testing.T) { t.Parallel() - t.Run("read_only_server_marks_execute_query_safe", func(t *testing.T) { + t.Run("read_only_server_skips_write_query_and_marks_execute_query_safe", func(t *testing.T) { t.Parallel() srv := &captureServer{} - RegisterTools(srv, config.Config{ + cfg := config.Config{ ClickHouse: config.ClickHouseConfig{ReadOnly: true}, - }) + } + RegisterTools(srv, &cfg) + // In read-only mode write_query is skipped entirely; only execute_query remains. require.Len(t, srv.tools, 1) tool := srv.tools[0] require.Equal(t, "execute_query", tool.Name) @@ -860,22 +862,39 @@ func TestRegisterTools_Annotations(t *testing.T) { require.False(t, *tool.Annotations.OpenWorldHint) }) - t.Run("read_write_server_marks_execute_query_risky", func(t *testing.T) { + t.Run("read_write_server_registers_both_and_execute_query_still_read_only", func(t *testing.T) { t.Parallel() srv := &captureServer{} - RegisterTools(srv, config.Config{ + cfg := config.Config{ ClickHouse: config.ClickHouseConfig{ReadOnly: false}, - }) + } + RegisterTools(srv, &cfg) + + // Defaults register both execute_query and write_query. 
+ require.Len(t, srv.tools, 2) + + var eq, wq *mcp.Tool + for _, t := range srv.tools { + switch t.Name { + case "execute_query": + eq = t + case "write_query": + wq = t + } + } + require.NotNil(t, eq, "execute_query tool should be registered") + require.NotNil(t, wq, "write_query tool should be registered") - require.Len(t, srv.tools, 1) - tool := srv.tools[0] - require.NotNil(t, tool.Annotations) - require.False(t, tool.Annotations.ReadOnlyHint) - require.NotNil(t, tool.Annotations.DestructiveHint) - require.True(t, *tool.Annotations.DestructiveHint) - require.NotNil(t, tool.Annotations.OpenWorldHint) - require.False(t, *tool.Annotations.OpenWorldHint) + // execute_query is always read-only, regardless of the server's read-only flag. + require.True(t, eq.Annotations.ReadOnlyHint) + require.False(t, *eq.Annotations.DestructiveHint) + require.False(t, *eq.Annotations.OpenWorldHint) + + // write_query is destructive. + require.False(t, wq.Annotations.ReadOnlyHint) + require.True(t, *wq.Annotations.DestructiveHint) + require.False(t, *wq.Annotations.OpenWorldHint) }) } @@ -1497,12 +1516,13 @@ func TestGetArgumentsMap_ErrorPath(t *testing.T) { Arguments: nil, }, } - args := getArgumentsMap(req) + args, err := getArgumentsMap(req) + require.NoError(t, err) require.NotNil(t, args) require.Empty(t, args) }) - t.Run("invalid_json", func(t *testing.T) { + t.Run("invalid_json_returns_error", func(t *testing.T) { t.Parallel() req := &mcp.CallToolRequest{ Params: &mcp.CallToolParamsRaw{ @@ -1510,7 +1530,36 @@ func TestGetArgumentsMap_ErrorPath(t *testing.T) { Arguments: json.RawMessage(`invalid json`), }, } - args := getArgumentsMap(req) + args, err := getArgumentsMap(req) + require.Error(t, err) + require.Contains(t, err.Error(), "failed to parse tool arguments") + require.Nil(t, args) + }) + + t.Run("valid_json_object", func(t *testing.T) { + t.Parallel() + req := &mcp.CallToolRequest{ + Params: &mcp.CallToolParamsRaw{ + Name: "test", + Arguments: json.RawMessage(`{"foo": 
"bar", "n": 42}`), + }, + } + args, err := getArgumentsMap(req) + require.NoError(t, err) + require.Equal(t, "bar", args["foo"]) + require.Equal(t, float64(42), args["n"]) + }) + + t.Run("json_null_is_empty_map", func(t *testing.T) { + t.Parallel() + req := &mcp.CallToolRequest{ + Params: &mcp.CallToolParamsRaw{ + Name: "test", + Arguments: json.RawMessage(`null`), + }, + } + args, err := getArgumentsMap(req) + require.NoError(t, err) require.NotNil(t, args) require.Empty(t, args) }) @@ -4399,8 +4448,8 @@ func TestOAuthClaimsFromRawClaims(t *testing.T) { t.Run("extra_claims_preserved", func(t *testing.T) { t.Parallel() raw := map[string]interface{}{ - "sub": "user", - "custom1": "value1", + "sub": "user", + "custom1": "value1", "custom_num": float64(42), } claims := oauthClaimsFromRawClaims(raw) @@ -4651,6 +4700,86 @@ func TestParseDynamicToolComment(t *testing.T) { require.NotNil(t, meta.Annotations) require.True(t, *meta.Annotations.OpenWorldHint) }) + t.Run("json_with_params", func(t *testing.T) { + t.Parallel() + meta, ok := parseDynamicToolComment(`{"params":{"user_id":"The user ID","ts":"Event timestamp"}}`) + require.True(t, ok) + require.Equal(t, "The user ID", meta.Params["user_id"]) + require.Equal(t, "Event timestamp", meta.Params["ts"]) + }) +} + +func TestApplyCommentParamOverrides(t *testing.T) { + t.Parallel() + + t.Run("empty_params_is_noop", func(t *testing.T) { + t.Parallel() + params := []dynamicToolParam{{Name: "a", Description: "col-level"}} + applyCommentParamOverrides(params, dynamicToolCommentMetadata{}) + require.Equal(t, "col-level", params[0].Description) + }) + t.Run("json_overrides_existing_description", func(t *testing.T) { + t.Parallel() + params := []dynamicToolParam{{Name: "a", Description: "col-level"}} + meta := dynamicToolCommentMetadata{Params: map[string]string{"a": "json-override"}} + applyCommentParamOverrides(params, meta) + require.Equal(t, "json-override", params[0].Description) + }) + 
t.Run("json_fills_missing_description", func(t *testing.T) { + t.Parallel() + params := []dynamicToolParam{{Name: "a"}} + meta := dynamicToolCommentMetadata{Params: map[string]string{"a": "from-json"}} + applyCommentParamOverrides(params, meta) + require.Equal(t, "from-json", params[0].Description) + }) + t.Run("whitespace_only_override_ignored", func(t *testing.T) { + t.Parallel() + params := []dynamicToolParam{{Name: "a", Description: "col-level"}} + meta := dynamicToolCommentMetadata{Params: map[string]string{"a": " "}} + applyCommentParamOverrides(params, meta) + require.Equal(t, "col-level", params[0].Description) + }) + t.Run("unmatched_params_preserved", func(t *testing.T) { + t.Parallel() + params := []dynamicToolParam{{Name: "a", Description: "A"}, {Name: "b"}} + meta := dynamicToolCommentMetadata{Params: map[string]string{"c": "nope"}} + applyCommentParamOverrides(params, meta) + require.Equal(t, "A", params[0].Description) + require.Equal(t, "", params[1].Description) + }) +} + +func TestBuildParamSchema(t *testing.T) { + t.Parallel() + + t.Run("description_used_when_set", func(t *testing.T) { + t.Parallel() + p := dynamicToolParam{Name: "uid", CHType: "UInt64", JSONType: "integer", Description: "user id"} + schema := buildParamSchema(p) + require.Equal(t, "integer", schema["type"]) + require.Equal(t, "user id", schema["description"]) + }) + t.Run("fallback_to_chtype_when_empty", func(t *testing.T) { + t.Parallel() + p := dynamicToolParam{Name: "uid", CHType: "UInt64", JSONType: "integer"} + schema := buildParamSchema(p) + require.Equal(t, "UInt64", schema["description"]) + }) + t.Run("json_format_included_when_set", func(t *testing.T) { + t.Parallel() + p := dynamicToolParam{Name: "ts", CHType: "DateTime", JSONType: "string", JSONFormat: "date-time", Description: "event time"} + schema := buildParamSchema(p) + require.Equal(t, "string", schema["type"]) + require.Equal(t, "date-time", schema["format"]) + require.Equal(t, "event time", 
schema["description"]) + }) + t.Run("json_format_omitted_when_empty", func(t *testing.T) { + t.Parallel() + p := dynamicToolParam{Name: "n", CHType: "UInt64", JSONType: "integer"} + schema := buildParamSchema(p) + _, hasFormat := schema["format"] + require.False(t, hasFormat) + }) } func TestBuildDynamicToolDescription(t *testing.T) { @@ -4762,8 +4891,8 @@ func TestValidateOAuthClaims(t *testing.T) { t.Run("gating_mode_uses_public_auth_server_url_as_issuer", func(t *testing.T) { t.Parallel() s := &ClickHouseJWEServer{Config: config.Config{Server: config.ServerConfig{OAuth: config.OAuthConfig{ - Mode: "gating", - Issuer: "https://original-issuer.com", + Mode: "gating", + Issuer: "https://original-issuer.com", PublicAuthServerURL: "https://public-auth.com", }}}} _, err := s.validateOAuthClaims(&OAuthClaims{Issuer: "https://public-auth.com"}) @@ -4897,6 +5026,35 @@ func TestSqlLiteral(t *testing.T) { }) } +func TestSqlLiteralChecked(t *testing.T) { + t.Parallel() + + t.Run("integer_rejects_string", func(t *testing.T) { + t.Parallel() + _, err := sqlLiteralChecked("integer", "not-a-number") + require.ErrorContains(t, err, "expected integer") + }) + + t.Run("integer_rejects_fractional_float", func(t *testing.T) { + t.Parallel() + _, err := sqlLiteralChecked("integer", 3.14) + require.ErrorContains(t, err, "non-integer number") + }) + + t.Run("boolean_rejects_string", func(t *testing.T) { + t.Parallel() + _, err := sqlLiteralChecked("boolean", "yes") + require.ErrorContains(t, err, "expected boolean") + }) + + t.Run("string_accepts_marshaled_non_string", func(t *testing.T) { + t.Parallel() + result, err := sqlLiteralChecked("string", 42) + require.NoError(t, err) + require.Equal(t, "'42'", result) + }) +} + func TestSnakeCase(t *testing.T) { t.Parallel() tests := []struct { @@ -5347,9 +5505,12 @@ func TestNewClickHouseMCPServerVersionField(t *testing.T) { func TestRegisterToolsAndResources(t *testing.T) { t.Parallel() capture := &captureServer{} - RegisterTools(capture, 
config.Config{}) - require.Len(t, capture.tools, 1) - require.Equal(t, "execute_query", capture.tools[0].Name) + cfg := config.Config{} + RegisterTools(capture, &cfg) + // Defaults register both execute_query and write_query. + require.Len(t, capture.tools, 2) + names := []string{capture.tools[0].Name, capture.tools[1].Name} + require.ElementsMatch(t, []string{"execute_query", "write_query"}, names) // These just verify no panic RegisterResources(capture) @@ -5540,12 +5701,16 @@ func TestRegisterToolsWithSettings(t *testing.T) { registeredTools = append(registeredTools, tool) }, } - RegisterTools(mock, config.Config{}) - require.Len(t, registeredTools, 1) - schema := registeredTools[0].InputSchema.(map[string]any) - props := schema["properties"].(map[string]any) - _, hasSettings := props["settings"] - require.False(t, hasSettings) + cfg := config.Config{} + RegisterTools(mock, &cfg) + // Defaults register both execute_query and write_query. + require.Len(t, registeredTools, 2) + for _, tool := range registeredTools { + schema := tool.InputSchema.(map[string]any) + props := schema["properties"].(map[string]any) + _, hasSettings := props["settings"] + require.Falsef(t, hasSettings, "tool %q should not have settings property", tool.Name) + } }) t.Run("settings_property_added_when_configured", func(t *testing.T) { @@ -5555,24 +5720,29 @@ func TestRegisterToolsWithSettings(t *testing.T) { registeredTools = append(registeredTools, tool) }, } - RegisterTools(mock, config.Config{ + cfg := config.Config{ Server: config.ServerConfig{ ToolInputSettings: []string{"custom_tenant_id", "custom_org_id"}, }, - }) - require.Len(t, registeredTools, 1) - - schema := registeredTools[0].InputSchema.(map[string]any) - props := schema["properties"].(map[string]any) - settingsSchema, ok := props["settings"].(map[string]any) - require.True(t, ok) - require.Equal(t, "object", settingsSchema["type"]) - require.False(t, settingsSchema["additionalProperties"].(bool)) - - innerProps := 
settingsSchema["properties"].(map[string]any) - require.Len(t, innerProps, 2) - require.Contains(t, innerProps, "custom_tenant_id") - require.Contains(t, innerProps, "custom_org_id") + } + RegisterTools(mock, &cfg) + // Defaults register both execute_query and write_query; each should + // have the settings property wired in. + require.Len(t, registeredTools, 2) + + for _, tool := range registeredTools { + schema := tool.InputSchema.(map[string]any) + props := schema["properties"].(map[string]any) + settingsSchema, ok := props["settings"].(map[string]any) + require.Truef(t, ok, "tool %q missing settings property", tool.Name) + require.Equal(t, "object", settingsSchema["type"]) + require.False(t, settingsSchema["additionalProperties"].(bool)) + + innerProps := settingsSchema["properties"].(map[string]any) + require.Len(t, innerProps, 2) + require.Contains(t, innerProps, "custom_tenant_id") + require.Contains(t, innerProps, "custom_org_id") + } }) } @@ -5730,6 +5900,315 @@ func (m *mockMCPServer) AddTool(tool *mcp.Tool, handler ToolHandlerFunc) { m.addToolFn(tool, handler) } } -func (m *mockMCPServer) AddResource(_ *mcp.Resource, _ ResourceHandlerFunc) {} +func (m *mockMCPServer) AddResource(_ *mcp.Resource, _ ResourceHandlerFunc) {} func (m *mockMCPServer) AddResourceTemplate(_ *mcp.ResourceTemplate, _ ResourceHandlerFunc) {} -func (m *mockMCPServer) AddPrompt(_ *mcp.Prompt, _ PromptHandlerFunc) {} +func (m *mockMCPServer) AddPrompt(_ *mcp.Prompt, _ PromptHandlerFunc) {} + +// --- PR 1: unified tools config + dynamic write tool discovery --- + +// TestRegisterTools_UnifiedConfig covers the new Tools config path. 
+func TestRegisterTools_UnifiedConfig(t *testing.T) { + t.Parallel() + + t.Run("only_static_read_tool_registered", func(t *testing.T) { + t.Parallel() + srv := &captureServer{} + cfg := config.Config{ + Server: config.ServerConfig{ + Tools: []config.ToolDefinition{ + {Type: "read", Name: "execute_query"}, + }, + }, + } + RegisterTools(srv, &cfg) + require.Len(t, srv.tools, 1) + require.Equal(t, "execute_query", srv.tools[0].Name) + // No dynamic rules survived. + require.Empty(t, cfg.Server.DynamicTools) + }) + + t.Run("dynamic_rules_preserved_in_DynamicTools", func(t *testing.T) { + t.Parallel() + srv := &captureServer{} + cfg := config.Config{ + Server: config.ServerConfig{ + Tools: []config.ToolDefinition{ + {Type: "read", Name: "execute_query"}, + {Type: "read", Regexp: `^analytics\..*_view$`, Prefix: "ro_"}, + {Type: "write", Regexp: `^events\..*$`, Prefix: "log_", Mode: "insert"}, + }, + }, + } + RegisterTools(srv, &cfg) + // Only execute_query is static (dynamic tools get registered lazily). + require.Len(t, srv.tools, 1) + // Dynamic rules were converted into DynamicTools for EnsureDynamicTools. + require.Len(t, cfg.Server.DynamicTools, 2) + // Ordering is preserved, so rule 0 is the read rule, rule 1 is the write rule. + require.Equal(t, "read", cfg.Server.DynamicTools[0].Type) + require.Equal(t, "write", cfg.Server.DynamicTools[1].Type) + require.Equal(t, "insert", cfg.Server.DynamicTools[1].Mode) + }) + + t.Run("invalid_mode_rejected_at_registration", func(t *testing.T) { + t.Parallel() + srv := &captureServer{} + cfg := config.Config{ + Server: config.ServerConfig{ + Tools: []config.ToolDefinition{ + {Type: "read", Name: "execute_query"}, + // Should survive — insert is supported. + {Type: "write", Regexp: `^ok\..*$`, Prefix: "ok_", Mode: "insert"}, + // Should be rejected — update/upsert not implemented. 
+ {Type: "write", Regexp: `^bad1\..*$`, Prefix: "x_", Mode: "update"}, + {Type: "write", Regexp: `^bad2\..*$`, Prefix: "x_", Mode: "upsert"}, + // Should be rejected — mode required for write. + {Type: "write", Regexp: `^bad3\..*$`, Prefix: "x_"}, + }, + }, + } + RegisterTools(srv, &cfg) + require.Len(t, cfg.Server.DynamicTools, 1) + require.Equal(t, "insert", cfg.Server.DynamicTools[0].Mode) + }) + + t.Run("legacy_dynamic_tools_still_works_with_warning", func(t *testing.T) { + t.Parallel() + srv := &captureServer{} + cfg := config.Config{ + Server: config.ServerConfig{ + // Legacy rule: no Type → defaults to "read". + DynamicTools: []config.DynamicToolRule{ + {Regexp: `^mydb\..*$`, Prefix: "get_"}, + }, + }, + } + RegisterTools(srv, &cfg) + require.Len(t, cfg.Server.DynamicTools, 1) + require.Equal(t, "read", cfg.Server.DynamicTools[0].Type) + }) + + t.Run("static_tool_with_unknown_name_ignored", func(t *testing.T) { + t.Parallel() + srv := &captureServer{} + cfg := config.Config{ + Server: config.ServerConfig{ + Tools: []config.ToolDefinition{ + {Type: "read", Name: "execute_query"}, + {Type: "read", Name: "query_something_unknown"}, + }, + }, + } + RegisterTools(srv, &cfg) + require.Len(t, srv.tools, 1) + require.Equal(t, "execute_query", srv.tools[0].Name) + }) +} + +// TestFilterRulesByType covers the read/write rule splitter. +func TestFilterRulesByType(t *testing.T) { + t.Parallel() + rules := []config.DynamicToolRule{ + {Regexp: `^a\..*$`, Type: "read"}, + {Regexp: `^b\..*$`, Type: "write", Mode: "insert"}, + {Regexp: `^c\..*$`}, // no type — defaults to "read" + {Regexp: `^d\..*$`, Type: "write", Mode: "insert"}, + } + reads := filterRulesByType(rules, "read") + writes := filterRulesByType(rules, "write") + require.Len(t, reads, 2) + require.Len(t, writes, 2) +} + +// TestHasDiscoveryCredentials covers the credential-presence check. 
+func TestHasDiscoveryCredentials(t *testing.T) { + t.Parallel() + + t.Run("none_present", func(t *testing.T) { + t.Parallel() + s := &ClickHouseJWEServer{} + require.False(t, s.hasDiscoveryCredentials(context.Background())) + }) + + t.Run("jwe_token_present", func(t *testing.T) { + t.Parallel() + s := &ClickHouseJWEServer{} + ctx := context.WithValue(context.Background(), JWETokenKey, "jwe-abc") + require.True(t, s.hasDiscoveryCredentials(ctx)) + }) + + t.Run("oauth_token_present", func(t *testing.T) { + t.Parallel() + s := &ClickHouseJWEServer{} + ctx := context.WithValue(context.Background(), OAuthTokenKey, "oauth-xyz") + require.True(t, s.hasDiscoveryCredentials(ctx)) + }) + + t.Run("static_username_present", func(t *testing.T) { + t.Parallel() + s := &ClickHouseJWEServer{ + Config: config.Config{ClickHouse: config.ClickHouseConfig{Username: "alice"}}, + } + require.True(t, s.hasDiscoveryCredentials(context.Background())) + }) +} + +// TestBuildInsertQuery covers the pure-function INSERT SQL generation — +// quote escaping, validation, unicode, null bytes. 
+func TestBuildInsertQuery(t *testing.T) { + t.Parallel() + + mkMeta := func(params ...dynamicToolParam) dynamicToolMeta { + return dynamicToolMeta{Database: "mydb", Table: "users", Params: params} + } + + t.Run("basic_string_and_int", func(t *testing.T) { + t.Parallel() + meta := mkMeta( + dynamicToolParam{Name: "id", JSONType: "integer", Required: true}, + dynamicToolParam{Name: "name", JSONType: "string", Required: true}, + ) + q, err := buildInsertQuery(meta, map[string]any{"id": float64(42), "name": "Alice"}) + require.NoError(t, err) + require.Contains(t, q, "INSERT INTO mydb.users") + require.Contains(t, q, "id, name") + require.Contains(t, q, "42") + require.Contains(t, q, "'Alice'") + }) + + t.Run("required_param_missing", func(t *testing.T) { + t.Parallel() + meta := mkMeta( + dynamicToolParam{Name: "id", JSONType: "integer", Required: true}, + dynamicToolParam{Name: "name", JSONType: "string", Required: true}, + ) + _, err := buildInsertQuery(meta, map[string]any{"id": float64(42)}) + require.Error(t, err) + require.Contains(t, err.Error(), "name") + }) + + t.Run("optional_param_omitted", func(t *testing.T) { + t.Parallel() + meta := mkMeta( + dynamicToolParam{Name: "id", JSONType: "integer", Required: true}, + dynamicToolParam{Name: "note", JSONType: "string", Required: false}, + ) + q, err := buildInsertQuery(meta, map[string]any{"id": float64(42)}) + require.NoError(t, err) + require.Contains(t, q, "INSERT INTO mydb.users (id) VALUES (42)") + require.NotContains(t, q, "note") + }) + + t.Run("no_columns_provided", func(t *testing.T) { + t.Parallel() + meta := mkMeta( + dynamicToolParam{Name: "note", JSONType: "string", Required: false}, + ) + _, err := buildInsertQuery(meta, map[string]any{}) + require.Error(t, err) + require.Contains(t, err.Error(), "no columns") + }) + + t.Run("quote_escaping_keeps_literal_well_formed", func(t *testing.T) { + t.Parallel() + // ClickHouse doesn't support multi-statement queries, so classic + // stacked-query injection 
isn't applicable. What we verify: a user + // quote must be escaped so the whole payload stays inside ONE string + // literal (balanced outer quotes after escape). + meta := mkMeta( + dynamicToolParam{Name: "name", JSONType: "string", Required: true}, + ) + q, err := buildInsertQuery(meta, map[string]any{"name": "it's \"fine\""}) + require.NoError(t, err) + stripped := strings.ReplaceAll(q, `\\`, "") + stripped = strings.ReplaceAll(stripped, `\'`, "") + require.Equal(t, 2, strings.Count(stripped, "'"), + "expected exactly 2 unescaped single quotes (literal boundaries), got: %s", q) + }) + + t.Run("backslash_escaping", func(t *testing.T) { + t.Parallel() + meta := mkMeta( + dynamicToolParam{Name: "path", JSONType: "string", Required: true}, + ) + q, err := buildInsertQuery(meta, map[string]any{"path": `C:\Users\x`}) + require.NoError(t, err) + require.Contains(t, q, "INSERT INTO mydb.users") + }) + + t.Run("unicode_values", func(t *testing.T) { + t.Parallel() + meta := mkMeta( + dynamicToolParam{Name: "name", JSONType: "string", Required: true}, + ) + q, err := buildInsertQuery(meta, map[string]any{"name": "日本語 — €"}) + require.NoError(t, err) + require.Contains(t, q, "日本語 — €") + }) + + t.Run("null_byte_in_string_keeps_literal_well_formed", func(t *testing.T) { + t.Parallel() + meta := mkMeta( + dynamicToolParam{Name: "blob", JSONType: "string", Required: true}, + ) + q, err := buildInsertQuery(meta, map[string]any{"blob": "before\x00after"}) + require.NoError(t, err) + require.True(t, strings.Count(q, "'")%2 == 0, "unbalanced quotes in: %s", q) + }) + + t.Run("invalid_integer_rejected", func(t *testing.T) { + t.Parallel() + meta := mkMeta( + dynamicToolParam{Name: "id", JSONType: "integer", Required: false}, + ) + _, err := buildInsertQuery(meta, map[string]any{"id": "abc"}) + require.ErrorContains(t, err, "invalid parameter id") + require.ErrorContains(t, err, "expected integer") + }) + + t.Run("fractional_integer_rejected", func(t *testing.T) { + t.Parallel() + 
meta := mkMeta( + dynamicToolParam{Name: "id", JSONType: "integer", Required: false}, + ) + _, err := buildInsertQuery(meta, map[string]any{"id": 1.5}) + require.ErrorContains(t, err, "non-integer number") + }) +} + +// TestBuildDynamicWriteQuery covers the mode dispatcher. +func TestBuildDynamicWriteQuery(t *testing.T) { + t.Parallel() + base := dynamicToolMeta{ + Database: "mydb", Table: "t", + Params: []dynamicToolParam{{Name: "id", JSONType: "integer", Required: true}}, + } + + t.Run("insert_mode", func(t *testing.T) { + t.Parallel() + m := base + m.WriteMode = "insert" + q, err := buildDynamicWriteQuery(m, map[string]any{"id": float64(1)}) + require.NoError(t, err) + require.Contains(t, q, "INSERT INTO mydb.t") + }) + + t.Run("unsupported_mode_rejected", func(t *testing.T) { + t.Parallel() + for _, mode := range []string{"update", "upsert", "delete", ""} { + m := base + m.WriteMode = mode + _, err := buildDynamicWriteQuery(m, map[string]any{"id": float64(1)}) + require.Errorf(t, err, "mode=%q should error", mode) + } + }) +} + +// TestBuildWriteToolDescription covers description fallbacks. +func TestBuildWriteToolDescription(t *testing.T) { + t.Parallel() + require.Equal(t, "table comment", buildWriteToolDescription("table comment", "db", "t", "insert")) + require.Equal(t, "Insert data in db.t", buildWriteToolDescription("", "db", "t", "insert")) + require.Equal(t, "Update data in db.t", buildWriteToolDescription("", "db", "t", "update")) + require.Equal(t, "Insert or update data in db.t", buildWriteToolDescription("", "db", "t", "upsert")) +}