swarm repositories / source
aboutsummaryrefslogtreecommitdiff
path: root/crates/fidget-spinner-cli/src/mcp/catalog.rs
diff options
context:
space:
mode:
author	main <main@swarm.moe>	2026-03-20 16:00:30 -0400
committer	main <main@swarm.moe>	2026-03-20 16:00:30 -0400
commit	9d63844f3a28fde70b19500422f17379e99e588a (patch)
tree	163cfbd65a8d3528346561410ef39eb1183a16f2	/crates/fidget-spinner-cli/src/mcp/catalog.rs
parent	22fe3d2ce7478450a1d7443c4ecbd85fd4c46716 (diff)
download	fidget_spinner-9d63844f3a28fde70b19500422f17379e99e588a.zip
Refound Spinner as an austere frontier ledger
Diffstat (limited to 'crates/fidget-spinner-cli/src/mcp/catalog.rs')
-rw-r--r--	crates/fidget-spinner-cli/src/mcp/catalog.rs	1380
1 file changed, 719 insertions, 661 deletions
diff --git a/crates/fidget-spinner-cli/src/mcp/catalog.rs b/crates/fidget-spinner-cli/src/mcp/catalog.rs
index ae3ca78..9b486bc 100644
--- a/crates/fidget-spinner-cli/src/mcp/catalog.rs
+++ b/crates/fidget-spinner-cli/src/mcp/catalog.rs
@@ -46,756 +46,814 @@ impl ToolSpec {
}
}
+const TOOL_SPECS: &[ToolSpec] = &[
+ ToolSpec {
+ name: "project.bind",
+ description: "Bind this MCP session to a project root or nested path inside a project store.",
+ dispatch: DispatchTarget::Host,
+ replay: ReplayContract::NeverReplay,
+ },
+ ToolSpec {
+ name: "project.status",
+ description: "Read coarse project metadata and ledger counts for the bound project.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "tag.add",
+ description: "Register one repo-local tag with a required description.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ },
+ ToolSpec {
+ name: "tag.list",
+ description: "List the repo-local tag registry.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "frontier.create",
+ description: "Create a new frontier scope.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ },
+ ToolSpec {
+ name: "frontier.list",
+ description: "List frontier scopes in the current project.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "frontier.read",
+ description: "Read one frontier record, including its brief.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "frontier.open",
+ description: "Open the bounded frontier overview: brief, active tags, live metrics, active hypotheses, and open experiments.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "frontier.brief.update",
+ description: "Replace or patch the singleton frontier brief.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ },
+ ToolSpec {
+ name: "frontier.history",
+ description: "Read the frontier revision history.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "hypothesis.record",
+ description: "Record one hypothesis. The body must stay a single paragraph.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ },
+ ToolSpec {
+ name: "hypothesis.list",
+ description: "List hypotheses, optionally narrowed by frontier or tag.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "hypothesis.read",
+ description: "Read one hypothesis with its local neighborhood, experiments, and artifacts.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "hypothesis.update",
+ description: "Patch hypothesis title, summary, body, tags, influence parents, or archive state.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ },
+ ToolSpec {
+ name: "hypothesis.history",
+ description: "Read the revision history for one hypothesis.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "experiment.open",
+ description: "Open one experiment anchored to exactly one hypothesis.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ },
+ ToolSpec {
+ name: "experiment.list",
+ description: "List experiments, optionally narrowed by frontier, hypothesis, status, or tags.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "experiment.read",
+ description: "Read one experiment with its owning hypothesis, local neighborhood, outcome, and artifacts.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "experiment.update",
+ description: "Patch experiment metadata, influence parents, archive state, or replace the closed outcome wholesale.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ },
+ ToolSpec {
+ name: "experiment.close",
+ description: "Close one open experiment with typed dimensions, structured metrics, verdict, rationale, and optional analysis.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ },
+ ToolSpec {
+ name: "experiment.history",
+ description: "Read the revision history for one experiment.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "artifact.record",
+ description: "Register an external artifact reference and attach it to frontiers, hypotheses, or experiments. Artifact bodies are never read through Spinner.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ },
+ ToolSpec {
+ name: "artifact.list",
+ description: "List artifact references, optionally narrowed by frontier, kind, or attachment target.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "artifact.read",
+ description: "Read one artifact reference and its attachment targets.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "artifact.update",
+ description: "Patch artifact metadata or replace its attachment set.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ },
+ ToolSpec {
+ name: "artifact.history",
+ description: "Read the revision history for one artifact.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "metric.define",
+ description: "Register one project-level metric definition.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ },
+ ToolSpec {
+ name: "metric.keys",
+ description: "List metric keys, defaulting to the live frontier comparison set.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "metric.best",
+ description: "Rank closed experiments by one metric key with optional frontier, hypothesis, or dimension narrowing.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "run.dimension.define",
+ description: "Register one typed run-dimension key.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ },
+ ToolSpec {
+ name: "run.dimension.list",
+ description: "List registered run dimensions.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "skill.list",
+ description: "List bundled skills shipped with this package.",
+ dispatch: DispatchTarget::Host,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "skill.show",
+ description: "Return one bundled skill text shipped with this package. Defaults to `fidget-spinner` when name is omitted.",
+ dispatch: DispatchTarget::Host,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "system.health",
+ description: "Read MCP host health, session binding, worker generation, and rollout state.",
+ dispatch: DispatchTarget::Host,
+ replay: ReplayContract::Convergent,
+ },
+ ToolSpec {
+ name: "system.telemetry",
+ description: "Read aggregate MCP host telemetry for this session.",
+ dispatch: DispatchTarget::Host,
+ replay: ReplayContract::Convergent,
+ },
+];
+
+const RESOURCE_SPECS: &[ResourceSpec] = &[
+ ResourceSpec {
+ uri: "fidget-spinner://skill/fidget-spinner",
+ dispatch: DispatchTarget::Host,
+ replay: ReplayContract::Convergent,
+ },
+ ResourceSpec {
+ uri: "fidget-spinner://skill/frontier-loop",
+ dispatch: DispatchTarget::Host,
+ replay: ReplayContract::Convergent,
+ },
+];
+
#[must_use]
pub(crate) fn tool_spec(name: &str) -> Option<ToolSpec> {
- match name {
- "project.bind" => Some(ToolSpec {
- name: "project.bind",
- description: "Bind this MCP session to a project root or nested path inside a project store.",
- dispatch: DispatchTarget::Host,
- replay: ReplayContract::NeverReplay,
- }),
- "project.status" => Some(ToolSpec {
- name: "project.status",
- description: "Read local project status, store paths, and git availability for the currently bound project.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::Convergent,
- }),
- "project.schema" => Some(ToolSpec {
- name: "project.schema",
- description: "Read the project-local payload schema and field validation tiers.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::Convergent,
- }),
- "schema.field.upsert" => Some(ToolSpec {
- name: "schema.field.upsert",
- description: "Add or replace one project-local payload schema field definition.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::NeverReplay,
- }),
- "schema.field.remove" => Some(ToolSpec {
- name: "schema.field.remove",
- description: "Remove one project-local payload schema field definition, optionally narrowed by node-class set.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::NeverReplay,
- }),
- "tag.add" => Some(ToolSpec {
- name: "tag.add",
- description: "Register one repo-local tag with a required description. Notes may only reference tags from this registry.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::NeverReplay,
- }),
- "tag.list" => Some(ToolSpec {
- name: "tag.list",
- description: "List repo-local tags available for note and node tagging.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::Convergent,
- }),
- "frontier.list" => Some(ToolSpec {
- name: "frontier.list",
- description: "List frontiers for the current project.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::Convergent,
- }),
- "frontier.status" => Some(ToolSpec {
- name: "frontier.status",
- description: "Read one frontier projection, including open/completed experiment counts and verdict totals.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::Convergent,
- }),
- "frontier.init" => Some(ToolSpec {
- name: "frontier.init",
- description: "Create a new frontier rooted in a contract node.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::NeverReplay,
- }),
- "node.create" => Some(ToolSpec {
- name: "node.create",
- description: "Create a generic DAG node with project payload fields and optional lineage parents.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::NeverReplay,
- }),
- "hypothesis.record" => Some(ToolSpec {
- name: "hypothesis.record",
- description: "Record a core-path hypothesis with low ceremony.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::NeverReplay,
- }),
- "node.list" => Some(ToolSpec {
- name: "node.list",
- description: "List recent nodes. Archived nodes are hidden unless explicitly requested.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::Convergent,
- }),
- "node.read" => Some(ToolSpec {
- name: "node.read",
- description: "Read one node including payload, diagnostics, and hidden annotations.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::Convergent,
- }),
- "node.annotate" => Some(ToolSpec {
- name: "node.annotate",
- description: "Attach a free-form annotation to any node.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::NeverReplay,
- }),
- "node.archive" => Some(ToolSpec {
- name: "node.archive",
- description: "Archive a node so it falls out of default enumeration without being deleted.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::NeverReplay,
- }),
- "note.quick" => Some(ToolSpec {
- name: "note.quick",
- description: "Push a quick off-path note without bureaucratic experiment closure.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::NeverReplay,
- }),
- "source.record" => Some(ToolSpec {
- name: "source.record",
- description: "Record imported sources and documentary context that should live in the DAG without polluting the core path.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::NeverReplay,
- }),
- "metric.define" => Some(ToolSpec {
- name: "metric.define",
- description: "Register one project-level metric definition so experiment ingestion only has to send key/value observations.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::NeverReplay,
- }),
- "run.dimension.define" => Some(ToolSpec {
- name: "run.dimension.define",
- description: "Register one project-level run dimension used to slice metrics across scenarios, budgets, and flags.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::NeverReplay,
- }),
- "run.dimension.list" => Some(ToolSpec {
- name: "run.dimension.list",
- description: "List registered run dimensions together with observed value counts and sample values.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::Convergent,
- }),
- "metric.keys" => Some(ToolSpec {
- name: "metric.keys",
- description: "List rankable metric keys, including registered run metrics and observed payload-derived numeric fields.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::Convergent,
- }),
- "metric.best" => Some(ToolSpec {
- name: "metric.best",
- description: "Rank completed experiments by one numeric key, with optional run-dimension filters.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::Convergent,
- }),
- "metric.migrate" => Some(ToolSpec {
- name: "metric.migrate",
- description: "Re-run the idempotent legacy metric-plane normalization that registers canonical metrics and backfills benchmark_suite dimensions.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::NeverReplay,
- }),
- "experiment.open" => Some(ToolSpec {
- name: "experiment.open",
- description: "Open a stateful experiment against one hypothesis.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::NeverReplay,
- }),
- "experiment.list" => Some(ToolSpec {
- name: "experiment.list",
- description: "List currently open experiments, optionally narrowed to one frontier.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::Convergent,
- }),
- "experiment.read" => Some(ToolSpec {
- name: "experiment.read",
- description: "Read one currently open experiment by id.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::Convergent,
- }),
- "experiment.close" => Some(ToolSpec {
- name: "experiment.close",
- description: "Close one open experiment with typed run dimensions, preregistered metric observations, optional analysis, note, and verdict.",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::NeverReplay,
- }),
- "skill.list" => Some(ToolSpec {
- name: "skill.list",
- description: "List bundled skills shipped with this package.",
- dispatch: DispatchTarget::Host,
- replay: ReplayContract::Convergent,
- }),
- "skill.show" => Some(ToolSpec {
- name: "skill.show",
- description: "Return one bundled skill text shipped with this package. Defaults to `fidget-spinner` when name is omitted.",
- dispatch: DispatchTarget::Host,
- replay: ReplayContract::Convergent,
- }),
- "system.health" => Some(ToolSpec {
- name: "system.health",
- description: "Read MCP host health, session binding, worker generation, rollout state, and the last fault.",
- dispatch: DispatchTarget::Host,
- replay: ReplayContract::Convergent,
- }),
- "system.telemetry" => Some(ToolSpec {
- name: "system.telemetry",
- description: "Read aggregate request, retry, restart, and per-operation telemetry for this MCP session.",
- dispatch: DispatchTarget::Host,
- replay: ReplayContract::Convergent,
- }),
- _ => None,
- }
+ TOOL_SPECS.iter().copied().find(|spec| spec.name == name)
}
#[must_use]
pub(crate) fn resource_spec(uri: &str) -> Option<ResourceSpec> {
- match uri {
- "fidget-spinner://project/config" => Some(ResourceSpec {
- uri: "fidget-spinner://project/config",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::Convergent,
- }),
- "fidget-spinner://project/schema" => Some(ResourceSpec {
- uri: "fidget-spinner://project/schema",
- dispatch: DispatchTarget::Worker,
- replay: ReplayContract::Convergent,
- }),
- "fidget-spinner://skill/fidget-spinner" => Some(ResourceSpec {
- uri: "fidget-spinner://skill/fidget-spinner",
- dispatch: DispatchTarget::Host,
- replay: ReplayContract::Convergent,
- }),
- "fidget-spinner://skill/frontier-loop" => Some(ResourceSpec {
- uri: "fidget-spinner://skill/frontier-loop",
- dispatch: DispatchTarget::Host,
- replay: ReplayContract::Convergent,
- }),
- _ => None,
- }
+ RESOURCE_SPECS.iter().copied().find(|spec| spec.uri == uri)
}
#[must_use]
pub(crate) fn tool_definitions() -> Vec<Value> {
- [
- "project.bind",
- "project.status",
- "project.schema",
- "schema.field.upsert",
- "schema.field.remove",
- "tag.add",
- "tag.list",
- "frontier.list",
- "frontier.status",
- "frontier.init",
- "node.create",
- "hypothesis.record",
- "node.list",
- "node.read",
- "node.annotate",
- "node.archive",
- "note.quick",
- "source.record",
- "metric.define",
- "run.dimension.define",
- "run.dimension.list",
- "metric.keys",
- "metric.best",
- "metric.migrate",
- "experiment.open",
- "experiment.list",
- "experiment.read",
- "experiment.close",
- "skill.list",
- "skill.show",
- "system.health",
- "system.telemetry",
- ]
- .into_iter()
- .filter_map(tool_spec)
- .map(|spec| {
- json!({
- "name": spec.name,
- "description": spec.description,
- "inputSchema": with_common_presentation(input_schema(spec.name)),
- "annotations": spec.annotation_json(),
+ TOOL_SPECS
+ .iter()
+ .copied()
+ .map(|spec| {
+ json!({
+ "name": spec.name,
+ "description": spec.description,
+ "annotations": spec.annotation_json(),
+ "inputSchema": tool_input_schema(spec.name),
+ })
})
- })
- .collect()
+ .collect()
}
#[must_use]
pub(crate) fn list_resources() -> Vec<Value> {
- vec![
- json!({
- "uri": "fidget-spinner://project/config",
- "name": "project-config",
- "description": "Project-local store configuration",
- "mimeType": "application/json"
- }),
- json!({
- "uri": "fidget-spinner://project/schema",
- "name": "project-schema",
- "description": "Project-local payload schema and validation tiers",
- "mimeType": "application/json"
- }),
- json!({
- "uri": "fidget-spinner://skill/fidget-spinner",
- "name": "fidget-spinner-skill",
- "description": "Bundled base Fidget Spinner skill text for this package",
- "mimeType": "text/markdown"
- }),
- json!({
- "uri": "fidget-spinner://skill/frontier-loop",
- "name": "frontier-loop-skill",
- "description": "Bundled frontier-loop specialization skill text for this package",
- "mimeType": "text/markdown"
- }),
- ]
+ RESOURCE_SPECS
+ .iter()
+ .map(|spec| {
+ json!({
+ "uri": spec.uri,
+ "name": spec.uri.rsplit('/').next().unwrap_or(spec.uri),
+ "description": resource_description(spec.uri),
+ })
+ })
+ .collect()
}
-fn input_schema(name: &str) -> Value {
- match name {
- "project.status" | "project.schema" | "tag.list" | "skill.list" | "system.health"
- | "system.telemetry" | "run.dimension.list" | "metric.migrate" => {
- json!({"type":"object","additionalProperties":false})
- }
- "schema.field.upsert" => json!({
- "type": "object",
- "properties": {
- "name": { "type": "string", "description": "Project payload field name." },
- "node_classes": { "type": "array", "items": node_class_schema(), "description": "Optional node-class scope. Omit or pass [] for all classes." },
- "presence": field_presence_schema(),
- "severity": diagnostic_severity_schema(),
- "role": field_role_schema(),
- "inference_policy": inference_policy_schema(),
- "value_type": field_value_type_schema(),
- },
- "required": ["name", "presence", "severity", "role", "inference_policy"],
- "additionalProperties": false
- }),
- "schema.field.remove" => json!({
- "type": "object",
- "properties": {
- "name": { "type": "string", "description": "Project payload field name." },
- "node_classes": { "type": "array", "items": node_class_schema(), "description": "Optional exact node-class scope to remove." }
- },
- "required": ["name"],
- "additionalProperties": false
- }),
- "project.bind" => json!({
- "type": "object",
- "properties": {
- "path": { "type": "string", "description": "Project root or any nested path inside a project with .fidget_spinner state." }
- },
- "required": ["path"],
- "additionalProperties": false
- }),
- "tag.add" => json!({
- "type": "object",
- "properties": {
- "name": { "type": "string", "description": "Lowercase repo-local tag name." },
- "description": { "type": "string", "description": "Human-facing tag description." }
- },
- "required": ["name", "description"],
- "additionalProperties": false
- }),
- "skill.show" => json!({
- "type": "object",
- "properties": {
- "name": { "type": "string", "description": "Bundled skill name. Defaults to `fidget-spinner`." }
- },
- "additionalProperties": false
- }),
- "frontier.list" => json!({"type":"object","additionalProperties":false}),
- "frontier.status" => json!({
- "type": "object",
- "properties": {
- "frontier_id": { "type": "string", "description": "Frontier UUID" }
- },
- "required": ["frontier_id"],
- "additionalProperties": false
- }),
- "frontier.init" => json!({
- "type": "object",
- "properties": {
- "label": { "type": "string" },
- "objective": { "type": "string" },
- "contract_title": { "type": "string" },
- "contract_summary": { "type": "string" },
- "benchmark_suites": { "type": "array", "items": { "type": "string" } },
- "promotion_criteria": { "type": "array", "items": { "type": "string" } },
- "primary_metric": metric_spec_schema(),
- "supporting_metrics": { "type": "array", "items": metric_spec_schema() },
- "seed_summary": { "type": "string" }
- },
- "required": ["label", "objective", "contract_title", "benchmark_suites", "promotion_criteria", "primary_metric"],
- "additionalProperties": false
- }),
- "node.create" => json!({
- "type": "object",
- "properties": {
- "class": node_class_schema(),
- "frontier_id": { "type": "string" },
- "title": { "type": "string" },
- "summary": { "type": "string", "description": "Required for `note` and `source` nodes." },
- "tags": { "type": "array", "items": tag_name_schema(), "description": "Required for `note` nodes; optional for other classes." },
- "payload": { "type": "object", "description": "`note` and `source` nodes require a non-empty string `body` field." },
- "annotations": { "type": "array", "items": annotation_schema() },
- "parents": { "type": "array", "items": { "type": "string" } }
- },
- "required": ["class", "title"],
- "additionalProperties": false
- }),
- "hypothesis.record" => json!({
- "type": "object",
- "properties": {
- "frontier_id": { "type": "string" },
- "title": { "type": "string" },
- "summary": { "type": "string" },
- "body": { "type": "string" },
- "annotations": { "type": "array", "items": annotation_schema() },
- "parents": { "type": "array", "items": { "type": "string" } }
- },
- "required": ["frontier_id", "title", "summary", "body"],
- "additionalProperties": false
- }),
- "node.list" => json!({
- "type": "object",
- "properties": {
- "frontier_id": { "type": "string" },
- "class": node_class_schema(),
- "tags": { "type": "array", "items": tag_name_schema() },
- "include_archived": { "type": "boolean" },
- "limit": { "type": "integer", "minimum": 1, "maximum": 500 }
- },
- "additionalProperties": false
- }),
- "node.read" | "node.archive" => json!({
- "type": "object",
- "properties": {
- "node_id": { "type": "string" }
- },
- "required": ["node_id"],
- "additionalProperties": false
- }),
- "node.annotate" => json!({
- "type": "object",
- "properties": {
- "node_id": { "type": "string" },
- "body": { "type": "string" },
- "label": { "type": "string" },
- "visible": { "type": "boolean" }
- },
- "required": ["node_id", "body"],
- "additionalProperties": false
- }),
- "note.quick" => json!({
- "type": "object",
- "properties": {
- "frontier_id": { "type": "string" },
- "title": { "type": "string" },
- "summary": { "type": "string" },
- "body": { "type": "string" },
- "tags": { "type": "array", "items": tag_name_schema() },
- "annotations": { "type": "array", "items": annotation_schema() },
- "parents": { "type": "array", "items": { "type": "string" } }
- },
- "required": ["title", "summary", "body", "tags"],
- "additionalProperties": false
- }),
- "source.record" => json!({
- "type": "object",
- "properties": {
- "frontier_id": { "type": "string" },
- "title": { "type": "string" },
- "summary": { "type": "string" },
- "body": { "type": "string" },
- "tags": { "type": "array", "items": tag_name_schema() },
- "annotations": { "type": "array", "items": annotation_schema() },
- "parents": { "type": "array", "items": { "type": "string" } }
- },
- "required": ["title", "summary", "body"],
- "additionalProperties": false
- }),
- "metric.define" => json!({
- "type": "object",
- "properties": {
- "key": { "type": "string" },
- "unit": metric_unit_schema(),
- "objective": optimization_objective_schema(),
- "description": { "type": "string" }
- },
- "required": ["key", "unit", "objective"],
- "additionalProperties": false
- }),
- "run.dimension.define" => json!({
- "type": "object",
- "properties": {
- "key": { "type": "string" },
- "value_type": field_value_type_schema(),
- "description": { "type": "string" }
- },
- "required": ["key", "value_type"],
- "additionalProperties": false
- }),
- "metric.keys" => json!({
- "type": "object",
- "properties": {
- "frontier_id": { "type": "string" },
- "source": metric_source_schema(),
- "dimensions": { "type": "object" }
- },
- "additionalProperties": false
- }),
- "metric.best" => json!({
- "type": "object",
- "properties": {
- "key": { "type": "string" },
- "frontier_id": { "type": "string" },
- "source": metric_source_schema(),
- "dimensions": { "type": "object" },
- "order": metric_order_schema(),
- "limit": { "type": "integer", "minimum": 1, "maximum": 500 }
- },
- "required": ["key"],
- "additionalProperties": false
- }),
- "experiment.open" => json!({
- "type": "object",
- "properties": {
- "frontier_id": { "type": "string" },
- "hypothesis_node_id": { "type": "string" },
- "title": { "type": "string" },
- "summary": { "type": "string" }
- },
- "required": ["frontier_id", "hypothesis_node_id", "title"],
- "additionalProperties": false
- }),
- "experiment.list" => json!({
- "type": "object",
- "properties": {
- "frontier_id": { "type": "string" }
- },
- "additionalProperties": false
- }),
- "experiment.read" => json!({
- "type": "object",
- "properties": {
- "experiment_id": { "type": "string" }
- },
- "required": ["experiment_id"],
- "additionalProperties": false
- }),
- "experiment.close" => json!({
- "type": "object",
- "properties": {
- "experiment_id": { "type": "string" },
- "run": run_schema(),
- "primary_metric": metric_value_schema(),
- "supporting_metrics": { "type": "array", "items": metric_value_schema() },
- "note": note_schema(),
- "verdict": verdict_schema(),
- "decision_title": { "type": "string" },
- "decision_rationale": { "type": "string" },
- "analysis": analysis_schema()
- },
- "required": [
- "experiment_id",
- "run",
+fn resource_description(uri: &str) -> &'static str {
+ match uri {
+ "fidget-spinner://skill/fidget-spinner" => "Bundled Fidget Spinner operating doctrine.",
+ "fidget-spinner://skill/frontier-loop" => "Bundled frontier-loop specialization.",
+ _ => "Fidget Spinner resource.",
+ }
+}
+
+fn tool_input_schema(name: &str) -> Value {
+ let schema = match name {
+ "project.bind" => object_schema(
+ &[(
+ "path",
+ string_schema("Project root or any nested path inside it."),
+ )],
+ &["path"],
+ ),
+ "project.status" | "tag.list" | "frontier.list" | "run.dimension.list" | "skill.list"
+ | "system.health" | "system.telemetry" => empty_object_schema(),
+ "tag.add" => object_schema(
+ &[
+ ("name", string_schema("Repo-local tag token.")),
+ (
+ "description",
+ string_schema("Human-facing tag description."),
+ ),
+ ],
+ &["name", "description"],
+ ),
+ "frontier.create" => object_schema(
+ &[
+ ("label", string_schema("Short frontier label.")),
+ ("objective", string_schema("Frontier objective.")),
+ ("slug", string_schema("Optional stable frontier slug.")),
+ ],
+ &["label", "objective"],
+ ),
+ "frontier.read" | "frontier.open" | "frontier.history" => object_schema(
+ &[("frontier", selector_schema("Frontier UUID or slug."))],
+ &["frontier"],
+ ),
+ "frontier.brief.update" => object_schema(
+ &[
+ ("frontier", selector_schema("Frontier UUID or slug.")),
+ (
+ "expected_revision",
+ integer_schema("Optimistic concurrency guard."),
+ ),
+ (
+ "situation",
+ nullable_string_schema("Optional frontier situation text."),
+ ),
+ ("roadmap", roadmap_schema()),
+ (
+ "unknowns",
+ string_array_schema("Ordered frontier unknowns."),
+ ),
+ ],
+ &["frontier"],
+ ),
+ "hypothesis.record" => object_schema(
+ &[
+ ("frontier", selector_schema("Owning frontier UUID or slug.")),
+ ("title", string_schema("Terse hypothesis title.")),
+ ("summary", string_schema("One-line hypothesis summary.")),
+ ("body", string_schema("Single-paragraph hypothesis body.")),
+ ("slug", string_schema("Optional stable hypothesis slug.")),
+ ("tags", string_array_schema("Tag names.")),
+ ("parents", vertex_selector_array_schema()),
+ ],
+ &["frontier", "title", "summary", "body"],
+ ),
+ "hypothesis.list" => object_schema(
+ &[
+ (
+ "frontier",
+ selector_schema("Optional frontier UUID or slug."),
+ ),
+ ("tags", string_array_schema("Require all listed tags.")),
+ (
+ "include_archived",
+ boolean_schema("Include archived hypotheses."),
+ ),
+ ("limit", integer_schema("Optional row cap.")),
+ ],
+ &[],
+ ),
+ "hypothesis.read" | "hypothesis.history" => object_schema(
+ &[("hypothesis", selector_schema("Hypothesis UUID or slug."))],
+ &["hypothesis"],
+ ),
+ "hypothesis.update" => object_schema(
+ &[
+ ("hypothesis", selector_schema("Hypothesis UUID or slug.")),
+ (
+ "expected_revision",
+ integer_schema("Optimistic concurrency guard."),
+ ),
+ ("title", string_schema("Replacement title.")),
+ ("summary", string_schema("Replacement summary.")),
+ ("body", string_schema("Replacement single-paragraph body.")),
+ ("tags", string_array_schema("Replacement tag set.")),
+ ("parents", vertex_selector_array_schema()),
+ ("archived", boolean_schema("Archive state override.")),
+ ],
+ &["hypothesis"],
+ ),
+ "experiment.open" => object_schema(
+ &[
+ (
+ "hypothesis",
+ selector_schema("Owning hypothesis UUID or slug."),
+ ),
+ ("title", string_schema("Experiment title.")),
+ ("summary", string_schema("Optional experiment summary.")),
+ ("slug", string_schema("Optional stable experiment slug.")),
+ ("tags", string_array_schema("Tag names.")),
+ ("parents", vertex_selector_array_schema()),
+ ],
+ &["hypothesis", "title"],
+ ),
+ "experiment.list" => object_schema(
+ &[
+ (
+ "frontier",
+ selector_schema("Optional frontier UUID or slug."),
+ ),
+ (
+ "hypothesis",
+ selector_schema("Optional hypothesis UUID or slug."),
+ ),
+ ("tags", string_array_schema("Require all listed tags.")),
+ (
+ "status",
+ enum_string_schema(&["open", "closed"], "Optional experiment status filter."),
+ ),
+ (
+ "include_archived",
+ boolean_schema("Include archived experiments."),
+ ),
+ ("limit", integer_schema("Optional row cap.")),
+ ],
+ &[],
+ ),
+ "experiment.read" | "experiment.history" => object_schema(
+ &[("experiment", selector_schema("Experiment UUID or slug."))],
+ &["experiment"],
+ ),
+ "experiment.update" => object_schema(
+ &[
+ ("experiment", selector_schema("Experiment UUID or slug.")),
+ (
+ "expected_revision",
+ integer_schema("Optimistic concurrency guard."),
+ ),
+ ("title", string_schema("Replacement title.")),
+ (
+ "summary",
+ nullable_string_schema("Replacement summary or explicit null."),
+ ),
+ ("tags", string_array_schema("Replacement tag set.")),
+ ("parents", vertex_selector_array_schema()),
+ ("archived", boolean_schema("Archive state override.")),
+ ("outcome", experiment_outcome_schema()),
+ ],
+ &["experiment"],
+ ),
+ "experiment.close" => object_schema(
+ &[
+ ("experiment", selector_schema("Experiment UUID or slug.")),
+ (
+ "expected_revision",
+ integer_schema("Optimistic concurrency guard."),
+ ),
+ (
+ "backend",
+ enum_string_schema(
+ &["manual", "local_process", "worktree_process", "ssh_process"],
+ "Execution backend.",
+ ),
+ ),
+ ("command", command_schema()),
+ ("dimensions", run_dimensions_schema()),
+ ("primary_metric", metric_value_schema()),
+ ("supporting_metrics", metric_value_array_schema()),
+ (
+ "verdict",
+ enum_string_schema(
+ &["accepted", "kept", "parked", "rejected"],
+ "Closed verdict.",
+ ),
+ ),
+ ("rationale", string_schema("Decision rationale.")),
+ ("analysis", experiment_analysis_schema()),
+ ],
+ &[
+ "experiment",
+ "backend",
+ "command",
+ "dimensions",
"primary_metric",
- "note",
"verdict",
- "decision_title",
- "decision_rationale"
+ "rationale",
],
- "additionalProperties": false
- }),
- _ => json!({"type":"object","additionalProperties":false}),
- }
+ ),
+ "artifact.record" => object_schema(
+ &[
+ (
+ "kind",
+ enum_string_schema(
+ &[
+ "document", "link", "log", "table", "plot", "dump", "binary", "other",
+ ],
+ "Artifact kind.",
+ ),
+ ),
+ ("label", string_schema("Human-facing artifact label.")),
+ ("summary", string_schema("Optional summary.")),
+ (
+ "locator",
+ string_schema(
+ "Opaque locator or URI. Artifact bodies are never read through Spinner.",
+ ),
+ ),
+ ("media_type", string_schema("Optional media type.")),
+ ("slug", string_schema("Optional stable artifact slug.")),
+ ("attachments", attachment_selector_array_schema()),
+ ],
+ &["kind", "label", "locator"],
+ ),
+ "artifact.list" => object_schema(
+ &[
+ (
+ "frontier",
+ selector_schema("Optional frontier UUID or slug."),
+ ),
+ (
+ "kind",
+ enum_string_schema(
+ &[
+ "document", "link", "log", "table", "plot", "dump", "binary", "other",
+ ],
+ "Optional artifact kind.",
+ ),
+ ),
+ ("attached_to", attachment_selector_schema()),
+ ("limit", integer_schema("Optional row cap.")),
+ ],
+ &[],
+ ),
+ "artifact.read" | "artifact.history" => object_schema(
+ &[("artifact", selector_schema("Artifact UUID or slug."))],
+ &["artifact"],
+ ),
+ "artifact.update" => object_schema(
+ &[
+ ("artifact", selector_schema("Artifact UUID or slug.")),
+ (
+ "expected_revision",
+ integer_schema("Optimistic concurrency guard."),
+ ),
+ (
+ "kind",
+ enum_string_schema(
+ &[
+ "document", "link", "log", "table", "plot", "dump", "binary", "other",
+ ],
+ "Replacement artifact kind.",
+ ),
+ ),
+ ("label", string_schema("Replacement label.")),
+ (
+ "summary",
+ nullable_string_schema("Replacement summary or explicit null."),
+ ),
+ ("locator", string_schema("Replacement locator.")),
+ (
+ "media_type",
+ nullable_string_schema("Replacement media type or explicit null."),
+ ),
+ ("attachments", attachment_selector_array_schema()),
+ ],
+ &["artifact"],
+ ),
+ "metric.define" => object_schema(
+ &[
+ ("key", string_schema("Metric key.")),
+ (
+ "unit",
+ enum_string_schema(
+ &["seconds", "bytes", "count", "ratio", "custom"],
+ "Metric unit.",
+ ),
+ ),
+ (
+ "objective",
+ enum_string_schema(
+ &["minimize", "maximize", "target"],
+ "Optimization objective.",
+ ),
+ ),
+ (
+ "visibility",
+ enum_string_schema(
+ &["canonical", "minor", "hidden", "archived"],
+ "Metric visibility tier.",
+ ),
+ ),
+ ("description", string_schema("Optional description.")),
+ ],
+ &["key", "unit", "objective"],
+ ),
+ "metric.keys" => object_schema(
+ &[
+ (
+ "frontier",
+ selector_schema("Optional frontier UUID or slug."),
+ ),
+ (
+ "scope",
+ enum_string_schema(&["live", "visible", "all"], "Registry slice to enumerate."),
+ ),
+ ],
+ &[],
+ ),
+ "metric.best" => object_schema(
+ &[
+ (
+ "frontier",
+ selector_schema("Optional frontier UUID or slug."),
+ ),
+ (
+ "hypothesis",
+ selector_schema("Optional hypothesis UUID or slug."),
+ ),
+ ("key", string_schema("Metric key.")),
+ ("dimensions", run_dimensions_schema()),
+ (
+ "include_rejected",
+ boolean_schema("Include rejected experiments."),
+ ),
+ ("limit", integer_schema("Optional row cap.")),
+ (
+ "order",
+ enum_string_schema(&["asc", "desc"], "Optional explicit ranking direction."),
+ ),
+ ],
+ &["key"],
+ ),
+ "run.dimension.define" => object_schema(
+ &[
+ ("key", string_schema("Dimension key.")),
+ (
+ "value_type",
+ enum_string_schema(
+ &["string", "numeric", "boolean", "timestamp"],
+ "Dimension value type.",
+ ),
+ ),
+ ("description", string_schema("Optional description.")),
+ ],
+ &["key", "value_type"],
+ ),
+ "skill.show" => object_schema(&[("name", string_schema("Bundled skill name."))], &[]),
+ _ => empty_object_schema(),
+ };
+ with_common_presentation(schema)
}
-fn metric_spec_schema() -> Value {
+fn empty_object_schema() -> Value {
json!({
"type": "object",
- "properties": {
- "key": { "type": "string" },
- "unit": metric_unit_schema(),
- "objective": optimization_objective_schema()
- },
- "required": ["key", "unit", "objective"],
- "additionalProperties": false
+ "properties": {},
+ "additionalProperties": false,
})
}
-fn metric_value_schema() -> Value {
+fn object_schema(properties: &[(&str, Value)], required: &[&str]) -> Value {
+ let mut map = serde_json::Map::new();
+ for (key, value) in properties {
+ let _ = map.insert((*key).to_owned(), value.clone());
+ }
json!({
"type": "object",
- "properties": {
- "key": { "type": "string" },
- "value": { "type": "number" }
- },
- "required": ["key", "value"],
- "additionalProperties": false
+ "properties": Value::Object(map),
+ "required": required,
+ "additionalProperties": false,
})
}
-fn annotation_schema() -> Value {
- json!({
- "type": "object",
- "properties": {
- "body": { "type": "string" },
- "label": { "type": "string" },
- "visible": { "type": "boolean" }
- },
- "required": ["body"],
- "additionalProperties": false
- })
+fn string_schema(description: &str) -> Value {
+ json!({ "type": "string", "description": description })
}
-fn analysis_schema() -> Value {
+fn nullable_string_schema(description: &str) -> Value {
json!({
- "type": "object",
- "properties": {
- "title": { "type": "string" },
- "summary": { "type": "string" },
- "body": { "type": "string" }
- },
- "required": ["title", "summary", "body"],
- "additionalProperties": false
+ "description": description,
+ "oneOf": [
+ { "type": "string" },
+ { "type": "null" }
+ ]
})
}
-fn tag_name_schema() -> Value {
- json!({
- "type": "string",
- "pattern": "^[a-z0-9]+(?:[-_/][a-z0-9]+)*$"
- })
+fn integer_schema(description: &str) -> Value {
+ json!({ "type": "integer", "minimum": 0, "description": description })
}
-fn node_class_schema() -> Value {
- json!({
- "type": "string",
- "enum": ["contract", "hypothesis", "run", "analysis", "decision", "source", "note"]
- })
+fn boolean_schema(description: &str) -> Value {
+ json!({ "type": "boolean", "description": description })
}
-fn metric_unit_schema() -> Value {
- json!({
- "type": "string",
- "enum": ["seconds", "bytes", "count", "ratio", "custom"]
- })
+fn enum_string_schema(values: &[&str], description: &str) -> Value {
+ json!({ "type": "string", "enum": values, "description": description })
}
-fn metric_source_schema() -> Value {
+fn string_array_schema(description: &str) -> Value {
json!({
- "type": "string",
- "enum": [
- "run_metric",
- "hypothesis_payload",
- "run_payload",
- "analysis_payload",
- "decision_payload"
- ]
+ "type": "array",
+ "items": { "type": "string" },
+ "description": description
})
}
-fn metric_order_schema() -> Value {
- json!({
- "type": "string",
- "enum": ["asc", "desc"]
- })
+fn selector_schema(description: &str) -> Value {
+ string_schema(description)
}
-fn field_value_type_schema() -> Value {
+fn vertex_selector_schema() -> Value {
json!({
- "type": "string",
- "enum": ["string", "numeric", "boolean", "timestamp"]
+ "type": "object",
+ "properties": {
+ "kind": { "type": "string", "enum": ["hypothesis", "experiment"] },
+ "selector": { "type": "string" }
+ },
+ "required": ["kind", "selector"],
+ "additionalProperties": false
})
}
-fn diagnostic_severity_schema() -> Value {
+fn attachment_selector_schema() -> Value {
json!({
- "type": "string",
- "enum": ["error", "warning", "info"]
+ "type": "object",
+ "properties": {
+ "kind": { "type": "string", "enum": ["frontier", "hypothesis", "experiment"] },
+ "selector": { "type": "string" }
+ },
+ "required": ["kind", "selector"],
+ "additionalProperties": false
})
}
-fn field_presence_schema() -> Value {
- json!({
- "type": "string",
- "enum": ["required", "recommended", "optional"]
- })
+fn vertex_selector_array_schema() -> Value {
+ json!({ "type": "array", "items": vertex_selector_schema() })
+}
+
+fn attachment_selector_array_schema() -> Value {
+ json!({ "type": "array", "items": attachment_selector_schema() })
}
-fn field_role_schema() -> Value {
+fn roadmap_schema() -> Value {
json!({
- "type": "string",
- "enum": ["index", "projection_gate", "render_only", "opaque"]
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "rank": { "type": "integer", "minimum": 0 },
+ "hypothesis": { "type": "string" },
+ "summary": { "type": "string" }
+ },
+ "required": ["rank", "hypothesis"],
+ "additionalProperties": false
+ }
})
}
-fn inference_policy_schema() -> Value {
+fn command_schema() -> Value {
json!({
- "type": "string",
- "enum": ["manual_only", "model_may_infer"]
+ "type": "object",
+ "properties": {
+ "working_directory": { "type": "string" },
+ "argv": { "type": "array", "items": { "type": "string" } },
+ "env": {
+ "type": "object",
+ "additionalProperties": { "type": "string" }
+ }
+ },
+ "required": ["argv"],
+ "additionalProperties": false
})
}
-fn optimization_objective_schema() -> Value {
+fn metric_value_schema() -> Value {
json!({
- "type": "string",
- "enum": ["minimize", "maximize", "target"]
+ "type": "object",
+ "properties": {
+ "key": { "type": "string" },
+ "value": { "type": "number" }
+ },
+ "required": ["key", "value"],
+ "additionalProperties": false
})
}
-fn verdict_schema() -> Value {
+fn metric_value_array_schema() -> Value {
+ json!({ "type": "array", "items": metric_value_schema() })
+}
+
+fn run_dimensions_schema() -> Value {
json!({
- "type": "string",
- "enum": [
- "accepted",
- "kept",
- "parked",
- "rejected"
- ]
+ "type": "object",
+ "additionalProperties": true,
+ "description": "Exact run-dimension filter or outcome dimension map. Values may be strings, numbers, booleans, or RFC3339 timestamps."
})
}
-fn run_schema() -> Value {
+fn experiment_analysis_schema() -> Value {
json!({
"type": "object",
"properties": {
- "title": { "type": "string" },
"summary": { "type": "string" },
- "backend": {
- "type": "string",
- "enum": ["local_process", "worktree_process", "ssh_process"]
- },
- "dimensions": { "type": "object" },
- "command": {
- "type": "object",
- "properties": {
- "working_directory": { "type": "string" },
- "argv": { "type": "array", "items": { "type": "string" } },
- "env": {
- "type": "object",
- "additionalProperties": { "type": "string" }
- }
- },
- "required": ["argv"],
- "additionalProperties": false
- }
+ "body": { "type": "string" }
},
- "required": ["title", "backend", "dimensions", "command"],
+ "required": ["summary", "body"],
"additionalProperties": false
})
}
-fn note_schema() -> Value {
+fn experiment_outcome_schema() -> Value {
json!({
"type": "object",
"properties": {
- "summary": { "type": "string" },
- "next_hypotheses": { "type": "array", "items": { "type": "string" } }
+ "backend": { "type": "string", "enum": ["manual", "local_process", "worktree_process", "ssh_process"] },
+ "command": command_schema(),
+ "dimensions": run_dimensions_schema(),
+ "primary_metric": metric_value_schema(),
+ "supporting_metrics": metric_value_array_schema(),
+ "verdict": { "type": "string", "enum": ["accepted", "kept", "parked", "rejected"] },
+ "rationale": { "type": "string" },
+ "analysis": experiment_analysis_schema()
},
- "required": ["summary"],
+ "required": ["backend", "command", "dimensions", "primary_metric", "verdict", "rationale"],
"additionalProperties": false
})
}