swarm repositories / source
aboutsummaryrefslogtreecommitdiff
path: root/crates/fidget-spinner-cli
diff options
context:
space:
mode:
Diffstat (limited to 'crates/fidget-spinner-cli')
-rw-r--r--crates/fidget-spinner-cli/src/mcp/catalog.rs41
-rw-r--r--crates/fidget-spinner-cli/src/mcp/host/runtime.rs338
-rw-r--r--crates/fidget-spinner-cli/src/mcp/output.rs171
-rw-r--r--crates/fidget-spinner-cli/src/mcp/service.rs921
-rw-r--r--crates/fidget-spinner-cli/tests/mcp_hardening.rs167
5 files changed, 1519 insertions, 119 deletions
diff --git a/crates/fidget-spinner-cli/src/mcp/catalog.rs b/crates/fidget-spinner-cli/src/mcp/catalog.rs
index ec57a5c..b23cb31 100644
--- a/crates/fidget-spinner-cli/src/mcp/catalog.rs
+++ b/crates/fidget-spinner-cli/src/mcp/catalog.rs
@@ -1,7 +1,7 @@
use libmcp::ReplayContract;
use serde_json::{Value, json};
-use crate::mcp::output::with_render_property;
+use crate::mcp::output::with_common_presentation;
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub(crate) enum DispatchTarget {
@@ -67,6 +67,18 @@ pub(crate) fn tool_spec(name: &str) -> Option<ToolSpec> {
dispatch: DispatchTarget::Worker,
replay: ReplayContract::Convergent,
}),
+ "tag.add" => Some(ToolSpec {
+ name: "tag.add",
+ description: "Register one repo-local tag with a required description. Notes may only reference tags from this registry.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ }),
+ "tag.list" => Some(ToolSpec {
+ name: "tag.list",
+ description: "List repo-local tags available for note and node tagging.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::Convergent,
+ }),
"frontier.list" => Some(ToolSpec {
name: "frontier.list",
description: "List frontiers for the current project.",
@@ -200,6 +212,8 @@ pub(crate) fn tool_definitions() -> Vec<Value> {
"project.bind",
"project.status",
"project.schema",
+ "tag.add",
+ "tag.list",
"frontier.list",
"frontier.status",
"frontier.init",
@@ -223,7 +237,7 @@ pub(crate) fn tool_definitions() -> Vec<Value> {
json!({
"name": spec.name,
"description": spec.description,
- "inputSchema": with_render_property(input_schema(spec.name)),
+ "inputSchema": with_common_presentation(input_schema(spec.name)),
"annotations": spec.annotation_json(),
})
})
@@ -262,7 +276,7 @@ pub(crate) fn list_resources() -> Vec<Value> {
fn input_schema(name: &str) -> Value {
match name {
- "project.status" | "project.schema" | "skill.list" | "system.health"
+ "project.status" | "project.schema" | "tag.list" | "skill.list" | "system.health"
| "system.telemetry" => json!({"type":"object","additionalProperties":false}),
"project.bind" => json!({
"type": "object",
@@ -272,6 +286,15 @@ fn input_schema(name: &str) -> Value {
"required": ["path"],
"additionalProperties": false
}),
+ "tag.add" => json!({
+ "type": "object",
+ "properties": {
+ "name": { "type": "string", "description": "Lowercase repo-local tag name." },
+ "description": { "type": "string", "description": "Human-facing tag description." }
+ },
+ "required": ["name", "description"],
+ "additionalProperties": false
+ }),
"skill.show" => json!({
"type": "object",
"properties": {
@@ -311,6 +334,7 @@ fn input_schema(name: &str) -> Value {
"frontier_id": { "type": "string" },
"title": { "type": "string" },
"summary": { "type": "string" },
+ "tags": { "type": "array", "items": tag_name_schema() },
"payload": { "type": "object" },
"annotations": { "type": "array", "items": annotation_schema() },
"parents": { "type": "array", "items": { "type": "string" } }
@@ -339,6 +363,7 @@ fn input_schema(name: &str) -> Value {
"properties": {
"frontier_id": { "type": "string" },
"class": node_class_schema(),
+ "tags": { "type": "array", "items": tag_name_schema() },
"include_archived": { "type": "boolean" },
"limit": { "type": "integer", "minimum": 1, "maximum": 500 }
},
@@ -369,10 +394,11 @@ fn input_schema(name: &str) -> Value {
"frontier_id": { "type": "string" },
"title": { "type": "string" },
"body": { "type": "string" },
+ "tags": { "type": "array", "items": tag_name_schema() },
"annotations": { "type": "array", "items": annotation_schema() },
"parents": { "type": "array", "items": { "type": "string" } }
},
- "required": ["title", "body"],
+ "required": ["title", "body", "tags"],
"additionalProperties": false
}),
"research.record" => json!({
@@ -462,6 +488,13 @@ fn annotation_schema() -> Value {
})
}
+fn tag_name_schema() -> Value {
+ json!({
+ "type": "string",
+ "pattern": "^[a-z0-9]+(?:[-_/][a-z0-9]+)*$"
+ })
+}
+
fn node_class_schema() -> Value {
json!({
"type": "string",
diff --git a/crates/fidget-spinner-cli/src/mcp/host/runtime.rs b/crates/fidget-spinner-cli/src/mcp/host/runtime.rs
index 17c26c7..f84f604 100644
--- a/crates/fidget-spinner-cli/src/mcp/host/runtime.rs
+++ b/crates/fidget-spinner-cli/src/mcp/host/runtime.rs
@@ -10,7 +10,7 @@ use libmcp::{
remove_snapshot_file, write_snapshot_file,
};
use serde::Serialize;
-use serde_json::{Value, json};
+use serde_json::{Map, Value, json};
use super::{
binary::BinaryRuntime,
@@ -21,7 +21,7 @@ use crate::mcp::catalog::{
DispatchTarget, list_resources, resource_spec, tool_definitions, tool_spec,
};
use crate::mcp::fault::{FaultKind, FaultRecord, FaultStage};
-use crate::mcp::output::split_render_mode;
+use crate::mcp::output::{ToolOutput, detailed_tool_output, split_presentation, tool_success};
use crate::mcp::protocol::{
CRASH_ONCE_ENV, FORCE_ROLLOUT_ENV, HOST_STATE_ENV, HostRequestId, HostStateSeed,
PROTOCOL_VERSION, ProjectBindingSeed, SERVER_NAME, WorkerOperation, WorkerSpawnConfig,
@@ -385,7 +385,8 @@ impl HostRuntime {
fn handle_host_tool(&mut self, name: &str, arguments: Value) -> Result<Value, FaultRecord> {
let operation = format!("tools/call:{name}");
- let (render, arguments) = split_render_mode(arguments, &operation, FaultStage::Host)?;
+ let (presentation, arguments) =
+ split_presentation(arguments, &operation, FaultStage::Host)?;
match name {
"project.bind" => {
let args = deserialize::<ProjectBindArgs>(arguments, "tools/call:project.bind")?;
@@ -393,13 +394,18 @@ impl HostRuntime {
.map_err(host_store_fault("tools/call:project.bind"))?;
self.worker.rebind(resolved.binding.project_root.clone());
self.binding = Some(resolved.binding);
- tool_success(&resolved.status, render)
+ tool_success(
+ project_bind_output(&resolved.status)?,
+ presentation,
+ FaultStage::Host,
+ "tools/call:project.bind",
+ )
}
"skill.list" => tool_success(
- &json!({
- "skills": crate::bundled_skill::bundled_skill_summaries(),
- }),
- render,
+ skill_list_output()?,
+ presentation,
+ FaultStage::Host,
+ "tools/call:skill.list",
),
"skill.show" => {
let args = deserialize::<SkillShowArgs>(arguments, "tools/call:skill.show")?;
@@ -417,17 +423,14 @@ impl HostRuntime {
},
)?;
tool_success(
- &json!({
- "name": skill.name,
- "description": skill.description,
- "resource_uri": skill.resource_uri,
- "body": skill.body,
- }),
- render,
+ skill_show_output(skill)?,
+ presentation,
+ FaultStage::Host,
+ "tools/call:skill.show",
)
}
- "system.health" => tool_success(
- &HealthSnapshot {
+ "system.health" => {
+ let health = HealthSnapshot {
initialization: InitializationHealth {
ready: self.session_initialized(),
seed_captured: self.seed_captured(),
@@ -443,10 +446,20 @@ impl HostRuntime {
rollout_pending: self.binary.rollout_pending().unwrap_or(false),
},
last_fault: self.telemetry.last_fault.clone(),
- },
- render,
+ };
+ tool_success(
+ system_health_output(&health)?,
+ presentation,
+ FaultStage::Host,
+ "tools/call:system.health",
+ )
+ }
+ "system.telemetry" => tool_success(
+ system_telemetry_output(&self.telemetry)?,
+ presentation,
+ FaultStage::Host,
+ "tools/call:system.telemetry",
),
- "system.telemetry" => tool_success(&self.telemetry, render),
other => Err(FaultRecord::new(
FaultKind::InvalidInput,
FaultStage::Host,
@@ -597,7 +610,7 @@ struct ResolvedProjectBinding {
fn resolve_project_binding(
requested_path: PathBuf,
) -> Result<ResolvedProjectBinding, fidget_spinner_store_sqlite::StoreError> {
- let store = crate::open_store(&requested_path)?;
+ let store = crate::open_or_init_store_for_binding(&requested_path)?;
Ok(ResolvedProjectBinding {
binding: ProjectBinding {
requested_path: requested_path.clone(),
@@ -710,8 +723,287 @@ fn request_id_from_frame(frame: &FramedMessage) -> Option<RequestId> {
}
}
-fn tool_success(value: &impl Serialize, render: libmcp::RenderMode) -> Result<Value, FaultRecord> {
- crate::mcp::output::tool_success(value, render, FaultStage::Host, "tool_success")
+fn project_bind_output(status: &ProjectBindStatus) -> Result<ToolOutput, FaultRecord> {
+ let mut concise = Map::new();
+ let _ = concise.insert("project_root".to_owned(), json!(status.project_root));
+ let _ = concise.insert("state_root".to_owned(), json!(status.state_root));
+ let _ = concise.insert("display_name".to_owned(), json!(status.display_name));
+ let _ = concise.insert(
+ "schema".to_owned(),
+ json!(format!(
+ "{}@{}",
+ status.schema.namespace, status.schema.version
+ )),
+ );
+ let _ = concise.insert(
+ "git_repo_detected".to_owned(),
+ json!(status.git_repo_detected),
+ );
+ if status.requested_path != status.project_root {
+ let _ = concise.insert("requested_path".to_owned(), json!(status.requested_path));
+ }
+ detailed_tool_output(
+ &Value::Object(concise),
+ status,
+ [
+ format!("bound project {}", status.display_name),
+ format!("root: {}", status.project_root),
+ format!("state: {}", status.state_root),
+ format!(
+ "schema: {}@{}",
+ status.schema.namespace, status.schema.version
+ ),
+ format!(
+ "git: {}",
+ if status.git_repo_detected {
+ "detected"
+ } else {
+ "not detected"
+ }
+ ),
+ ]
+ .join("\n"),
+ None,
+ FaultStage::Host,
+ "tools/call:project.bind",
+ )
+}
+
+fn skill_list_output() -> Result<ToolOutput, FaultRecord> {
+ let skills = crate::bundled_skill::bundled_skill_summaries();
+ let concise = json!({
+ "skills": skills.iter().map(|skill| {
+ json!({
+ "name": skill.name,
+ "description": skill.description,
+ })
+ }).collect::<Vec<_>>(),
+ });
+ let mut lines = vec![format!("{} bundled skill(s)", skills.len())];
+ lines.extend(
+ skills
+ .iter()
+ .map(|skill| format!("{}: {}", skill.name, skill.description)),
+ );
+ detailed_tool_output(
+ &concise,
+ &json!({ "skills": skills }),
+ lines.join("\n"),
+ None,
+ FaultStage::Host,
+ "tools/call:skill.list",
+ )
+}
+
+fn skill_show_output(skill: crate::bundled_skill::BundledSkill) -> Result<ToolOutput, FaultRecord> {
+ detailed_tool_output(
+ &json!({
+ "name": skill.name,
+ "resource_uri": skill.resource_uri,
+ "body": skill.body,
+ }),
+ &json!({
+ "name": skill.name,
+ "description": skill.description,
+ "resource_uri": skill.resource_uri,
+ "body": skill.body,
+ }),
+ skill.body,
+ None,
+ FaultStage::Host,
+ "tools/call:skill.show",
+ )
+}
+
+fn system_health_output(health: &HealthSnapshot) -> Result<ToolOutput, FaultRecord> {
+ let mut concise = Map::new();
+ let _ = concise.insert(
+ "ready".to_owned(),
+ json!(health.initialization.ready && health.initialization.seed_captured),
+ );
+ let _ = concise.insert("bound".to_owned(), json!(health.binding.bound));
+ if let Some(project_root) = health.binding.project_root.as_ref() {
+ let _ = concise.insert("project_root".to_owned(), json!(project_root));
+ }
+ let _ = concise.insert(
+ "worker_generation".to_owned(),
+ json!(health.worker.worker_generation),
+ );
+ let _ = concise.insert("worker_alive".to_owned(), json!(health.worker.alive));
+ let _ = concise.insert(
+ "launch_path_stable".to_owned(),
+ json!(health.binary.launch_path_stable),
+ );
+ let _ = concise.insert(
+ "rollout_pending".to_owned(),
+ json!(health.binary.rollout_pending),
+ );
+ if let Some(fault) = health.last_fault.as_ref() {
+ let _ = concise.insert(
+ "last_fault".to_owned(),
+ json!({
+ "kind": format!("{:?}", fault.kind).to_ascii_lowercase(),
+ "stage": format!("{:?}", fault.stage).to_ascii_lowercase(),
+ "operation": fault.operation,
+ "message": fault.message,
+ "retryable": fault.retryable,
+ "retried": fault.retried,
+ }),
+ );
+ }
+
+ let mut lines = vec![format!(
+ "{} | {}",
+ if health.initialization.ready && health.initialization.seed_captured {
+ "ready"
+ } else {
+ "not-ready"
+ },
+ if health.binding.bound {
+ "bound"
+ } else {
+ "unbound"
+ }
+ )];
+ if let Some(project_root) = health.binding.project_root.as_ref() {
+ lines.push(format!("project: {project_root}"));
+ }
+ lines.push(format!(
+ "worker: gen {} {}",
+ health.worker.worker_generation,
+ if health.worker.alive { "alive" } else { "dead" }
+ ));
+ lines.push(format!(
+ "binary: {}{}",
+ if health.binary.launch_path_stable {
+ "stable"
+ } else {
+ "unstable"
+ },
+ if health.binary.rollout_pending {
+ " rollout-pending"
+ } else {
+ ""
+ }
+ ));
+ if let Some(fault) = health.last_fault.as_ref() {
+ lines.push(format!(
+ "fault: {} {} {}",
+ format!("{:?}", fault.kind).to_ascii_lowercase(),
+ fault.operation,
+ fault.message,
+ ));
+ }
+ detailed_tool_output(
+ &Value::Object(concise),
+ health,
+ lines.join("\n"),
+ None,
+ FaultStage::Host,
+ "tools/call:system.health",
+ )
+}
+
+fn system_telemetry_output(telemetry: &ServerTelemetry) -> Result<ToolOutput, FaultRecord> {
+ let hot_operations = telemetry
+ .operations
+ .iter()
+ .map(|(operation, stats)| {
+ (
+ operation.clone(),
+ stats.requests,
+ stats.errors,
+ stats.retries,
+ stats.last_latency_ms.unwrap_or(0),
+ )
+ })
+ .collect::<Vec<_>>();
+ let mut hot_operations = hot_operations;
+ hot_operations.sort_by(|left, right| {
+ right
+ .1
+ .cmp(&left.1)
+ .then_with(|| right.2.cmp(&left.2))
+ .then_with(|| right.3.cmp(&left.3))
+ .then_with(|| left.0.cmp(&right.0))
+ });
+ let hot_operations = hot_operations
+ .into_iter()
+ .take(6)
+ .map(|(operation, requests, errors, retries, last_latency_ms)| {
+ json!({
+ "operation": operation,
+ "requests": requests,
+ "errors": errors,
+ "retries": retries,
+ "last_latency_ms": last_latency_ms,
+ })
+ })
+ .collect::<Vec<_>>();
+
+ let mut concise = Map::new();
+ let _ = concise.insert("requests".to_owned(), json!(telemetry.requests));
+ let _ = concise.insert("successes".to_owned(), json!(telemetry.successes));
+ let _ = concise.insert("errors".to_owned(), json!(telemetry.errors));
+ let _ = concise.insert("retries".to_owned(), json!(telemetry.retries));
+ let _ = concise.insert(
+ "worker_restarts".to_owned(),
+ json!(telemetry.worker_restarts),
+ );
+ let _ = concise.insert("host_rollouts".to_owned(), json!(telemetry.host_rollouts));
+ let _ = concise.insert("hot_operations".to_owned(), Value::Array(hot_operations));
+ if let Some(fault) = telemetry.last_fault.as_ref() {
+ let _ = concise.insert(
+ "last_fault".to_owned(),
+ json!({
+ "kind": format!("{:?}", fault.kind).to_ascii_lowercase(),
+ "operation": fault.operation,
+ "message": fault.message,
+ }),
+ );
+ }
+
+ let mut lines = vec![format!(
+ "requests={} success={} error={} retry={}",
+ telemetry.requests, telemetry.successes, telemetry.errors, telemetry.retries
+ )];
+ lines.push(format!(
+ "worker_restarts={} host_rollouts={}",
+ telemetry.worker_restarts, telemetry.host_rollouts
+ ));
+ let mut ranked_operations = telemetry.operations.iter().collect::<Vec<_>>();
+ ranked_operations.sort_by(|(left_name, left), (right_name, right)| {
+ right
+ .requests
+ .cmp(&left.requests)
+ .then_with(|| right.errors.cmp(&left.errors))
+ .then_with(|| right.retries.cmp(&left.retries))
+ .then_with(|| left_name.cmp(right_name))
+ });
+ if !ranked_operations.is_empty() {
+ lines.push("hot operations:".to_owned());
+ for (operation, stats) in ranked_operations.into_iter().take(6) {
+ lines.push(format!(
+ "{} req={} err={} retry={} last={}ms",
+ operation,
+ stats.requests,
+ stats.errors,
+ stats.retries,
+ stats.last_latency_ms.unwrap_or(0),
+ ));
+ }
+ }
+ if let Some(fault) = telemetry.last_fault.as_ref() {
+ lines.push(format!("last fault: {} {}", fault.operation, fault.message));
+ }
+ detailed_tool_output(
+ &Value::Object(concise),
+ telemetry,
+ lines.join("\n"),
+ None,
+ FaultStage::Host,
+ "tools/call:system.telemetry",
+ )
}
fn host_store_fault(
diff --git a/crates/fidget-spinner-cli/src/mcp/output.rs b/crates/fidget-spinner-cli/src/mcp/output.rs
index 58f7eb4..01b7bc1 100644
--- a/crates/fidget-spinner-cli/src/mcp/output.rs
+++ b/crates/fidget-spinner-cli/src/mcp/output.rs
@@ -1,16 +1,72 @@
-use libmcp::{JsonPorcelainConfig, RenderMode, render_json_porcelain};
+use libmcp::{
+ DetailLevel, JsonPorcelainConfig, RenderMode, render_json_porcelain,
+ with_presentation_properties,
+};
use serde::Serialize;
-use serde_json::{Map, Value, json};
+use serde_json::{Value, json};
use crate::mcp::fault::{FaultKind, FaultRecord, FaultStage};
-pub(crate) fn split_render_mode(
+const CONCISE_PORCELAIN_MAX_LINES: usize = 12;
+const CONCISE_PORCELAIN_MAX_INLINE_CHARS: usize = 160;
+const FULL_PORCELAIN_MAX_LINES: usize = 40;
+const FULL_PORCELAIN_MAX_INLINE_CHARS: usize = 512;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub(crate) struct Presentation {
+ pub render: RenderMode,
+ pub detail: DetailLevel,
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct ToolOutput {
+ concise: Value,
+ full: Value,
+ concise_text: String,
+ full_text: Option<String>,
+}
+
+impl ToolOutput {
+ #[must_use]
+ pub(crate) fn from_values(
+ concise: Value,
+ full: Value,
+ concise_text: impl Into<String>,
+ full_text: Option<String>,
+ ) -> Self {
+ Self {
+ concise,
+ full,
+ concise_text: concise_text.into(),
+ full_text,
+ }
+ }
+
+ fn structured(&self, detail: DetailLevel) -> &Value {
+ match detail {
+ DetailLevel::Concise => &self.concise,
+ DetailLevel::Full => &self.full,
+ }
+ }
+
+ fn porcelain_text(&self, detail: DetailLevel) -> String {
+ match detail {
+ DetailLevel::Concise => self.concise_text.clone(),
+ DetailLevel::Full => self
+ .full_text
+ .clone()
+ .unwrap_or_else(|| render_json_porcelain(&self.full, full_porcelain_config())),
+ }
+ }
+}
+
+pub(crate) fn split_presentation(
arguments: Value,
operation: &str,
stage: FaultStage,
-) -> Result<(RenderMode, Value), FaultRecord> {
+) -> Result<(Presentation, Value), FaultRecord> {
let Value::Object(mut object) = arguments else {
- return Ok((RenderMode::Porcelain, arguments));
+ return Ok((Presentation::default(), arguments));
};
let render = object
.remove("render")
@@ -26,29 +82,71 @@ pub(crate) fn split_render_mode(
})
.transpose()?
.unwrap_or(RenderMode::Porcelain);
- Ok((render, Value::Object(object)))
+ let detail = object
+ .remove("detail")
+ .map(|value| {
+ serde_json::from_value::<DetailLevel>(value).map_err(|error| {
+ FaultRecord::new(
+ FaultKind::InvalidInput,
+ stage,
+ operation,
+ format!("invalid detail level: {error}"),
+ )
+ })
+ })
+ .transpose()?
+ .unwrap_or(DetailLevel::Concise);
+ Ok((Presentation { render, detail }, Value::Object(object)))
}
-pub(crate) fn tool_success(
+pub(crate) fn tool_output(
value: &impl Serialize,
- render: RenderMode,
stage: FaultStage,
operation: &str,
-) -> Result<Value, FaultRecord> {
+) -> Result<ToolOutput, FaultRecord> {
let structured = serde_json::to_value(value).map_err(|error| {
FaultRecord::new(FaultKind::Internal, stage, operation, error.to_string())
})?;
- tool_success_from_value(structured, render, stage, operation)
+ let concise_text = render_json_porcelain(&structured, concise_porcelain_config());
+ Ok(ToolOutput::from_values(
+ structured.clone(),
+ structured,
+ concise_text,
+ None,
+ ))
+}
+
+pub(crate) fn detailed_tool_output(
+ concise: &impl Serialize,
+ full: &impl Serialize,
+ concise_text: impl Into<String>,
+ full_text: Option<String>,
+ stage: FaultStage,
+ operation: &str,
+) -> Result<ToolOutput, FaultRecord> {
+ let concise = serde_json::to_value(concise).map_err(|error| {
+ FaultRecord::new(FaultKind::Internal, stage, operation, error.to_string())
+ })?;
+ let full = serde_json::to_value(full).map_err(|error| {
+ FaultRecord::new(FaultKind::Internal, stage, operation, error.to_string())
+ })?;
+ Ok(ToolOutput::from_values(
+ concise,
+ full,
+ concise_text,
+ full_text,
+ ))
}
-pub(crate) fn tool_success_from_value(
- structured: Value,
- render: RenderMode,
+pub(crate) fn tool_success(
+ output: ToolOutput,
+ presentation: Presentation,
stage: FaultStage,
operation: &str,
) -> Result<Value, FaultRecord> {
- let text = match render {
- RenderMode::Porcelain => render_json_porcelain(&structured, JsonPorcelainConfig::default()),
+ let structured = output.structured(presentation.detail).clone();
+ let text = match presentation.render {
+ RenderMode::Porcelain => output.porcelain_text(presentation.detail),
RenderMode::Json => crate::to_pretty_json(&structured).map_err(|error| {
FaultRecord::new(FaultKind::Internal, stage, operation, error.to_string())
})?,
@@ -63,26 +161,29 @@ pub(crate) fn tool_success_from_value(
}))
}
-pub(crate) fn with_render_property(schema: Value) -> Value {
- let Value::Object(mut object) = schema else {
- return schema;
- };
+pub(crate) fn with_common_presentation(schema: Value) -> Value {
+ with_presentation_properties(schema)
+}
+
+const fn concise_porcelain_config() -> JsonPorcelainConfig {
+ JsonPorcelainConfig {
+ max_lines: CONCISE_PORCELAIN_MAX_LINES,
+ max_inline_chars: CONCISE_PORCELAIN_MAX_INLINE_CHARS,
+ }
+}
+
+const fn full_porcelain_config() -> JsonPorcelainConfig {
+ JsonPorcelainConfig {
+ max_lines: FULL_PORCELAIN_MAX_LINES,
+ max_inline_chars: FULL_PORCELAIN_MAX_INLINE_CHARS,
+ }
+}
- let properties = object
- .entry("properties".to_owned())
- .or_insert_with(|| Value::Object(Map::new()));
- if let Value::Object(properties) = properties {
- let _ = properties.insert(
- "render".to_owned(),
- json!({
- "type": "string",
- "enum": ["porcelain", "json"],
- "description": "Output mode. Defaults to porcelain for model-friendly summaries."
- }),
- );
+impl Default for Presentation {
+ fn default() -> Self {
+ Self {
+ render: RenderMode::Porcelain,
+ detail: DetailLevel::Concise,
+ }
}
- let _ = object
- .entry("additionalProperties".to_owned())
- .or_insert(Value::Bool(false));
- Value::Object(object)
}
diff --git a/crates/fidget-spinner-cli/src/mcp/service.rs b/crates/fidget-spinner-cli/src/mcp/service.rs
index 6b9c5da..aee53e0 100644
--- a/crates/fidget-spinner-cli/src/mcp/service.rs
+++ b/crates/fidget-spinner-cli/src/mcp/service.rs
@@ -1,22 +1,25 @@
-use std::collections::BTreeMap;
+use std::collections::{BTreeMap, BTreeSet};
use std::fs;
use camino::{Utf8Path, Utf8PathBuf};
use fidget_spinner_core::{
- AnnotationVisibility, CodeSnapshotRef, CommandRecipe, ExecutionBackend, FrontierContract,
- FrontierNote, FrontierVerdict, MetricObservation, MetricSpec, MetricUnit, NodeAnnotation,
- NodeClass, NodePayload, NonEmptyText,
+ AdmissionState, AnnotationVisibility, CodeSnapshotRef, CommandRecipe, ExecutionBackend,
+ FrontierContract, FrontierNote, FrontierProjection, FrontierRecord, FrontierVerdict,
+ MetricObservation, MetricSpec, MetricUnit, NodeAnnotation, NodeClass, NodePayload,
+ NonEmptyText, ProjectSchema, TagName, TagRecord,
};
use fidget_spinner_store_sqlite::{
CloseExperimentRequest, CreateFrontierRequest, CreateNodeRequest, EdgeAttachment,
- EdgeAttachmentDirection, ListNodesQuery, ProjectStore, StoreError,
+ EdgeAttachmentDirection, ExperimentReceipt, ListNodesQuery, NodeSummary, ProjectStore,
+ StoreError,
};
-use libmcp::RenderMode;
use serde::Deserialize;
use serde_json::{Map, Value, json};
use crate::mcp::fault::{FaultKind, FaultRecord, FaultStage};
-use crate::mcp::output::split_render_mode;
+use crate::mcp::output::{
+ ToolOutput, detailed_tool_output, split_presentation, tool_output, tool_success,
+};
use crate::mcp::protocol::{TRANSIENT_ONCE_ENV, TRANSIENT_ONCE_MARKER_ENV, WorkerOperation};
pub(crate) struct WorkerService {
@@ -45,10 +48,11 @@ impl WorkerService {
fn call_tool(&mut self, name: &str, arguments: Value) -> Result<Value, FaultRecord> {
let operation = format!("tools/call:{name}");
- let (render, arguments) = split_render_mode(arguments, &operation, FaultStage::Worker)?;
+ let (presentation, arguments) =
+ split_presentation(arguments, &operation, FaultStage::Worker)?;
match name {
- "project.status" => tool_success(
- &json!({
+ "project.status" => {
+ let status = json!({
"project_root": self.store.project_root(),
"state_root": self.store.state_root(),
"display_name": self.store.config().display_name,
@@ -56,28 +60,75 @@ impl WorkerService {
"git_repo_detected": crate::run_git(self.store.project_root(), &["rev-parse", "--show-toplevel"])
.map_err(store_fault("tools/call:project.status"))?
.is_some(),
- }),
- render,
+ });
+ tool_success(
+ project_status_output(&status, self.store.schema()),
+ presentation,
+ FaultStage::Worker,
+ "tools/call:project.status",
+ )
+ }
+ "project.schema" => tool_success(
+ project_schema_output(self.store.schema())?,
+ presentation,
+ FaultStage::Worker,
+ "tools/call:project.schema",
),
- "project.schema" => tool_success(self.store.schema(), render),
- "frontier.list" => tool_success(
- &self
+ "tag.add" => {
+ let args = deserialize::<TagAddToolArgs>(arguments)?;
+ let tag = self
+ .store
+ .add_tag(
+ TagName::new(args.name).map_err(store_fault("tools/call:tag.add"))?,
+ NonEmptyText::new(args.description)
+ .map_err(store_fault("tools/call:tag.add"))?,
+ )
+ .map_err(store_fault("tools/call:tag.add"))?;
+ tool_success(
+ tag_add_output(&tag)?,
+ presentation,
+ FaultStage::Worker,
+ "tools/call:tag.add",
+ )
+ }
+ "tag.list" => {
+ let tags = self
+ .store
+ .list_tags()
+ .map_err(store_fault("tools/call:tag.list"))?;
+ tool_success(
+ tag_list_output(tags.as_slice())?,
+ presentation,
+ FaultStage::Worker,
+ "tools/call:tag.list",
+ )
+ }
+ "frontier.list" => {
+ let frontiers = self
.store
.list_frontiers()
- .map_err(store_fault("tools/call:frontier.list"))?,
- render,
- ),
+ .map_err(store_fault("tools/call:frontier.list"))?;
+ tool_success(
+ frontier_list_output(frontiers.as_slice())?,
+ presentation,
+ FaultStage::Worker,
+ "tools/call:frontier.list",
+ )
+ }
"frontier.status" => {
let args = deserialize::<FrontierStatusToolArgs>(arguments)?;
+ let projection = self
+ .store
+ .frontier_projection(
+ crate::parse_frontier_id(&args.frontier_id)
+ .map_err(store_fault("tools/call:frontier.status"))?,
+ )
+ .map_err(store_fault("tools/call:frontier.status"))?;
tool_success(
- &self
- .store
- .frontier_projection(
- crate::parse_frontier_id(&args.frontier_id)
- .map_err(store_fault("tools/call:frontier.status"))?,
- )
- .map_err(store_fault("tools/call:frontier.status"))?,
- render,
+ frontier_status_output(&projection)?,
+ presentation,
+ FaultStage::Worker,
+ "tools/call:frontier.status",
)
}
"frontier.init" => {
@@ -133,7 +184,12 @@ impl WorkerService {
initial_checkpoint,
})
.map_err(store_fault("tools/call:frontier.init"))?;
- tool_success(&projection, render)
+ tool_success(
+ frontier_created_output(&projection)?,
+ presentation,
+ FaultStage::Worker,
+ "tools/call:frontier.init",
+ )
}
"node.create" => {
let args = deserialize::<NodeCreateToolArgs>(arguments)?;
@@ -155,6 +211,11 @@ impl WorkerService {
.map(NonEmptyText::new)
.transpose()
.map_err(store_fault("tools/call:node.create"))?,
+ tags: args
+ .tags
+ .map(parse_tag_set)
+ .transpose()
+ .map_err(store_fault("tools/call:node.create"))?,
payload: NodePayload::with_schema(
self.store.schema().schema_ref(),
args.payload.unwrap_or_default(),
@@ -165,7 +226,12 @@ impl WorkerService {
.map_err(store_fault("tools/call:node.create"))?,
})
.map_err(store_fault("tools/call:node.create"))?;
- tool_success(&node, render)
+ tool_success(
+ created_node_output("created node", &node, "tools/call:node.create")?,
+ presentation,
+ FaultStage::Worker,
+ "tools/call:node.create",
+ )
}
"change.record" => {
let args = deserialize::<ChangeRecordToolArgs>(arguments)?;
@@ -199,6 +265,7 @@ impl WorkerService {
.map(NonEmptyText::new)
.transpose()
.map_err(store_fault("tools/call:change.record"))?,
+ tags: None,
payload: NodePayload::with_schema(self.store.schema().schema_ref(), fields),
annotations: tool_annotations(args.annotations)
.map_err(store_fault("tools/call:change.record"))?,
@@ -206,7 +273,12 @@ impl WorkerService {
.map_err(store_fault("tools/call:change.record"))?,
})
.map_err(store_fault("tools/call:change.record"))?;
- tool_success(&node, render)
+ tool_success(
+ created_node_output("recorded change", &node, "tools/call:change.record")?,
+ presentation,
+ FaultStage::Worker,
+ "tools/call:change.record",
+ )
}
"node.list" => {
let args = deserialize::<NodeListToolArgs>(arguments)?;
@@ -225,11 +297,18 @@ impl WorkerService {
.map(parse_node_class_name)
.transpose()
.map_err(store_fault("tools/call:node.list"))?,
+ tags: parse_tag_set(args.tags)
+ .map_err(store_fault("tools/call:node.list"))?,
include_archived: args.include_archived,
limit: args.limit.unwrap_or(20),
})
.map_err(store_fault("tools/call:node.list"))?;
- tool_success(&nodes, render)
+ tool_success(
+ node_list_output(nodes.as_slice())?,
+ presentation,
+ FaultStage::Worker,
+ "tools/call:node.list",
+ )
}
"node.read" => {
let args = deserialize::<NodeReadToolArgs>(arguments)?;
@@ -247,7 +326,12 @@ impl WorkerService {
format!("node {node_id} was not found"),
)
})?;
- tool_success(&node, render)
+ tool_success(
+ node_read_output(&node)?,
+ presentation,
+ FaultStage::Worker,
+ "tools/call:node.read",
+ )
}
"node.annotate" => {
let args = deserialize::<NodeAnnotateToolArgs>(arguments)?;
@@ -274,7 +358,16 @@ impl WorkerService {
annotation,
)
.map_err(store_fault("tools/call:node.annotate"))?;
- tool_success(&json!({"annotated": args.node_id}), render)
+ tool_success(
+ tool_output(
+ &json!({"annotated": args.node_id}),
+ FaultStage::Worker,
+ "tools/call:node.annotate",
+ )?,
+ presentation,
+ FaultStage::Worker,
+ "tools/call:node.annotate",
+ )
}
"node.archive" => {
let args = deserialize::<NodeArchiveToolArgs>(arguments)?;
@@ -284,7 +377,16 @@ impl WorkerService {
.map_err(store_fault("tools/call:node.archive"))?,
)
.map_err(store_fault("tools/call:node.archive"))?;
- tool_success(&json!({"archived": args.node_id}), render)
+ tool_success(
+ tool_output(
+ &json!({"archived": args.node_id}),
+ FaultStage::Worker,
+ "tools/call:node.archive",
+ )?,
+ presentation,
+ FaultStage::Worker,
+ "tools/call:node.archive",
+ )
}
"note.quick" => {
let args = deserialize::<QuickNoteToolArgs>(arguments)?;
@@ -301,6 +403,10 @@ impl WorkerService {
title: NonEmptyText::new(args.title)
.map_err(store_fault("tools/call:note.quick"))?,
summary: None,
+ tags: Some(
+ parse_tag_set(args.tags)
+ .map_err(store_fault("tools/call:note.quick"))?,
+ ),
payload: NodePayload::with_schema(
self.store.schema().schema_ref(),
crate::json_object(json!({ "body": args.body }))
@@ -312,7 +418,12 @@ impl WorkerService {
.map_err(store_fault("tools/call:note.quick"))?,
})
.map_err(store_fault("tools/call:note.quick"))?;
- tool_success(&node, render)
+ tool_success(
+ created_node_output("recorded note", &node, "tools/call:note.quick")?,
+ presentation,
+ FaultStage::Worker,
+ "tools/call:note.quick",
+ )
}
"research.record" => {
let args = deserialize::<ResearchRecordToolArgs>(arguments)?;
@@ -333,6 +444,7 @@ impl WorkerService {
.map(NonEmptyText::new)
.transpose()
.map_err(store_fault("tools/call:research.record"))?,
+ tags: None,
payload: NodePayload::with_schema(
self.store.schema().schema_ref(),
crate::json_object(json!({ "body": args.body }))
@@ -344,7 +456,12 @@ impl WorkerService {
.map_err(store_fault("tools/call:research.record"))?,
})
.map_err(store_fault("tools/call:research.record"))?;
- tool_success(&node, render)
+ tool_success(
+ created_node_output("recorded research", &node, "tools/call:research.record")?,
+ presentation,
+ FaultStage::Worker,
+ "tools/call:research.record",
+ )
}
"experiment.close" => {
let args = deserialize::<ExperimentCloseToolArgs>(arguments)?;
@@ -429,7 +546,12 @@ impl WorkerService {
.map_err(store_fault("tools/call:experiment.close"))?,
})
.map_err(store_fault("tools/call:experiment.close"))?;
- tool_success(&receipt, render)
+ tool_success(
+ experiment_close_output(&receipt)?,
+ presentation,
+ FaultStage::Worker,
+ "tools/call:experiment.close",
+ )
}
other => Err(FaultRecord::new(
FaultKind::InvalidInput,
@@ -510,8 +632,711 @@ fn deserialize<T: for<'de> Deserialize<'de>>(value: Value) -> Result<T, FaultRec
})
}
-fn tool_success(value: &impl serde::Serialize, render: RenderMode) -> Result<Value, FaultRecord> {
- crate::mcp::output::tool_success(value, render, FaultStage::Worker, "worker.tool_success")
+fn project_status_output(full: &Value, schema: &ProjectSchema) -> ToolOutput {
+ let concise = json!({
+ "display_name": full["display_name"],
+ "project_root": full["project_root"],
+ "state_root": full["state_root"],
+ "schema": schema_label(schema),
+ "git_repo_detected": full["git_repo_detected"],
+ });
+ let git = if full["git_repo_detected"].as_bool().unwrap_or(false) {
+ "detected"
+ } else {
+ "not detected"
+ };
+ ToolOutput::from_values(
+ concise,
+ full.clone(),
+ [
+ format!("project {}", value_summary(&full["display_name"])),
+ format!("root: {}", value_summary(&full["project_root"])),
+ format!("state: {}", value_summary(&full["state_root"])),
+ format!("schema: {}", schema_label(schema)),
+ format!("git: {git}"),
+ ]
+ .join("\n"),
+ None,
+ )
+}
+
+fn project_schema_output(schema: &ProjectSchema) -> Result<ToolOutput, FaultRecord> {
+ let field_previews = schema
+ .fields
+ .iter()
+ .take(8)
+ .map(project_schema_field_value)
+ .collect::<Vec<_>>();
+ let concise = json!({
+ "namespace": schema.namespace,
+ "version": schema.version,
+ "field_count": schema.fields.len(),
+ "fields": field_previews,
+ "truncated": schema.fields.len() > 8,
+ });
+ let mut lines = vec![
+ format!("schema {}", schema_label(schema)),
+ format!("{} field(s)", schema.fields.len()),
+ ];
+ for field in schema.fields.iter().take(8) {
+ lines.push(format!(
+ "{} [{}] {} {}",
+ field.name,
+ if field.node_classes.is_empty() {
+ "any".to_owned()
+ } else {
+ field
+ .node_classes
+ .iter()
+ .map(ToString::to_string)
+ .collect::<Vec<_>>()
+ .join(",")
+ },
+ format!("{:?}", field.presence).to_ascii_lowercase(),
+ format!("{:?}", field.role).to_ascii_lowercase(),
+ ));
+ }
+ if schema.fields.len() > 8 {
+ lines.push(format!("... +{} more field(s)", schema.fields.len() - 8));
+ }
+ detailed_tool_output(
+ &concise,
+ schema,
+ lines.join("\n"),
+ None,
+ FaultStage::Worker,
+ "tools/call:project.schema",
+ )
+}
+
+fn tag_add_output(tag: &TagRecord) -> Result<ToolOutput, FaultRecord> {
+ let concise = json!({
+ "name": tag.name,
+ "description": tag.description,
+ });
+ detailed_tool_output(
+ &concise,
+ tag,
+ format!("registered tag {}\n{}", tag.name, tag.description),
+ None,
+ FaultStage::Worker,
+ "tools/call:tag.add",
+ )
+}
+
+fn tag_list_output(tags: &[TagRecord]) -> Result<ToolOutput, FaultRecord> {
+ let concise = tags
+ .iter()
+ .map(|tag| {
+ json!({
+ "name": tag.name,
+ "description": tag.description,
+ })
+ })
+ .collect::<Vec<_>>();
+ let mut lines = vec![format!("{} tag(s)", tags.len())];
+ lines.extend(
+ tags.iter()
+ .map(|tag| format!("{}: {}", tag.name, tag.description)),
+ );
+ detailed_tool_output(
+ &concise,
+ &tags,
+ lines.join("\n"),
+ None,
+ FaultStage::Worker,
+ "tools/call:tag.list",
+ )
+}
+
+fn frontier_list_output(frontiers: &[FrontierRecord]) -> Result<ToolOutput, FaultRecord> {
+ let concise = frontiers
+ .iter()
+ .map(|frontier| {
+ json!({
+ "frontier_id": frontier.id,
+ "label": frontier.label,
+ "status": format!("{:?}", frontier.status).to_ascii_lowercase(),
+ })
+ })
+ .collect::<Vec<_>>();
+ let mut lines = vec![format!("{} frontier(s)", frontiers.len())];
+ lines.extend(frontiers.iter().map(|frontier| {
+ format!(
+ "{} {} {}",
+ frontier.id,
+ format!("{:?}", frontier.status).to_ascii_lowercase(),
+ frontier.label,
+ )
+ }));
+ detailed_tool_output(
+ &concise,
+ &frontiers,
+ lines.join("\n"),
+ None,
+ FaultStage::Worker,
+ "tools/call:frontier.list",
+ )
+}
+
+fn frontier_status_output(projection: &FrontierProjection) -> Result<ToolOutput, FaultRecord> {
+ let concise = frontier_projection_summary_value(projection);
+ detailed_tool_output(
+ &concise,
+ projection,
+ frontier_projection_text("frontier", projection),
+ None,
+ FaultStage::Worker,
+ "tools/call:frontier.status",
+ )
+}
+
+fn frontier_created_output(projection: &FrontierProjection) -> Result<ToolOutput, FaultRecord> {
+ let concise = frontier_projection_summary_value(projection);
+ detailed_tool_output(
+ &concise,
+ projection,
+ frontier_projection_text("created frontier", projection),
+ None,
+ FaultStage::Worker,
+ "tools/call:frontier.init",
+ )
+}
+
/// Shared output builder for node-creating tools (used by at least
/// `note.quick` and `research.record`): `action` is the porcelain verb
/// ("recorded note", ...) and `operation` is the fault-attribution label of
/// the calling tool, forwarded to `detailed_tool_output`.
fn created_node_output(
    action: &str,
    node: &fidget_spinner_core::DagNode,
    operation: &'static str,
) -> Result<ToolOutput, FaultRecord> {
    let concise = node_brief_value(node);
    let mut lines = vec![format!("{action}: {} {}", node.class, node.id)];
    lines.push(format!("title: {}", node.title));
    // Optional attributes only appear when present, keeping porcelain terse.
    if let Some(summary) = node.summary.as_ref() {
        lines.push(format!("summary: {summary}"));
    }
    if !node.tags.is_empty() {
        lines.push(format!("tags: {}", format_tags(&node.tags)));
    }
    if let Some(frontier_id) = node.frontier_id {
        lines.push(format!("frontier: {frontier_id}"));
    }
    if !node.diagnostics.items.is_empty() {
        lines.push(format!(
            "diagnostics: {}",
            diagnostic_summary_text(&node.diagnostics)
        ));
    }
    detailed_tool_output(
        &concise,
        node,
        lines.join("\n"),
        None,
        FaultStage::Worker,
        operation,
    )
}
+
+fn node_list_output(nodes: &[NodeSummary]) -> Result<ToolOutput, FaultRecord> {
+ let concise = nodes.iter().map(node_summary_value).collect::<Vec<_>>();
+ let mut lines = vec![format!("{} node(s)", nodes.len())];
+ lines.extend(nodes.iter().map(render_node_summary_line));
+ detailed_tool_output(
+ &concise,
+ &nodes,
+ lines.join("\n"),
+ None,
+ FaultStage::Worker,
+ "tools/call:node.list",
+ )
+}
+
/// Build the `node.read` tool output for a single node.
///
/// Only annotations marked `Visible` are rendered (label + body); annotations
/// marked `HiddenByDefault` contribute a count only. The concise JSON and the
/// porcelain text omit every empty/absent attribute, and the text previews at
/// most 4 visible annotations before a "+N more" marker.
fn node_read_output(node: &fidget_spinner_core::DagNode) -> Result<ToolOutput, FaultRecord> {
    let visible_annotations = node
        .annotations
        .iter()
        .filter(|annotation| annotation.visibility == AnnotationVisibility::Visible)
        .map(|annotation| {
            // Each visible annotation becomes {"label"?: .., "body": ..}.
            let mut value = Map::new();
            if let Some(label) = annotation.label.as_ref() {
                let _ = value.insert("label".to_owned(), json!(label));
            }
            let _ = value.insert("body".to_owned(), json!(annotation.body));
            Value::Object(value)
        })
        .collect::<Vec<_>>();
    let visible_annotation_count = visible_annotations.len();
    let hidden_annotation_count = node
        .annotations
        .iter()
        .filter(|annotation| annotation.visibility == AnnotationVisibility::HiddenByDefault)
        .count();
    // Concise JSON: insert keys only when the backing data is present so the
    // brief view stays small.
    let mut concise = Map::new();
    let _ = concise.insert("id".to_owned(), json!(node.id));
    let _ = concise.insert("class".to_owned(), json!(node.class.as_str()));
    let _ = concise.insert("title".to_owned(), json!(node.title));
    if let Some(summary) = node.summary.as_ref() {
        let _ = concise.insert("summary".to_owned(), json!(summary));
    }
    if let Some(frontier_id) = node.frontier_id {
        let _ = concise.insert("frontier_id".to_owned(), json!(frontier_id));
    }
    if !node.tags.is_empty() {
        let _ = concise.insert(
            "tags".to_owned(),
            json!(
                node.tags
                    .iter()
                    .map(ToString::to_string)
                    .collect::<Vec<_>>()
            ),
        );
    }
    if !node.payload.fields.is_empty() {
        // Payload is shown as a truncated preview, never verbatim.
        let _ = concise.insert(
            "payload_field_count".to_owned(),
            json!(node.payload.fields.len()),
        );
        let _ = concise.insert(
            "payload_preview".to_owned(),
            payload_preview_value(&node.payload.fields),
        );
    }
    if !node.diagnostics.items.is_empty() {
        let _ = concise.insert(
            "diagnostics".to_owned(),
            diagnostic_summary_value(&node.diagnostics),
        );
    }
    if visible_annotation_count > 0 {
        let _ = concise.insert(
            "visible_annotations".to_owned(),
            Value::Array(visible_annotations),
        );
    }
    if hidden_annotation_count > 0 {
        let _ = concise.insert(
            "hidden_annotation_count".to_owned(),
            json!(hidden_annotation_count),
        );
    }

    // Porcelain text: header line then one "key: value" line per present
    // attribute, mirroring the concise JSON above.
    let mut lines = vec![format!("{} {} {}", node.class, node.id, node.title)];
    if let Some(summary) = node.summary.as_ref() {
        lines.push(format!("summary: {summary}"));
    }
    if let Some(frontier_id) = node.frontier_id {
        lines.push(format!("frontier: {frontier_id}"));
    }
    if !node.tags.is_empty() {
        lines.push(format!("tags: {}", format_tags(&node.tags)));
    }
    lines.extend(payload_preview_lines(&node.payload.fields));
    if !node.diagnostics.items.is_empty() {
        lines.push(format!(
            "diagnostics: {}",
            diagnostic_summary_text(&node.diagnostics)
        ));
    }
    if visible_annotation_count > 0 {
        lines.push(format!("visible annotations: {}", visible_annotation_count));
        // Show at most the first 4 visible annotations in the text view.
        for annotation in node
            .annotations
            .iter()
            .filter(|annotation| annotation.visibility == AnnotationVisibility::Visible)
            .take(4)
        {
            let label = annotation
                .label
                .as_ref()
                .map(|label| format!("{label}: "))
                .unwrap_or_default();
            lines.push(format!("annotation: {label}{}", annotation.body));
        }
        if visible_annotation_count > 4 {
            lines.push(format!(
                "... +{} more visible annotation(s)",
                visible_annotation_count - 4
            ));
        }
    }
    if hidden_annotation_count > 0 {
        lines.push(format!("hidden annotations: {hidden_annotation_count}"));
    }
    detailed_tool_output(
        &Value::Object(concise),
        node,
        lines.join("\n"),
        None,
        FaultStage::Worker,
        "tools/call:node.read",
    )
}
+
/// Build the `experiment.close` tool output from the close receipt: the
/// concise JSON carries the experiment/run/decision-node identifiers plus the
/// primary metric, and the porcelain text summarizes the same in five lines.
fn experiment_close_output(receipt: &ExperimentReceipt) -> Result<ToolOutput, FaultRecord> {
    let concise = json!({
        "experiment_id": receipt.experiment.id,
        "frontier_id": receipt.experiment.frontier_id,
        "candidate_checkpoint_id": receipt.experiment.candidate_checkpoint_id,
        // Verdict is rendered from Debug, lowercased, like other enums here.
        "verdict": format!("{:?}", receipt.experiment.verdict).to_ascii_lowercase(),
        "run_id": receipt.run.run_id,
        "decision_node_id": receipt.decision_node.id,
        "primary_metric": metric_value(&receipt.experiment.result.primary_metric),
    });
    detailed_tool_output(
        &concise,
        receipt,
        [
            format!(
                "closed experiment {} on frontier {}",
                receipt.experiment.id, receipt.experiment.frontier_id
            ),
            format!("candidate: {}", receipt.experiment.candidate_checkpoint_id),
            format!(
                "verdict: {}",
                format!("{:?}", receipt.experiment.verdict).to_ascii_lowercase()
            ),
            format!(
                "primary metric: {}",
                metric_text(&receipt.experiment.result.primary_metric)
            ),
            format!("run: {}", receipt.run.run_id),
        ]
        .join("\n"),
        None,
        FaultStage::Worker,
        "tools/call:experiment.close",
    )
}
+
/// JSON view of a single schema field for `project.schema` previews.
///
/// Enum-ish attributes (presence, severity, role, inference_policy) are
/// rendered from Debug and lowercased; `node_classes` and `value_type` are
/// omitted when empty/absent.
fn project_schema_field_value(field: &fidget_spinner_core::ProjectFieldSpec) -> Value {
    let mut value = Map::new();
    let _ = value.insert("name".to_owned(), json!(field.name));
    if !field.node_classes.is_empty() {
        let _ = value.insert(
            "node_classes".to_owned(),
            json!(
                field
                    .node_classes
                    .iter()
                    .map(ToString::to_string)
                    .collect::<Vec<_>>()
            ),
        );
    }
    let _ = value.insert(
        "presence".to_owned(),
        json!(format!("{:?}", field.presence).to_ascii_lowercase()),
    );
    let _ = value.insert(
        "severity".to_owned(),
        json!(format!("{:?}", field.severity).to_ascii_lowercase()),
    );
    let _ = value.insert(
        "role".to_owned(),
        json!(format!("{:?}", field.role).to_ascii_lowercase()),
    );
    let _ = value.insert(
        "inference_policy".to_owned(),
        json!(format!("{:?}", field.inference_policy).to_ascii_lowercase()),
    );
    if let Some(value_type) = field.value_type {
        let _ = value.insert("value_type".to_owned(), json!(value_type.as_str()));
    }
    Value::Object(value)
}
+
+fn frontier_projection_summary_value(projection: &FrontierProjection) -> Value {
+ json!({
+ "frontier_id": projection.frontier.id,
+ "label": projection.frontier.label,
+ "status": format!("{:?}", projection.frontier.status).to_ascii_lowercase(),
+ "champion_checkpoint_id": projection.champion_checkpoint_id,
+ "candidate_checkpoint_ids": projection.candidate_checkpoint_ids,
+ "experiment_count": projection.experiment_count,
+ })
+}
+
+fn frontier_projection_text(prefix: &str, projection: &FrontierProjection) -> String {
+ let champion = projection
+ .champion_checkpoint_id
+ .map(|value| value.to_string())
+ .unwrap_or_else(|| "none".to_owned());
+ [
+ format!(
+ "{prefix} {} {}",
+ projection.frontier.id, projection.frontier.label
+ ),
+ format!(
+ "status: {}",
+ format!("{:?}", projection.frontier.status).to_ascii_lowercase()
+ ),
+ format!("champion: {champion}"),
+ format!("candidates: {}", projection.candidate_checkpoint_ids.len()),
+ format!("experiments: {}", projection.experiment_count),
+ ]
+ .join("\n")
+}
+
/// Concise JSON view of a node summary for `node.list`; optional/zero-valued
/// attributes (summary, frontier, tags, archived, counts) are omitted.
fn node_summary_value(node: &NodeSummary) -> Value {
    let mut value = Map::new();
    let _ = value.insert("id".to_owned(), json!(node.id));
    let _ = value.insert("class".to_owned(), json!(node.class.as_str()));
    let _ = value.insert("title".to_owned(), json!(node.title));
    if let Some(summary) = node.summary.as_ref() {
        let _ = value.insert("summary".to_owned(), json!(summary));
    }
    if let Some(frontier_id) = node.frontier_id {
        let _ = value.insert("frontier_id".to_owned(), json!(frontier_id));
    }
    if !node.tags.is_empty() {
        let _ = value.insert(
            "tags".to_owned(),
            json!(
                node.tags
                    .iter()
                    .map(ToString::to_string)
                    .collect::<Vec<_>>()
            ),
        );
    }
    // "archived" only appears when true; absence implies not archived.
    if node.archived {
        let _ = value.insert("archived".to_owned(), json!(true));
    }
    if node.diagnostic_count > 0 {
        let _ = value.insert("diagnostic_count".to_owned(), json!(node.diagnostic_count));
    }
    if node.hidden_annotation_count > 0 {
        let _ = value.insert(
            "hidden_annotation_count".to_owned(),
            json!(node.hidden_annotation_count),
        );
    }
    Value::Object(value)
}
+
/// Concise JSON view of a full `DagNode`, used by node-creation outputs;
/// like `node_summary_value` but sourced from the full node and including a
/// diagnostics summary instead of archive/annotation counts.
fn node_brief_value(node: &fidget_spinner_core::DagNode) -> Value {
    let mut value = Map::new();
    let _ = value.insert("id".to_owned(), json!(node.id));
    let _ = value.insert("class".to_owned(), json!(node.class.as_str()));
    let _ = value.insert("title".to_owned(), json!(node.title));
    if let Some(summary) = node.summary.as_ref() {
        let _ = value.insert("summary".to_owned(), json!(summary));
    }
    if let Some(frontier_id) = node.frontier_id {
        let _ = value.insert("frontier_id".to_owned(), json!(frontier_id));
    }
    if !node.tags.is_empty() {
        let _ = value.insert(
            "tags".to_owned(),
            json!(
                node.tags
                    .iter()
                    .map(ToString::to_string)
                    .collect::<Vec<_>>()
            ),
        );
    }
    if !node.diagnostics.items.is_empty() {
        let _ = value.insert(
            "diagnostics".to_owned(),
            diagnostic_summary_value(&node.diagnostics),
        );
    }
    Value::Object(value)
}
+
+fn render_node_summary_line(node: &NodeSummary) -> String {
+ let mut line = format!("{} {} {}", node.class, node.id, node.title);
+ if let Some(summary) = node.summary.as_ref() {
+ line.push_str(format!(" | {summary}").as_str());
+ }
+ if let Some(frontier_id) = node.frontier_id {
+ line.push_str(format!(" | frontier={frontier_id}").as_str());
+ }
+ if !node.tags.is_empty() {
+ line.push_str(format!(" | tags={}", format_tags(&node.tags)).as_str());
+ }
+ if node.diagnostic_count > 0 {
+ line.push_str(format!(" | diag={}", node.diagnostic_count).as_str());
+ }
+ if node.hidden_annotation_count > 0 {
+ line.push_str(format!(" | hidden-ann={}", node.hidden_annotation_count).as_str());
+ }
+ if node.archived {
+ line.push_str(" | archived");
+ }
+ line
+}
+
+fn diagnostic_summary_value(diagnostics: &fidget_spinner_core::NodeDiagnostics) -> Value {
+ let tally = diagnostic_tally(diagnostics);
+ json!({
+ "admission": match diagnostics.admission {
+ AdmissionState::Admitted => "admitted",
+ AdmissionState::Rejected => "rejected",
+ },
+ "count": tally.total,
+ "error_count": tally.errors,
+ "warning_count": tally.warnings,
+ "info_count": tally.infos,
+ })
+}
+
+fn diagnostic_summary_text(diagnostics: &fidget_spinner_core::NodeDiagnostics) -> String {
+ let tally = diagnostic_tally(diagnostics);
+ let mut parts = vec![format!("{}", tally.total)];
+ if tally.errors > 0 {
+ parts.push(format!("{} error", tally.errors));
+ }
+ if tally.warnings > 0 {
+ parts.push(format!("{} warning", tally.warnings));
+ }
+ if tally.infos > 0 {
+ parts.push(format!("{} info", tally.infos));
+ }
+ format!(
+ "{} ({})",
+ match diagnostics.admission {
+ AdmissionState::Admitted => "admitted",
+ AdmissionState::Rejected => "rejected",
+ },
+ parts.join(", ")
+ )
+}
+
+fn diagnostic_tally(diagnostics: &fidget_spinner_core::NodeDiagnostics) -> DiagnosticTally {
+ diagnostics
+ .items
+ .iter()
+ .fold(DiagnosticTally::default(), |mut tally, item| {
+ tally.total += 1;
+ match item.severity {
+ fidget_spinner_core::DiagnosticSeverity::Error => tally.errors += 1,
+ fidget_spinner_core::DiagnosticSeverity::Warning => tally.warnings += 1,
+ fidget_spinner_core::DiagnosticSeverity::Info => tally.infos += 1,
+ }
+ tally
+ })
+}
+
/// JSON preview of a payload's fields: at most 6 entries, each itself
/// previewed via `payload_value_preview`; further fields collapse into a
/// single "..." marker entry.
fn payload_preview_value(fields: &Map<String, Value>) -> Value {
    let mut preview = Map::new();
    for (index, (name, value)) in fields.iter().enumerate() {
        if index == 6 {
            // Replace the 7th-and-later fields with one truncation marker.
            let _ = preview.insert(
                "...".to_owned(),
                json!(format!("+{} more field(s)", fields.len() - index)),
            );
            break;
        }
        let _ = preview.insert(name.clone(), payload_value_preview(value));
    }
    Value::Object(preview)
}
+
/// Porcelain lines for a payload preview: a count header plus up to 6
/// "payload.<name>: <value>" lines, then a truncation marker. Empty payloads
/// produce no lines at all.
fn payload_preview_lines(fields: &Map<String, Value>) -> Vec<String> {
    if fields.is_empty() {
        return Vec::new();
    }
    let mut lines = vec![format!("payload fields: {}", fields.len())];
    for (index, (name, value)) in fields.iter().enumerate() {
        if index == 6 {
            lines.push(format!("payload: +{} more field(s)", fields.len() - index));
            break;
        }
        // Preview first, then flatten to a one-line summary.
        lines.push(format!(
            "payload.{}: {}",
            name,
            value_summary(&payload_value_preview(value))
        ));
    }
    lines
}
+
/// Recursively truncate a JSON value for preview: scalars pass through
/// (strings get inline whitespace collapsed), arrays keep 3 items (wrapping in
/// a {items, truncated, total_count} object when cut), objects keep 4 fields
/// with a "..." marker entry for the rest.
fn payload_value_preview(value: &Value) -> Value {
    match value {
        Value::Null | Value::Bool(_) | Value::Number(_) => value.clone(),
        Value::String(text) => Value::String(libmcp::collapse_inline_whitespace(text)),
        Value::Array(items) => {
            let preview = items
                .iter()
                .take(3)
                .map(payload_value_preview)
                .collect::<Vec<_>>();
            if items.len() > 3 {
                json!({
                    "items": preview,
                    "truncated": true,
                    "total_count": items.len(),
                })
            } else {
                Value::Array(preview)
            }
        }
        Value::Object(object) => {
            let mut preview = Map::new();
            for (index, (name, nested)) in object.iter().enumerate() {
                if index == 4 {
                    let _ = preview.insert(
                        "...".to_owned(),
                        json!(format!("+{} more field(s)", object.len() - index)),
                    );
                    break;
                }
                let _ = preview.insert(name.clone(), payload_value_preview(nested));
            }
            Value::Object(preview)
        }
    }
}
+
+fn metric_value(metric: &MetricObservation) -> Value {
+ json!({
+ "key": metric.metric_key,
+ "value": metric.value,
+ "unit": format!("{:?}", metric.unit).to_ascii_lowercase(),
+ "objective": format!("{:?}", metric.objective).to_ascii_lowercase(),
+ })
+}
+
+fn metric_text(metric: &MetricObservation) -> String {
+ format!(
+ "{}={} {} ({})",
+ metric.metric_key,
+ metric.value,
+ format!("{:?}", metric.unit).to_ascii_lowercase(),
+ format!("{:?}", metric.objective).to_ascii_lowercase(),
+ )
+}
+
+fn format_tags(tags: &BTreeSet<TagName>) -> String {
+ tags.iter()
+ .map(ToString::to_string)
+ .collect::<Vec<_>>()
+ .join(", ")
+}
+
+fn schema_label(schema: &ProjectSchema) -> String {
+ format!("{}@{}", schema.namespace, schema.version)
+}
+
+fn value_summary(value: &Value) -> String {
+ match value {
+ Value::Null => "null".to_owned(),
+ Value::Bool(flag) => flag.to_string(),
+ Value::Number(number) => number.to_string(),
+ Value::String(text) => text.clone(),
+ Value::Array(items) => format!("{} item(s)", items.len()),
+ Value::Object(object) => format!("{} field(s)", object.len()),
+ }
+}
+
/// Running counts of a node's diagnostics, bucketed by severity; produced by
/// `diagnostic_tally` and consumed by the diagnostic summary renderers.
#[derive(Default)]
struct DiagnosticTally {
    // Total number of diagnostic items across all severities.
    total: usize,
    errors: usize,
    warnings: usize,
    infos: usize,
}
fn store_fault<E>(operation: &'static str) -> impl FnOnce(E) -> FaultRecord
@@ -533,6 +1358,8 @@ fn classify_fault_kind(message: &str) -> FaultKind {
|| message.contains("invalid")
|| message.contains("unknown")
|| message.contains("empty")
+ || message.contains("already exists")
+ || message.contains("require an explicit tag list")
{
FaultKind::InvalidInput
} else {
@@ -571,6 +1398,14 @@ fn lineage_attachments(parents: Vec<String>) -> Result<Vec<EdgeAttachment>, Stor
.collect()
}
+fn parse_tag_set(values: Vec<String>) -> Result<BTreeSet<TagName>, StoreError> {
+ values
+ .into_iter()
+ .map(TagName::new)
+ .collect::<Result<BTreeSet<_>, _>>()
+ .map_err(StoreError::from)
+}
+
fn metric_spec_from_wire(raw: WireMetricSpec) -> Result<MetricSpec, StoreError> {
Ok(MetricSpec {
metric_key: NonEmptyText::new(raw.key)?,
@@ -656,6 +1491,12 @@ struct FrontierStatusToolArgs {
}
/// Wire arguments for the `tag.add` tool call.
#[derive(Debug, Deserialize)]
struct TagAddToolArgs {
    // Tag name to register — presumably validated via `TagName::new` in the
    // tag.add handler; confirm there.
    name: String,
    // Required human-readable description stored in the tag registry.
    description: String,
}
+
+#[derive(Debug, Deserialize)]
struct FrontierInitToolArgs {
label: String,
objective: String,
@@ -675,6 +1516,7 @@ struct NodeCreateToolArgs {
frontier_id: Option<String>,
title: String,
summary: Option<String>,
+ tags: Option<Vec<String>>,
#[serde(default)]
payload: Option<Map<String, Value>>,
#[serde(default)]
@@ -703,6 +1545,8 @@ struct NodeListToolArgs {
frontier_id: Option<String>,
class: Option<String>,
#[serde(default)]
+ tags: Vec<String>,
+ #[serde(default)]
include_archived: bool,
limit: Option<u32>,
}
@@ -731,6 +1575,7 @@ struct QuickNoteToolArgs {
frontier_id: Option<String>,
title: String,
body: String,
+ tags: Vec<String>,
#[serde(default)]
annotations: Vec<WireAnnotation>,
#[serde(default)]
diff --git a/crates/fidget-spinner-cli/tests/mcp_hardening.rs b/crates/fidget-spinner-cli/tests/mcp_hardening.rs
index 1c70562..6c81d7f 100644
--- a/crates/fidget-spinner-cli/tests/mcp_hardening.rs
+++ b/crates/fidget-spinner-cli/tests/mcp_hardening.rs
@@ -1,3 +1,4 @@
+use axum as _;
use std::fs;
use std::io::{self, BufRead, BufReader, Write};
use std::path::PathBuf;
@@ -9,9 +10,12 @@ use dirs as _;
use fidget_spinner_core::NonEmptyText;
use fidget_spinner_store_sqlite::{ListNodesQuery, ProjectStore};
use libmcp as _;
+use linkify as _;
+use maud as _;
use serde as _;
use serde_json::{Value, json};
use time as _;
+use tokio as _;
use uuid as _;
type TestResult<T = ()> = Result<T, Box<dyn std::error::Error>>;
@@ -183,21 +187,11 @@ fn cold_start_exposes_health_and_telemetry() -> TestResult {
let tools = harness.tools_list()?;
let tool_count = must_some(tools["result"]["tools"].as_array(), "tools array")?.len();
- assert!(tool_count >= 18);
+ assert!(tool_count >= 20);
let health = harness.call_tool(3, "system.health", json!({}))?;
- assert_eq!(
- tool_content(&health)["initialization"]["ready"].as_bool(),
- Some(true)
- );
- assert_eq!(
- tool_content(&health)["initialization"]["seed_captured"].as_bool(),
- Some(true)
- );
- assert_eq!(
- tool_content(&health)["binding"]["bound"].as_bool(),
- Some(false)
- );
+ assert_eq!(tool_content(&health)["ready"].as_bool(), Some(true));
+ assert_eq!(tool_content(&health)["bound"].as_bool(), Some(false));
let telemetry = harness.call_tool(4, "system.telemetry", json!({}))?;
assert!(tool_content(&telemetry)["requests"].as_u64().unwrap_or(0) >= 3);
@@ -234,13 +228,49 @@ fn tool_output_defaults_to_porcelain_and_supports_json_render() -> TestResult {
let porcelain = harness.call_tool(22, "project.status", json!({}))?;
let porcelain_text = must_some(tool_text(&porcelain), "porcelain project.status text")?;
- assert!(porcelain_text.contains("project_root:"));
+ assert!(porcelain_text.contains("root:"));
assert!(!porcelain_text.contains("\"project_root\":"));
- let json_render = harness.call_tool(23, "project.status", json!({"render": "json"}))?;
+ let health = harness.call_tool(23, "system.health", json!({}))?;
+ let health_text = must_some(tool_text(&health), "porcelain system.health text")?;
+ assert!(health_text.contains("ready | bound"));
+ assert!(health_text.contains("binary:"));
+
+ let frontier = harness.call_tool(
+ 24,
+ "frontier.init",
+ json!({
+ "label": "render frontier",
+ "objective": "exercise porcelain output",
+ "contract_title": "render contract",
+ "benchmark_suites": ["smoke"],
+ "promotion_criteria": ["retain key fields in porcelain"],
+ "primary_metric": {
+ "key": "score",
+ "unit": "count",
+ "objective": "maximize"
+ }
+ }),
+ )?;
+ assert_eq!(frontier["result"]["isError"].as_bool(), Some(false));
+
+ let frontier_list = harness.call_tool(25, "frontier.list", json!({}))?;
+ let frontier_text = must_some(tool_text(&frontier_list), "porcelain frontier.list text")?;
+ assert!(frontier_text.contains("render frontier"));
+ assert!(!frontier_text.contains("root_contract_node_id"));
+
+ let json_render = harness.call_tool(26, "project.status", json!({"render": "json"}))?;
let json_text = must_some(tool_text(&json_render), "json project.status text")?;
assert!(json_text.contains("\"project_root\":"));
assert!(json_text.trim_start().starts_with('{'));
+
+ let json_full = harness.call_tool(
+ 27,
+ "project.status",
+ json!({"render": "json", "detail": "full"}),
+ )?;
+ let json_full_text = must_some(tool_text(&json_full), "json full project.status text")?;
+ assert!(json_full_text.contains("\"schema\": {"));
Ok(())
}
@@ -389,6 +419,50 @@ fn unbound_project_tools_fail_with_bind_hint() -> TestResult {
}
#[test]
// Binding to an empty directory should bootstrap a fresh project store there;
// both the bind response and subsequent project.status must report that root,
// and the store must be openable on disk afterwards.
fn bind_bootstraps_empty_project_root() -> TestResult {
    let project_root = temp_project_root("bind_bootstrap")?;

    let mut harness = McpHarness::spawn(None, &[])?;
    let _ = harness.initialize()?;
    harness.notify_initialized()?;

    let bind = harness.bind_project(28, &project_root)?;
    assert_eq!(bind["result"]["isError"].as_bool(), Some(false));
    assert_eq!(
        tool_content(&bind)["project_root"].as_str(),
        Some(project_root.as_str())
    );

    let status = harness.call_tool(29, "project.status", json!({}))?;
    assert_eq!(status["result"]["isError"].as_bool(), Some(false));
    assert_eq!(
        tool_content(&status)["project_root"].as_str(),
        Some(project_root.as_str())
    );

    // Opening the store directly proves bind actually wrote one to disk.
    let store = must(ProjectStore::open(&project_root), "open bootstrapped store")?;
    assert_eq!(store.project_root().as_str(), project_root.as_str());
    Ok(())
}
+
#[test]
// A non-empty directory without an initialized project must not be silently
// adopted: bind should return an error instead of bootstrapping over it.
fn bind_rejects_nonempty_uninitialized_root() -> TestResult {
    let project_root = temp_project_root("bind_nonempty")?;
    // Seed an unrelated file so the directory is non-empty but uninitialized.
    must(
        fs::write(project_root.join("README.txt").as_std_path(), "occupied"),
        "seed nonempty directory",
    )?;

    let mut harness = McpHarness::spawn(None, &[])?;
    let _ = harness.initialize()?;
    harness.notify_initialized()?;

    let bind = harness.bind_project(30, &project_root)?;
    assert_eq!(bind["result"]["isError"].as_bool(), Some(true));
    Ok(())
}
+
+#[test]
fn bind_retargets_writes_to_sibling_project_root() -> TestResult {
let spinner_root = temp_project_root("spinner_root")?;
let libgrid_root = temp_project_root("libgrid_root")?;
@@ -404,31 +478,32 @@ fn bind_retargets_writes_to_sibling_project_root() -> TestResult {
let _ = harness.initialize()?;
harness.notify_initialized()?;
- let initial_status = harness.call_tool(30, "project.status", json!({}))?;
+ let initial_status = harness.call_tool(31, "project.status", json!({}))?;
assert_eq!(
tool_content(&initial_status)["project_root"].as_str(),
Some(spinner_root.as_str())
);
- let rebind = harness.bind_project(31, &notes_dir)?;
+ let rebind = harness.bind_project(32, &notes_dir)?;
assert_eq!(rebind["result"]["isError"].as_bool(), Some(false));
assert_eq!(
tool_content(&rebind)["project_root"].as_str(),
Some(libgrid_root.as_str())
);
- let status = harness.call_tool(32, "project.status", json!({}))?;
+ let status = harness.call_tool(33, "project.status", json!({}))?;
assert_eq!(
tool_content(&status)["project_root"].as_str(),
Some(libgrid_root.as_str())
);
let note = harness.call_tool(
- 33,
+ 34,
"note.quick",
json!({
"title": "libgrid dogfood note",
"body": "rebind should redirect writes",
+ "tags": [],
}),
)?;
assert_eq!(note["result"]["isError"].as_bool(), Some(false));
@@ -453,3 +528,57 @@ fn bind_retargets_writes_to_sibling_project_root() -> TestResult {
);
Ok(())
}
+
#[test]
// End-to-end check of the tag registry: note.quick without a "tags" field
// fails; tag.add registers a tag; tag.list shows it; a tagged note succeeds;
// and node.list can filter by that tag.
fn tag_registry_drives_note_creation_and_lookup() -> TestResult {
    let project_root = temp_project_root("tag_registry")?;
    init_project(&project_root)?;

    let mut harness = McpHarness::spawn(None, &[])?;
    let _ = harness.initialize()?;
    harness.notify_initialized()?;
    let bind = harness.bind_project(40, &project_root)?;
    assert_eq!(bind["result"]["isError"].as_bool(), Some(false));

    // Omitting "tags" entirely must be rejected (explicit tag list required).
    let missing_tags = harness.call_tool(
        41,
        "note.quick",
        json!({
            "title": "untagged",
            "body": "should fail",
        }),
    )?;
    assert_eq!(missing_tags["result"]["isError"].as_bool(), Some(true));

    let tag = harness.call_tool(
        42,
        "tag.add",
        json!({
            "name": "dogfood/mcp",
            "description": "MCP dogfood observations",
        }),
    )?;
    assert_eq!(tag["result"]["isError"].as_bool(), Some(false));

    let tag_list = harness.call_tool(43, "tag.list", json!({}))?;
    let tags = must_some(tool_content(&tag_list).as_array(), "tag list")?;
    assert_eq!(tags.len(), 1);
    assert_eq!(tags[0]["name"].as_str(), Some("dogfood/mcp"));

    // Referencing the registered tag lets the note through.
    let note = harness.call_tool(
        44,
        "note.quick",
        json!({
            "title": "tagged note",
            "body": "tagged lookup should work",
            "tags": ["dogfood/mcp"],
        }),
    )?;
    assert_eq!(note["result"]["isError"].as_bool(), Some(false));

    // The new tag filter on node.list should return exactly the tagged note.
    let filtered = harness.call_tool(45, "node.list", json!({"tags": ["dogfood/mcp"]}))?;
    let nodes = must_some(tool_content(&filtered).as_array(), "filtered nodes")?;
    assert_eq!(nodes.len(), 1);
    assert_eq!(nodes[0]["tags"][0].as_str(), Some("dogfood/mcp"));
    Ok(())
}