swarm repositories / source
aboutsummaryrefslogtreecommitdiff
path: root/crates/fidget-spinner-cli/src
diff options
context:
space:
mode:
authormain <main@swarm.moe>2026-03-19 10:15:18 -0400
committermain <main@swarm.moe>2026-03-19 10:15:18 -0400
commit7b9bd8b42883f82b090718175b8316296ef18236 (patch)
tree16f2c70b0f630c7757d72a20bd90d17c2e3a8414 /crates/fidget-spinner-cli/src
downloadfidget_spinner-7b9bd8b42883f82b090718175b8316296ef18236.zip
Initial Fidget Spinner MVP
Diffstat (limited to 'crates/fidget-spinner-cli/src')
-rw-r--r--crates/fidget-spinner-cli/src/bundled_skill.rs69
-rw-r--r--crates/fidget-spinner-cli/src/main.rs958
-rw-r--r--crates/fidget-spinner-cli/src/mcp/catalog.rs541
-rw-r--r--crates/fidget-spinner-cli/src/mcp/fault.rs99
-rw-r--r--crates/fidget-spinner-cli/src/mcp/host/binary.rs43
-rw-r--r--crates/fidget-spinner-cli/src/mcp/host/config.rs18
-rw-r--r--crates/fidget-spinner-cli/src/mcp/host/mod.rs8
-rw-r--r--crates/fidget-spinner-cli/src/mcp/host/process.rs246
-rw-r--r--crates/fidget-spinner-cli/src/mcp/host/runtime.rs719
-rw-r--r--crates/fidget-spinner-cli/src/mcp/mod.rs10
-rw-r--r--crates/fidget-spinner-cli/src/mcp/protocol.rs86
-rw-r--r--crates/fidget-spinner-cli/src/mcp/service.rs813
-rw-r--r--crates/fidget-spinner-cli/src/mcp/telemetry.rs103
-rw-r--r--crates/fidget-spinner-cli/src/mcp/worker.rs66
14 files changed, 3779 insertions, 0 deletions
diff --git a/crates/fidget-spinner-cli/src/bundled_skill.rs b/crates/fidget-spinner-cli/src/bundled_skill.rs
new file mode 100644
index 0000000..85760bc
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/bundled_skill.rs
@@ -0,0 +1,69 @@
+use serde::Serialize;
+
/// A skill embedded in the binary: metadata plus the full SKILL.md body.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub(crate) struct BundledSkill {
    pub name: &'static str,
    pub description: &'static str,
    // URI under which this skill is addressed (e.g. as an MCP resource).
    pub resource_uri: &'static str,
    // Full markdown contents of the skill's SKILL.md, embedded at build time.
    pub body: &'static str,
}

/// Serializable view of a [`BundledSkill`] without the (large) markdown body.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize)]
pub(crate) struct BundledSkillSummary {
    pub name: &'static str,
    pub description: &'static str,
    pub resource_uri: &'static str,
}

impl BundledSkill {
    /// Project this skill down to its body-less summary.
    #[must_use]
    pub const fn summary(self) -> BundledSkillSummary {
        BundledSkillSummary {
            name: self.name,
            description: self.description,
            resource_uri: self.resource_uri,
        }
    }
}
+
/// Skills compiled into the binary.
///
/// The markdown bodies are embedded via `include_str!`, so the binary is
/// self-contained. Index 0 is the default skill; the accessors below index
/// into this array directly.
const BUNDLED_SKILLS: [BundledSkill; 2] = [
    BundledSkill {
        name: "fidget-spinner",
        description: "Base skill for working inside a Fidget Spinner project through the local DAG and MCP surface.",
        resource_uri: "fidget-spinner://skill/fidget-spinner",
        body: include_str!("../../../assets/codex-skills/fidget-spinner/SKILL.md"),
    },
    BundledSkill {
        name: "frontier-loop",
        description: "Aggressive autonomous frontier-push specialization for Fidget Spinner.",
        resource_uri: "fidget-spinner://skill/frontier-loop",
        body: include_str!("../../../assets/codex-skills/frontier-loop/SKILL.md"),
    },
];

/// The default skill (`fidget-spinner`), used when no skill name is requested.
#[must_use]
pub(crate) const fn default_bundled_skill() -> BundledSkill {
    BUNDLED_SKILLS[0]
}

/// The `frontier-loop` specialization skill.
#[must_use]
pub(crate) const fn frontier_loop_bundled_skill() -> BundledSkill {
    BUNDLED_SKILLS[1]
}
+
+#[must_use]
+pub(crate) fn bundled_skill(name: &str) -> Option<BundledSkill> {
+ BUNDLED_SKILLS
+ .iter()
+ .copied()
+ .find(|skill| skill.name == name)
+}
+
+#[must_use]
+pub(crate) fn bundled_skill_summaries() -> Vec<BundledSkillSummary> {
+ BUNDLED_SKILLS
+ .iter()
+ .copied()
+ .map(BundledSkill::summary)
+ .collect()
+}
diff --git a/crates/fidget-spinner-cli/src/main.rs b/crates/fidget-spinner-cli/src/main.rs
new file mode 100644
index 0000000..9b2b8ae
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/main.rs
@@ -0,0 +1,958 @@
+mod bundled_skill;
+mod mcp;
+
+use std::collections::{BTreeMap, BTreeSet};
+use std::fs;
+use std::path::{Path, PathBuf};
+
+use camino::{Utf8Path, Utf8PathBuf};
+use clap::{Args, Parser, Subcommand, ValueEnum};
+use fidget_spinner_core::{
+ AnnotationVisibility, CodeSnapshotRef, CommandRecipe, ExecutionBackend, FrontierContract,
+ FrontierNote, FrontierVerdict, GitCommitHash, MetricObservation, MetricSpec, MetricUnit,
+ NodeAnnotation, NodeClass, NodePayload, NonEmptyText, OptimizationObjective,
+};
+use fidget_spinner_store_sqlite::{
+ CloseExperimentRequest, CreateFrontierRequest, CreateNodeRequest, EdgeAttachment,
+ EdgeAttachmentDirection, ListNodesQuery, ProjectStore, StoreError,
+};
+use serde::Serialize;
+use serde_json::{Map, Value, json};
+use uuid::Uuid;
+
// Top-level CLI parser: `fidget-spinner <command>`.
// NOTE: plain `//` comments are used throughout the clap types because `///`
// doc comments would become user-visible help text and change CLI output.
#[derive(Parser)]
#[command(author, version, about = "Fidget Spinner local project CLI")]
struct Cli {
    #[command(subcommand)]
    command: Command,
}

// All top-level subcommands; each maps to one `run_*` handler in `run`.
#[derive(Subcommand)]
enum Command {
    // Initialize a new project store.
    Init(InitArgs),
    // Inspect the project payload schema.
    Schema {
        #[command(subcommand)]
        command: SchemaCommand,
    },
    // Create and inspect frontiers.
    Frontier {
        #[command(subcommand)]
        command: FrontierCommand,
    },
    // Create, list, annotate, and archive DAG nodes.
    Node {
        #[command(subcommand)]
        command: NodeCommand,
    },
    // Quick note capture.
    Note(NoteCommand),
    // Quick research capture.
    Research(ResearchCommand),
    // Experiment lifecycle operations.
    Experiment {
        #[command(subcommand)]
        command: ExperimentCommand,
    },
    // Run the MCP server (and its hidden worker mode).
    Mcp {
        #[command(subcommand)]
        command: McpCommand,
    },
    // List, install, and print bundled skills.
    Skill {
        #[command(subcommand)]
        command: SkillCommand,
    },
}
+
// Arguments for `init`: create a new project store.
#[derive(Args)]
struct InitArgs {
    // Project root directory; defaults to the current directory.
    #[arg(long, default_value = ".")]
    project: PathBuf,
    // Display name; when omitted, derived from the directory name (see run_init).
    #[arg(long)]
    name: Option<String>,
    #[arg(long, default_value = "local.project")]
    namespace: String,
}

// `schema` subcommands.
#[derive(Subcommand)]
enum SchemaCommand {
    // Print the project payload schema as JSON.
    Show(ProjectArg),
}

// `frontier` subcommands.
#[derive(Subcommand)]
enum FrontierCommand {
    Init(FrontierInitArgs),
    Status(FrontierStatusArgs),
}

// Arguments for `frontier init`: create a frontier plus its contract and
// evaluation protocol, seeded from an auto-captured checkpoint.
#[derive(Args)]
struct FrontierInitArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long)]
    label: String,
    // Objective statement stored in the frontier contract.
    #[arg(long)]
    objective: String,
    #[arg(long, default_value = "frontier contract")]
    contract_title: String,
    #[arg(long)]
    contract_summary: Option<String>,
    // Repeatable: benchmark suites for the evaluation protocol.
    #[arg(long = "benchmark-suite")]
    benchmark_suites: Vec<String>,
    // Repeatable: criteria a candidate must meet for promotion.
    #[arg(long = "promotion-criterion")]
    promotion_criteria: Vec<String>,
    #[arg(long = "primary-metric-key")]
    primary_metric_key: String,
    #[arg(long = "primary-metric-unit", value_enum)]
    primary_metric_unit: CliMetricUnit,
    #[arg(long = "primary-metric-objective", value_enum)]
    primary_metric_objective: CliOptimizationObjective,
    // Summary text for the automatically captured seed checkpoint.
    #[arg(long = "seed-summary", default_value = "initial champion checkpoint")]
    seed_summary: String,
}

// Arguments for `frontier status`.
#[derive(Args)]
struct FrontierStatusArgs {
    #[command(flatten)]
    project: ProjectArg,
    // Frontier id (UUID); when omitted, a lone frontier is shown directly,
    // otherwise all frontiers are listed (see run_frontier_status).
    #[arg(long)]
    frontier: Option<String>,
}

// `node` subcommands.
#[derive(Subcommand)]
enum NodeCommand {
    Add(NodeAddArgs),
    List(NodeListArgs),
    Show(NodeShowArgs),
    Annotate(NodeAnnotateArgs),
    Archive(NodeArchiveArgs),
}
+
// Arguments for `node add`: create a DAG node of any class.
#[derive(Args)]
struct NodeAddArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long, value_enum)]
    class: CliNodeClass,
    // Optional frontier (UUID) the node belongs to.
    #[arg(long)]
    frontier: Option<String>,
    #[arg(long)]
    title: String,
    #[arg(long)]
    summary: Option<String>,
    // Payload sources; `load_payload` merges them (fields win last).
    #[arg(long = "payload-json")]
    payload_json: Option<String>,
    #[arg(long = "payload-file")]
    payload_file: Option<PathBuf>,
    // Repeatable `key=value` payload overrides.
    #[arg(long = "field")]
    fields: Vec<String>,
    // Repeatable annotation bodies (stored hidden-by-default).
    #[arg(long = "annotation")]
    annotations: Vec<String>,
    // Repeatable parent node ids; become lineage edges to the new node.
    #[arg(long = "parent")]
    parents: Vec<String>,
}

// Arguments for `node list` with optional filters.
#[derive(Args)]
struct NodeListArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long)]
    frontier: Option<String>,
    #[arg(long, value_enum)]
    class: Option<CliNodeClass>,
    #[arg(long)]
    include_archived: bool,
    #[arg(long, default_value_t = 20)]
    limit: u32,
}

// Arguments for `node show`: fetch a single node by id.
#[derive(Args)]
struct NodeShowArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long)]
    node: String,
}

// Arguments for `node annotate`: attach an annotation to an existing node.
#[derive(Args)]
struct NodeAnnotateArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long)]
    node: String,
    #[arg(long)]
    body: String,
    #[arg(long)]
    label: Option<String>,
    // When set, the annotation is visible; otherwise hidden-by-default.
    #[arg(long)]
    visible: bool,
}

// Arguments for `node archive`.
#[derive(Args)]
struct NodeArchiveArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long)]
    node: String,
}
+
// `note` command wrapper.
#[derive(Args)]
struct NoteCommand {
    #[command(subcommand)]
    command: NoteSubcommand,
}

#[derive(Subcommand)]
enum NoteSubcommand {
    // Capture a quick Note node.
    Quick(QuickNoteArgs),
}

// `research` command wrapper.
#[derive(Args)]
struct ResearchCommand {
    #[command(subcommand)]
    command: ResearchSubcommand,
}

#[derive(Subcommand)]
enum ResearchSubcommand {
    // Capture a quick Research node.
    Add(QuickResearchArgs),
}

// Arguments for `note quick`.
#[derive(Args)]
struct QuickNoteArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long)]
    frontier: Option<String>,
    #[arg(long)]
    title: String,
    // Free-form body stored in the node payload.
    #[arg(long)]
    body: String,
    // Repeatable parent node ids (lineage edges).
    #[arg(long = "parent")]
    parents: Vec<String>,
}

// Arguments for `research add`.
#[derive(Args)]
struct QuickResearchArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long)]
    frontier: Option<String>,
    #[arg(long)]
    title: String,
    #[arg(long)]
    body: String,
    #[arg(long)]
    summary: Option<String>,
    #[arg(long = "parent")]
    parents: Vec<String>,
}

// `experiment` subcommands.
#[derive(Subcommand)]
enum ExperimentCommand {
    Close(ExperimentCloseArgs),
}

// `mcp` subcommands. `Worker` is hidden: it is spawned internally by the host
// process, not invoked by users.
#[derive(Subcommand)]
enum McpCommand {
    Serve(McpServeArgs),
    #[command(hide = true)]
    Worker(McpWorkerArgs),
}
+
// Arguments for `experiment close`: everything needed to record a full
// experiment round (candidate snapshot, run, metrics, note, verdict, decision).
#[derive(Args)]
struct ExperimentCloseArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long)]
    frontier: String,
    // Checkpoint the candidate was derived from.
    #[arg(long = "base-checkpoint")]
    base_checkpoint: String,
    // Change node describing the modification under test.
    #[arg(long = "change-node")]
    change_node: String,
    #[arg(long = "candidate-summary")]
    candidate_summary: String,
    #[arg(long = "run-title")]
    run_title: String,
    #[arg(long = "run-summary")]
    run_summary: Option<String>,
    #[arg(long = "benchmark-suite")]
    benchmark_suite: String,
    #[arg(long = "backend", value_enum, default_value_t = CliExecutionBackend::Worktree)]
    backend: CliExecutionBackend,
    // Working directory for the command recipe; defaults to the project root.
    #[arg(long = "cwd")]
    working_directory: Option<PathBuf>,
    // Repeatable: command argv for the recorded run recipe.
    #[arg(long = "argv")]
    argv: Vec<String>,
    // Repeatable KEY=VALUE pairs; malformed entries are ignored (see parse_env).
    #[arg(long = "env")]
    env: Vec<String>,
    #[arg(long = "primary-metric-key")]
    primary_metric_key: String,
    #[arg(long = "primary-metric-unit", value_enum)]
    primary_metric_unit: CliMetricUnit,
    #[arg(long = "primary-metric-objective", value_enum)]
    primary_metric_objective: CliOptimizationObjective,
    #[arg(long = "primary-metric-value")]
    primary_metric_value: f64,
    // Repeatable supporting metrics as `key:unit:objective:value`.
    #[arg(long = "metric")]
    metrics: Vec<String>,
    #[arg(long)]
    note: String,
    #[arg(long = "next-hypothesis")]
    next_hypotheses: Vec<String>,
    #[arg(long = "verdict", value_enum)]
    verdict: CliFrontierVerdict,
    #[arg(long = "decision-title")]
    decision_title: String,
    #[arg(long = "decision-rationale")]
    decision_rationale: String,
}

// `skill` subcommands.
#[derive(Subcommand)]
enum SkillCommand {
    List,
    Install(SkillInstallArgs),
    Show(SkillShowArgs),
}

// Arguments for `skill install`. With no name, every bundled skill is installed.
#[derive(Args)]
struct SkillInstallArgs {
    #[arg(long)]
    name: Option<String>,
    // Install target; defaults to ~/.codex/skills (see default_skill_root).
    #[arg(long)]
    destination: Option<PathBuf>,
}

// Arguments for `skill show`; no name means the default skill.
#[derive(Args)]
struct SkillShowArgs {
    #[arg(long)]
    name: Option<String>,
}

// Shared `--project` flag, flattened into most subcommands.
#[derive(Args)]
struct ProjectArg {
    #[arg(long, default_value = ".")]
    project: PathBuf,
}

// Arguments for `mcp serve`.
#[derive(Args)]
struct McpServeArgs {
    #[arg(long)]
    project: Option<PathBuf>,
}

// Arguments for the hidden `mcp worker` mode.
#[derive(Args)]
struct McpWorkerArgs {
    #[arg(long)]
    project: PathBuf,
}
+
// CLI-facing mirrors of the core enums. Each one exists so clap can derive
// ValueEnum without the core crate depending on clap; the `From` impls at the
// bottom of this file map them 1:1 onto the core types.

// Mirror of fidget_spinner_core::NodeClass.
#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)]
enum CliNodeClass {
    Contract,
    Change,
    Run,
    Analysis,
    Decision,
    Research,
    Enabling,
    Note,
}

// Mirror of fidget_spinner_core::MetricUnit.
#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)]
enum CliMetricUnit {
    Seconds,
    Bytes,
    Count,
    Ratio,
    Custom,
}

// Mirror of fidget_spinner_core::OptimizationObjective.
#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)]
enum CliOptimizationObjective {
    Minimize,
    Maximize,
    Target,
}

// Mirror of fidget_spinner_core::ExecutionBackend.
#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)]
enum CliExecutionBackend {
    Local,
    Worktree,
    Ssh,
}

// Mirror of fidget_spinner_core::FrontierVerdict.
#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)]
enum CliFrontierVerdict {
    PromoteToChampion,
    KeepOnFrontier,
    RevertToChampion,
    ArchiveDeadEnd,
    NeedsMoreEvidence,
}
+
+fn main() {
+ if let Err(error) = run() {
+ eprintln!("error: {error}");
+ std::process::exit(1);
+ }
+}
+
/// Parse the command line and dispatch to the matching handler.
///
/// Every handler funnels its failures into `StoreError`, which `main`
/// renders to stderr.
fn run() -> Result<(), StoreError> {
    let cli = Cli::parse();
    match cli.command {
        Command::Init(args) => run_init(args),
        Command::Schema { command } => match command {
            // `schema show` is small enough to handle inline.
            SchemaCommand::Show(project) => {
                let store = open_store(&project.project)?;
                print_json(store.schema())
            }
        },
        Command::Frontier { command } => match command {
            FrontierCommand::Init(args) => run_frontier_init(args),
            FrontierCommand::Status(args) => run_frontier_status(args),
        },
        Command::Node { command } => match command {
            NodeCommand::Add(args) => run_node_add(args),
            NodeCommand::List(args) => run_node_list(args),
            NodeCommand::Show(args) => run_node_show(args),
            NodeCommand::Annotate(args) => run_node_annotate(args),
            NodeCommand::Archive(args) => run_node_archive(args),
        },
        Command::Note(command) => match command.command {
            NoteSubcommand::Quick(args) => run_quick_note(args),
        },
        Command::Research(command) => match command.command {
            ResearchSubcommand::Add(args) => run_quick_research(args),
        },
        Command::Experiment { command } => match command {
            ExperimentCommand::Close(args) => run_experiment_close(args),
        },
        Command::Mcp { command } => match command {
            McpCommand::Serve(args) => mcp::serve(args.project),
            McpCommand::Worker(args) => mcp::serve_worker(args.project),
        },
        Command::Skill { command } => match command {
            SkillCommand::List => print_json(&bundled_skill::bundled_skill_summaries()),
            SkillCommand::Install(args) => run_skill_install(args),
            // `skill show` prints the raw markdown body, not JSON.
            SkillCommand::Show(args) => {
                println!("{}", resolve_bundled_skill(args.name.as_deref())?.body);
                Ok(())
            }
        },
    }
}
+
/// Handle `init`: create the project store and print its key paths.
fn run_init(args: InitArgs) -> Result<(), StoreError> {
    let project_root = utf8_path(args.project);
    // Default the display name to the directory name, with a fixed fallback
    // when the path has no final component (e.g. `/`).
    let display_name = NonEmptyText::new(args.name.unwrap_or_else(|| {
        project_root
            .file_name()
            .map_or_else(|| "fidget-spinner-project".to_owned(), ToOwned::to_owned)
    }))?;
    let namespace = NonEmptyText::new(args.namespace)?;
    let store = ProjectStore::init(&project_root, display_name, namespace)?;
    println!("initialized {}", store.state_root());
    println!("project: {}", store.config().display_name);
    println!("schema: {}", store.state_root().join("schema.json"));
    Ok(())
}

/// Handle `frontier init`: capture a seed checkpoint, then create the
/// frontier with its contract and evaluation protocol; prints the projection.
fn run_frontier_init(args: FrontierInitArgs) -> Result<(), StoreError> {
    let mut store = open_store(&args.project.project)?;
    // The seed checkpoint is captured before the frontier exists so it can
    // serve as the frontier's initial champion.
    let initial_checkpoint =
        store.auto_capture_checkpoint(NonEmptyText::new(args.seed_summary)?)?;
    let projection = store.create_frontier(CreateFrontierRequest {
        label: NonEmptyText::new(args.label)?,
        contract_title: NonEmptyText::new(args.contract_title)?,
        contract_summary: args.contract_summary.map(NonEmptyText::new).transpose()?,
        contract: FrontierContract {
            objective: NonEmptyText::new(args.objective)?,
            evaluation: fidget_spinner_core::EvaluationProtocol {
                benchmark_suites: to_text_set(args.benchmark_suites)?,
                primary_metric: MetricSpec {
                    metric_key: NonEmptyText::new(args.primary_metric_key)?,
                    unit: args.primary_metric_unit.into(),
                    objective: args.primary_metric_objective.into(),
                },
                // Supporting metrics are not configurable from the CLI yet.
                supporting_metrics: BTreeSet::new(),
            },
            promotion_criteria: to_text_vec(args.promotion_criteria)?,
        },
        initial_checkpoint,
    })?;
    print_json(&projection)
}

/// Handle `frontier status`: show one frontier when identified (or when it is
/// the only one); otherwise list all frontiers.
fn run_frontier_status(args: FrontierStatusArgs) -> Result<(), StoreError> {
    let store = open_store(&args.project.project)?;
    if let Some(frontier) = args.frontier {
        let projection = store.frontier_projection(parse_frontier_id(&frontier)?)?;
        return print_json(&projection);
    }
    let frontiers = store.list_frontiers()?;
    // With exactly one frontier there is no ambiguity — show its projection.
    if frontiers.len() == 1 {
        return print_json(&store.frontier_projection(frontiers[0].id)?);
    }
    print_json(&frontiers)
}
+
/// Handle `node add`: build the payload and annotations, create the node,
/// and print it as JSON.
fn run_node_add(args: NodeAddArgs) -> Result<(), StoreError> {
    let mut store = open_store(&args.project.project)?;
    let frontier_id = args
        .frontier
        .as_deref()
        .map(parse_frontier_id)
        .transpose()?;
    // Merge --payload-json / --payload-file / --field into one payload map.
    let payload = load_payload(
        store.schema().schema_ref(),
        args.payload_json,
        args.payload_file,
        args.fields,
    )?;
    // CLI-provided annotations are always created hidden-by-default.
    let annotations = args
        .annotations
        .into_iter()
        .map(|body| Ok(NodeAnnotation::hidden(NonEmptyText::new(body)?)))
        .collect::<Result<Vec<_>, StoreError>>()?;
    let node = store.add_node(CreateNodeRequest {
        class: args.class.into(),
        frontier_id,
        title: NonEmptyText::new(args.title)?,
        summary: args.summary.map(NonEmptyText::new).transpose()?,
        payload,
        annotations,
        attachments: lineage_attachments(args.parents)?,
    })?;
    print_json(&node)
}

/// Handle `node list` with optional frontier/class filters.
fn run_node_list(args: NodeListArgs) -> Result<(), StoreError> {
    let store = open_store(&args.project.project)?;
    let items = store.list_nodes(ListNodesQuery {
        frontier_id: args
            .frontier
            .as_deref()
            .map(parse_frontier_id)
            .transpose()?,
        class: args.class.map(Into::into),
        include_archived: args.include_archived,
        limit: args.limit,
    })?;
    print_json(&items)
}

/// Handle `node show`: fetch one node or fail with `NodeNotFound`.
fn run_node_show(args: NodeShowArgs) -> Result<(), StoreError> {
    let store = open_store(&args.project.project)?;
    let node_id = parse_node_id(&args.node)?;
    let node = store
        .get_node(node_id)?
        .ok_or(StoreError::NodeNotFound(node_id))?;
    print_json(&node)
}

/// Handle `node annotate`: attach a new annotation to an existing node.
fn run_node_annotate(args: NodeAnnotateArgs) -> Result<(), StoreError> {
    let mut store = open_store(&args.project.project)?;
    let annotation = NodeAnnotation {
        id: fidget_spinner_core::AnnotationId::fresh(),
        // `--visible` opts the annotation into default visibility.
        visibility: if args.visible {
            AnnotationVisibility::Visible
        } else {
            AnnotationVisibility::HiddenByDefault
        },
        label: args.label.map(NonEmptyText::new).transpose()?,
        body: NonEmptyText::new(args.body)?,
        created_at: time::OffsetDateTime::now_utc(),
    };
    store.annotate_node(parse_node_id(&args.node)?, annotation)?;
    println!("annotated {}", args.node);
    Ok(())
}

/// Handle `node archive`.
fn run_node_archive(args: NodeArchiveArgs) -> Result<(), StoreError> {
    let mut store = open_store(&args.project.project)?;
    store.archive_node(parse_node_id(&args.node)?)?;
    println!("archived {}", args.node);
    Ok(())
}
+
/// Handle `note quick`: create a Note node whose payload is `{ "body": ... }`.
fn run_quick_note(args: QuickNoteArgs) -> Result<(), StoreError> {
    let mut store = open_store(&args.project.project)?;
    let payload = NodePayload::with_schema(
        store.schema().schema_ref(),
        json_object(json!({ "body": args.body }))?,
    );
    let node = store.add_node(CreateNodeRequest {
        class: NodeClass::Note,
        frontier_id: args
            .frontier
            .as_deref()
            .map(parse_frontier_id)
            .transpose()?,
        title: NonEmptyText::new(args.title)?,
        summary: None,
        payload,
        annotations: Vec::new(),
        attachments: lineage_attachments(args.parents)?,
    })?;
    print_json(&node)
}

/// Handle `research add`: like `note quick` but creates a Research node and
/// accepts an optional summary.
fn run_quick_research(args: QuickResearchArgs) -> Result<(), StoreError> {
    let mut store = open_store(&args.project.project)?;
    let payload = NodePayload::with_schema(
        store.schema().schema_ref(),
        json_object(json!({ "body": args.body }))?,
    );
    let node = store.add_node(CreateNodeRequest {
        class: NodeClass::Research,
        frontier_id: args
            .frontier
            .as_deref()
            .map(parse_frontier_id)
            .transpose()?,
        title: NonEmptyText::new(args.title)?,
        summary: args.summary.map(NonEmptyText::new).transpose()?,
        payload,
        annotations: Vec::new(),
        attachments: lineage_attachments(args.parents)?,
    })?;
    print_json(&node)
}
+
/// Handle `experiment close`: capture a candidate snapshot, assemble the full
/// close request (run, metrics, note, verdict, decision), and print the
/// store's receipt.
fn run_experiment_close(args: ExperimentCloseArgs) -> Result<(), StoreError> {
    let mut store = open_store(&args.project.project)?;
    let frontier_id = parse_frontier_id(&args.frontier)?;
    // candidate_summary is needed again below for the request, hence the clone.
    let snapshot = store
        .auto_capture_checkpoint(NonEmptyText::new(args.candidate_summary.clone())?)?
        .map(|seed| seed.snapshot)
        // Closing an experiment requires a git-backed snapshot; fail loudly
        // when one could not be captured.
        .ok_or(StoreError::GitInspectionFailed(
            store.project_root().to_path_buf(),
        ))?;
    // Re-runnable command recipe; cwd defaults to the project root.
    let command = CommandRecipe::new(
        args.working_directory
            .map(utf8_path)
            .unwrap_or_else(|| store.project_root().to_path_buf()),
        to_text_vec(args.argv)?,
        parse_env(args.env),
    )?;
    let receipt = store.close_experiment(CloseExperimentRequest {
        frontier_id,
        base_checkpoint_id: parse_checkpoint_id(&args.base_checkpoint)?,
        change_node_id: parse_node_id(&args.change_node)?,
        candidate_summary: NonEmptyText::new(args.candidate_summary)?,
        candidate_snapshot: snapshot,
        run_title: NonEmptyText::new(args.run_title)?,
        run_summary: args.run_summary.map(NonEmptyText::new).transpose()?,
        backend: args.backend.into(),
        benchmark_suite: NonEmptyText::new(args.benchmark_suite)?,
        command,
        code_snapshot: Some(capture_code_snapshot(store.project_root())?),
        primary_metric: MetricObservation {
            metric_key: NonEmptyText::new(args.primary_metric_key)?,
            unit: args.primary_metric_unit.into(),
            objective: args.primary_metric_objective.into(),
            value: args.primary_metric_value,
        },
        // Supporting metrics come in as `key:unit:objective:value` flags.
        supporting_metrics: args
            .metrics
            .into_iter()
            .map(parse_metric_observation)
            .collect::<Result<Vec<_>, _>>()?,
        note: FrontierNote {
            summary: NonEmptyText::new(args.note)?,
            next_hypotheses: to_text_vec(args.next_hypotheses)?,
        },
        verdict: args.verdict.into(),
        decision_title: NonEmptyText::new(args.decision_title)?,
        decision_rationale: NonEmptyText::new(args.decision_rationale)?,
        // No analysis node is attached from the CLI path.
        analysis_node_id: None,
    })?;
    print_json(&receipt)
}
+
+fn run_skill_install(args: SkillInstallArgs) -> Result<(), StoreError> {
+ if let Some(name) = args.name.as_deref() {
+ let skill = resolve_bundled_skill(Some(name))?;
+ let destination = args
+ .destination
+ .unwrap_or(default_skill_root()?.join(skill.name));
+ install_skill(skill, &destination)?;
+ println!("{}", destination.display());
+ } else {
+ let destination_root = args.destination.unwrap_or(default_skill_root()?);
+ for skill in bundled_skill::bundled_skill_summaries() {
+ let destination = destination_root.join(skill.name);
+ install_skill(resolve_bundled_skill(Some(skill.name))?, &destination)?;
+ println!("{}", destination.display());
+ }
+ }
+ Ok(())
+}
+
+fn resolve_bundled_skill(
+ requested_name: Option<&str>,
+) -> Result<bundled_skill::BundledSkill, StoreError> {
+ requested_name.map_or_else(
+ || Ok(bundled_skill::default_bundled_skill()),
+ |name| {
+ bundled_skill::bundled_skill(name)
+ .ok_or_else(|| invalid_input(format!("unknown bundled skill `{name}`")))
+ },
+ )
+}
+
+fn default_skill_root() -> Result<PathBuf, StoreError> {
+ dirs::home_dir()
+ .map(|home| home.join(".codex/skills"))
+ .ok_or_else(|| invalid_input("home directory not found"))
+}
+
+fn install_skill(skill: bundled_skill::BundledSkill, destination: &Path) -> Result<(), StoreError> {
+ fs::create_dir_all(destination)?;
+ fs::write(destination.join("SKILL.md"), skill.body)?;
+ Ok(())
+}
+
+fn open_store(path: &Path) -> Result<ProjectStore, StoreError> {
+ ProjectStore::open(utf8_path(path.to_path_buf()))
+}
+
+fn utf8_path(path: impl Into<PathBuf>) -> Utf8PathBuf {
+ Utf8PathBuf::from(path.into().to_string_lossy().into_owned())
+}
+
+fn to_text_vec(values: Vec<String>) -> Result<Vec<NonEmptyText>, StoreError> {
+ values
+ .into_iter()
+ .map(NonEmptyText::new)
+ .collect::<Result<Vec<_>, _>>()
+ .map_err(StoreError::from)
+}
+
+fn to_text_set(values: Vec<String>) -> Result<BTreeSet<NonEmptyText>, StoreError> {
+ to_text_vec(values).map(BTreeSet::from_iter)
+}
+
/// Parse repeatable `KEY=VALUE` flags into an environment map.
///
/// Entries without an `=` are silently dropped; on duplicate keys the last
/// occurrence wins. Only the first `=` splits, so values may contain `=`.
fn parse_env(values: Vec<String>) -> BTreeMap<String, String> {
    let mut env = BTreeMap::new();
    for entry in values {
        if let Some((key, value)) = entry.split_once('=') {
            env.insert(key.to_owned(), value.to_owned());
        }
    }
    env
}
+
+fn lineage_attachments(parents: Vec<String>) -> Result<Vec<EdgeAttachment>, StoreError> {
+ parents
+ .into_iter()
+ .map(|parent| {
+ Ok(EdgeAttachment {
+ node_id: parse_node_id(&parent)?,
+ kind: fidget_spinner_core::EdgeKind::Lineage,
+ direction: EdgeAttachmentDirection::ExistingToNew,
+ })
+ })
+ .collect()
+}
+
/// Assemble a node payload from up to three sources, merged in order so that
/// later sources override earlier ones: inline JSON (`--payload-json`), a
/// JSON file (`--payload-file`), then individual `--field key=value` entries.
fn load_payload(
    schema: fidget_spinner_core::PayloadSchemaRef,
    payload_json: Option<String>,
    payload_file: Option<PathBuf>,
    fields: Vec<String>,
) -> Result<NodePayload, StoreError> {
    let mut map = Map::new();
    if let Some(text) = payload_json {
        map.extend(json_object(serde_json::from_str::<Value>(&text)?)?);
    }
    if let Some(path) = payload_file {
        let text = fs::read_to_string(path)?;
        map.extend(json_object(serde_json::from_str::<Value>(&text)?)?);
    }
    for field in fields {
        // Fields without `=` are silently ignored.
        let Some((key, raw_value)) = field.split_once('=') else {
            continue;
        };
        // Interpret the value as JSON when possible (numbers, booleans,
        // objects); otherwise fall back to treating it as a bare string.
        let value = serde_json::from_str::<Value>(raw_value).unwrap_or_else(|_| json!(raw_value));
        let _ = map.insert(key.to_owned(), value);
    }
    Ok(NodePayload::with_schema(schema, map))
}
+
/// Require `value` to be a JSON object and extract its map; any other JSON
/// type becomes an invalid-input error.
fn json_object(value: Value) -> Result<Map<String, Value>, StoreError> {
    match value {
        Value::Object(map) => Ok(map),
        other => Err(invalid_input(format!(
            "expected JSON object, got {other:?}"
        ))),
    }
}
+
/// Best-effort snapshot of the git state at `project_root`.
///
/// Each git probe degrades to `None`/defaults when git is unavailable or the
/// directory is not a repository (see `run_git`), so this never fails just
/// because a project is not under version control.
fn capture_code_snapshot(project_root: &Utf8Path) -> Result<CodeSnapshotRef, StoreError> {
    let head_commit = run_git(project_root, &["rev-parse", "HEAD"])?;
    // `git status --porcelain` lines look like `XY <path>`: two status
    // characters plus a separator, hence `get(3..)`.
    // NOTE(review): rename entries (`R  old -> new`) are kept verbatim as one
    // path string — confirm that is the intended handling.
    let dirty_paths = run_git(project_root, &["status", "--porcelain"])?
        .map(|status| {
            status
                .lines()
                .filter_map(|line| line.get(3..).map(str::trim))
                .filter(|line| !line.is_empty())
                .map(Utf8PathBuf::from)
                .collect::<BTreeSet<_>>()
        })
        .unwrap_or_default();
    Ok(CodeSnapshotRef {
        // Prefer the true repository root; fall back to the project root.
        repo_root: run_git(project_root, &["rev-parse", "--show-toplevel"])?
            .map(Utf8PathBuf::from)
            .unwrap_or_else(|| project_root.to_path_buf()),
        worktree_root: project_root.to_path_buf(),
        // Current branch name (git prints `HEAD` when detached).
        worktree_name: run_git(project_root, &["rev-parse", "--abbrev-ref", "HEAD"])?
            .map(NonEmptyText::new)
            .transpose()?,
        head_commit: head_commit.map(GitCommitHash::new).transpose()?,
        dirty_paths,
    })
}
+
+fn run_git(project_root: &Utf8Path, args: &[&str]) -> Result<Option<String>, StoreError> {
+ let output = std::process::Command::new("git")
+ .arg("-C")
+ .arg(project_root.as_str())
+ .args(args)
+ .output()?;
+ if !output.status.success() {
+ return Ok(None);
+ }
+ let text = String::from_utf8_lossy(&output.stdout).trim().to_owned();
+ if text.is_empty() {
+ return Ok(None);
+ }
+ Ok(Some(text))
+}
+
+fn parse_metric_observation(raw: String) -> Result<MetricObservation, StoreError> {
+ let parts = raw.split(':').collect::<Vec<_>>();
+ if parts.len() != 4 {
+ return Err(invalid_input(
+ "metrics must look like key:unit:objective:value",
+ ));
+ }
+ Ok(MetricObservation {
+ metric_key: NonEmptyText::new(parts[0])?,
+ unit: parse_metric_unit(parts[1])?,
+ objective: parse_optimization_objective(parts[2])?,
+ value: parts[3]
+ .parse::<f64>()
+ .map_err(|error| invalid_input(format!("invalid metric value: {error}")))?,
+ })
+}
+
+fn parse_metric_unit(raw: &str) -> Result<MetricUnit, StoreError> {
+ match raw {
+ "seconds" => Ok(MetricUnit::Seconds),
+ "bytes" => Ok(MetricUnit::Bytes),
+ "count" => Ok(MetricUnit::Count),
+ "ratio" => Ok(MetricUnit::Ratio),
+ "custom" => Ok(MetricUnit::Custom),
+ other => Err(invalid_input(format!("unknown metric unit `{other}`"))),
+ }
+}
+
+fn parse_optimization_objective(raw: &str) -> Result<OptimizationObjective, StoreError> {
+ match raw {
+ "minimize" => Ok(OptimizationObjective::Minimize),
+ "maximize" => Ok(OptimizationObjective::Maximize),
+ "target" => Ok(OptimizationObjective::Target),
+ other => Err(invalid_input(format!(
+ "unknown optimization objective `{other}`"
+ ))),
+ }
+}
+
+fn parse_node_id(raw: &str) -> Result<fidget_spinner_core::NodeId, StoreError> {
+ Ok(fidget_spinner_core::NodeId::from_uuid(Uuid::parse_str(
+ raw,
+ )?))
+}
+
+fn parse_frontier_id(raw: &str) -> Result<fidget_spinner_core::FrontierId, StoreError> {
+ Ok(fidget_spinner_core::FrontierId::from_uuid(Uuid::parse_str(
+ raw,
+ )?))
+}
+
+fn parse_checkpoint_id(raw: &str) -> Result<fidget_spinner_core::CheckpointId, StoreError> {
+ Ok(fidget_spinner_core::CheckpointId::from_uuid(
+ Uuid::parse_str(raw)?,
+ ))
+}
+
+fn print_json<T: Serialize>(value: &T) -> Result<(), StoreError> {
+ println!("{}", to_pretty_json(value)?);
+ Ok(())
+}
+
+fn to_pretty_json<T: Serialize>(value: &T) -> Result<String, StoreError> {
+ serde_json::to_string_pretty(value).map_err(StoreError::from)
+}
+
/// Build a generic invalid-input error from a message.
///
/// NOTE(review): `StoreError` apparently lacks a first-class invalid-input
/// variant, so this smuggles an `io::Error` through `serde_json::Error::io`
/// to reuse the existing JSON variant — consider a dedicated variant upstream.
fn invalid_input(message: impl Into<String>) -> StoreError {
    StoreError::Json(serde_json::Error::io(std::io::Error::new(
        std::io::ErrorKind::InvalidInput,
        message.into(),
    )))
}
+
// 1:1 mappings from the clap-facing mirror enums onto the core types.
// Exhaustive matches (no `_` arms) so adding a variant on either side is a
// compile error here rather than a silent mismatch.

impl From<CliNodeClass> for NodeClass {
    fn from(value: CliNodeClass) -> Self {
        match value {
            CliNodeClass::Contract => Self::Contract,
            CliNodeClass::Change => Self::Change,
            CliNodeClass::Run => Self::Run,
            CliNodeClass::Analysis => Self::Analysis,
            CliNodeClass::Decision => Self::Decision,
            CliNodeClass::Research => Self::Research,
            CliNodeClass::Enabling => Self::Enabling,
            CliNodeClass::Note => Self::Note,
        }
    }
}

impl From<CliMetricUnit> for MetricUnit {
    fn from(value: CliMetricUnit) -> Self {
        match value {
            CliMetricUnit::Seconds => Self::Seconds,
            CliMetricUnit::Bytes => Self::Bytes,
            CliMetricUnit::Count => Self::Count,
            CliMetricUnit::Ratio => Self::Ratio,
            CliMetricUnit::Custom => Self::Custom,
        }
    }
}

impl From<CliOptimizationObjective> for OptimizationObjective {
    fn from(value: CliOptimizationObjective) -> Self {
        match value {
            CliOptimizationObjective::Minimize => Self::Minimize,
            CliOptimizationObjective::Maximize => Self::Maximize,
            CliOptimizationObjective::Target => Self::Target,
        }
    }
}

impl From<CliExecutionBackend> for ExecutionBackend {
    fn from(value: CliExecutionBackend) -> Self {
        match value {
            CliExecutionBackend::Local => Self::LocalProcess,
            CliExecutionBackend::Worktree => Self::WorktreeProcess,
            CliExecutionBackend::Ssh => Self::SshProcess,
        }
    }
}

impl From<CliFrontierVerdict> for FrontierVerdict {
    fn from(value: CliFrontierVerdict) -> Self {
        match value {
            CliFrontierVerdict::PromoteToChampion => Self::PromoteToChampion,
            CliFrontierVerdict::KeepOnFrontier => Self::KeepOnFrontier,
            CliFrontierVerdict::RevertToChampion => Self::RevertToChampion,
            CliFrontierVerdict::ArchiveDeadEnd => Self::ArchiveDeadEnd,
            CliFrontierVerdict::NeedsMoreEvidence => Self::NeedsMoreEvidence,
        }
    }
}
diff --git a/crates/fidget-spinner-cli/src/mcp/catalog.rs b/crates/fidget-spinner-cli/src/mcp/catalog.rs
new file mode 100644
index 0000000..178b980
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/catalog.rs
@@ -0,0 +1,541 @@
+use serde_json::{Value, json};
+
/// Where a catalog entry is executed: on the stable host process itself, or
/// forwarded to the disposable worker process that owns the project store.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub(crate) enum DispatchTarget {
    Host,
    Worker,
}

/// Replay classification for an operation. `SafeReplay` entries are
/// read-style (they drive `readOnlyHint` in the tool annotations);
/// `NeverReplay` entries mutate state and drive `destructiveHint`.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub(crate) enum ReplayContract {
    SafeReplay,
    NeverReplay,
}

/// Static description of one MCP tool exposed by this server.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub(crate) struct ToolSpec {
    /// Tool name as advertised in `tools/list`.
    pub name: &'static str,
    /// Human-readable description shown to the client.
    pub description: &'static str,
    /// Host- or worker-side execution.
    pub dispatch: DispatchTarget,
    /// Replay/mutation classification.
    pub replay: ReplayContract,
}

/// Static description of one MCP resource exposed by this server.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub(crate) struct ResourceSpec {
    /// Resource URI (`fidget-spinner://...`).
    pub uri: &'static str,
    /// Host- or worker-side execution.
    pub dispatch: DispatchTarget,
    /// Replay/mutation classification.
    pub replay: ReplayContract,
}
+
+impl ToolSpec {
+ #[must_use]
+ pub fn annotation_json(self) -> Value {
+ json!({
+ "title": self.name,
+ "readOnlyHint": self.replay == ReplayContract::SafeReplay,
+ "destructiveHint": self.replay == ReplayContract::NeverReplay,
+ "fidgetSpinner": {
+ "dispatch": match self.dispatch {
+ DispatchTarget::Host => "host",
+ DispatchTarget::Worker => "worker",
+ },
+ "replayContract": match self.replay {
+ ReplayContract::SafeReplay => "safe_replay",
+ ReplayContract::NeverReplay => "never_replay",
+ },
+ }
+ })
+ }
+}
+
+#[must_use]
+pub(crate) fn tool_spec(name: &str) -> Option<ToolSpec> {
+ match name {
+ "project.bind" => Some(ToolSpec {
+ name: "project.bind",
+ description: "Bind this MCP session to a project root or nested path inside a project store.",
+ dispatch: DispatchTarget::Host,
+ replay: ReplayContract::NeverReplay,
+ }),
+ "project.status" => Some(ToolSpec {
+ name: "project.status",
+ description: "Read local project status, store paths, and git availability for the currently bound project.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::SafeReplay,
+ }),
+ "project.schema" => Some(ToolSpec {
+ name: "project.schema",
+ description: "Read the project-local payload schema and field validation tiers.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::SafeReplay,
+ }),
+ "frontier.list" => Some(ToolSpec {
+ name: "frontier.list",
+ description: "List frontiers for the current project.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::SafeReplay,
+ }),
+ "frontier.status" => Some(ToolSpec {
+ name: "frontier.status",
+ description: "Read one frontier projection, including champion and active candidates.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::SafeReplay,
+ }),
+ "frontier.init" => Some(ToolSpec {
+ name: "frontier.init",
+ description: "Create a new frontier rooted in a contract node. If the project is a git repo, the current HEAD becomes the initial champion when possible.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ }),
+ "node.create" => Some(ToolSpec {
+ name: "node.create",
+ description: "Create a generic DAG node with project payload fields and optional lineage parents.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ }),
+ "change.record" => Some(ToolSpec {
+ name: "change.record",
+ description: "Record a core-path change hypothesis with low ceremony.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ }),
+ "node.list" => Some(ToolSpec {
+ name: "node.list",
+ description: "List recent nodes. Archived nodes are hidden unless explicitly requested.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::SafeReplay,
+ }),
+ "node.read" => Some(ToolSpec {
+ name: "node.read",
+ description: "Read one node including payload, diagnostics, and hidden annotations.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::SafeReplay,
+ }),
+ "node.annotate" => Some(ToolSpec {
+ name: "node.annotate",
+ description: "Attach a free-form annotation to any node.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ }),
+ "node.archive" => Some(ToolSpec {
+ name: "node.archive",
+ description: "Archive a node so it falls out of default enumeration without being deleted.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ }),
+ "note.quick" => Some(ToolSpec {
+ name: "note.quick",
+ description: "Push a quick off-path note without bureaucratic experiment closure.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ }),
+ "research.record" => Some(ToolSpec {
+ name: "research.record",
+ description: "Record off-path research or enabling work that should live in the DAG but not on the bureaucratic core path.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ }),
+ "experiment.close" => Some(ToolSpec {
+ name: "experiment.close",
+ description: "Atomically close a core-path experiment with candidate checkpoint capture, measured result, note, and verdict.",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::NeverReplay,
+ }),
+ "skill.list" => Some(ToolSpec {
+ name: "skill.list",
+ description: "List bundled skills shipped with this package.",
+ dispatch: DispatchTarget::Host,
+ replay: ReplayContract::SafeReplay,
+ }),
+ "skill.show" => Some(ToolSpec {
+ name: "skill.show",
+ description: "Return one bundled skill text shipped with this package. Defaults to `fidget-spinner` when name is omitted.",
+ dispatch: DispatchTarget::Host,
+ replay: ReplayContract::SafeReplay,
+ }),
+ "system.health" => Some(ToolSpec {
+ name: "system.health",
+ description: "Read MCP host health, session binding, worker generation, rollout state, and the last fault.",
+ dispatch: DispatchTarget::Host,
+ replay: ReplayContract::SafeReplay,
+ }),
+ "system.telemetry" => Some(ToolSpec {
+ name: "system.telemetry",
+ description: "Read aggregate request, retry, restart, and per-operation telemetry for this MCP session.",
+ dispatch: DispatchTarget::Host,
+ replay: ReplayContract::SafeReplay,
+ }),
+ _ => None,
+ }
+}
+
+#[must_use]
+pub(crate) fn resource_spec(uri: &str) -> Option<ResourceSpec> {
+ match uri {
+ "fidget-spinner://project/config" => Some(ResourceSpec {
+ uri: "fidget-spinner://project/config",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::SafeReplay,
+ }),
+ "fidget-spinner://project/schema" => Some(ResourceSpec {
+ uri: "fidget-spinner://project/schema",
+ dispatch: DispatchTarget::Worker,
+ replay: ReplayContract::SafeReplay,
+ }),
+ "fidget-spinner://skill/fidget-spinner" => Some(ResourceSpec {
+ uri: "fidget-spinner://skill/fidget-spinner",
+ dispatch: DispatchTarget::Host,
+ replay: ReplayContract::SafeReplay,
+ }),
+ "fidget-spinner://skill/frontier-loop" => Some(ResourceSpec {
+ uri: "fidget-spinner://skill/frontier-loop",
+ dispatch: DispatchTarget::Host,
+ replay: ReplayContract::SafeReplay,
+ }),
+ _ => None,
+ }
+}
+
+#[must_use]
+pub(crate) fn tool_definitions() -> Vec<Value> {
+ [
+ "project.bind",
+ "project.status",
+ "project.schema",
+ "frontier.list",
+ "frontier.status",
+ "frontier.init",
+ "node.create",
+ "change.record",
+ "node.list",
+ "node.read",
+ "node.annotate",
+ "node.archive",
+ "note.quick",
+ "research.record",
+ "experiment.close",
+ "skill.list",
+ "skill.show",
+ "system.health",
+ "system.telemetry",
+ ]
+ .into_iter()
+ .filter_map(tool_spec)
+ .map(|spec| {
+ json!({
+ "name": spec.name,
+ "description": spec.description,
+ "inputSchema": input_schema(spec.name),
+ "annotations": spec.annotation_json(),
+ })
+ })
+ .collect()
+}
+
+#[must_use]
+pub(crate) fn list_resources() -> Vec<Value> {
+ vec![
+ json!({
+ "uri": "fidget-spinner://project/config",
+ "name": "project-config",
+ "description": "Project-local store configuration",
+ "mimeType": "application/json"
+ }),
+ json!({
+ "uri": "fidget-spinner://project/schema",
+ "name": "project-schema",
+ "description": "Project-local payload schema and validation tiers",
+ "mimeType": "application/json"
+ }),
+ json!({
+ "uri": "fidget-spinner://skill/fidget-spinner",
+ "name": "fidget-spinner-skill",
+ "description": "Bundled base Fidget Spinner skill text for this package",
+ "mimeType": "text/markdown"
+ }),
+ json!({
+ "uri": "fidget-spinner://skill/frontier-loop",
+ "name": "frontier-loop-skill",
+ "description": "Bundled frontier-loop specialization skill text for this package",
+ "mimeType": "text/markdown"
+ }),
+ ]
+}
+
/// JSON Schema for one tool's `inputSchema` field, keyed by tool name.
///
/// Every schema sets `additionalProperties: false`, so unknown argument
/// keys are rejected by schema-aware clients. Unknown tool names fall
/// through to an empty (no-argument) object schema.
fn input_schema(name: &str) -> Value {
    match name {
        // No-argument tools.
        "project.status" | "project.schema" | "skill.list" | "system.health"
        | "system.telemetry" => json!({"type":"object","additionalProperties":false}),
        "project.bind" => json!({
            "type": "object",
            "properties": {
                "path": { "type": "string", "description": "Project root or any nested path inside a project with .fidget_spinner state." }
            },
            "required": ["path"],
            "additionalProperties": false
        }),
        "skill.show" => json!({
            "type": "object",
            "properties": {
                "name": { "type": "string", "description": "Bundled skill name. Defaults to `fidget-spinner`." }
            },
            "additionalProperties": false
        }),
        "frontier.list" => json!({"type":"object","additionalProperties":false}),
        "frontier.status" => json!({
            "type": "object",
            "properties": {
                "frontier_id": { "type": "string", "description": "Frontier UUID" }
            },
            "required": ["frontier_id"],
            "additionalProperties": false
        }),
        // Frontier creation: contract metadata plus the metric specs shared
        // via the `metric_spec_schema` helper.
        "frontier.init" => json!({
            "type": "object",
            "properties": {
                "label": { "type": "string" },
                "objective": { "type": "string" },
                "contract_title": { "type": "string" },
                "contract_summary": { "type": "string" },
                "benchmark_suites": { "type": "array", "items": { "type": "string" } },
                "promotion_criteria": { "type": "array", "items": { "type": "string" } },
                "primary_metric": metric_spec_schema(),
                "supporting_metrics": { "type": "array", "items": metric_spec_schema() },
                "seed_summary": { "type": "string" }
            },
            "required": ["label", "objective", "contract_title", "benchmark_suites", "promotion_criteria", "primary_metric"],
            "additionalProperties": false
        }),
        "node.create" => json!({
            "type": "object",
            "properties": {
                "class": node_class_schema(),
                "frontier_id": { "type": "string" },
                "title": { "type": "string" },
                "summary": { "type": "string" },
                "payload": { "type": "object" },
                "annotations": { "type": "array", "items": annotation_schema() },
                "parents": { "type": "array", "items": { "type": "string" } }
            },
            "required": ["class", "title"],
            "additionalProperties": false
        }),
        "change.record" => json!({
            "type": "object",
            "properties": {
                "frontier_id": { "type": "string" },
                "title": { "type": "string" },
                "summary": { "type": "string" },
                "body": { "type": "string" },
                "hypothesis": { "type": "string" },
                "base_checkpoint_id": { "type": "string" },
                "benchmark_suite": { "type": "string" },
                "annotations": { "type": "array", "items": annotation_schema() },
                "parents": { "type": "array", "items": { "type": "string" } }
            },
            "required": ["frontier_id", "title", "body"],
            "additionalProperties": false
        }),
        "node.list" => json!({
            "type": "object",
            "properties": {
                "frontier_id": { "type": "string" },
                "class": node_class_schema(),
                "include_archived": { "type": "boolean" },
                "limit": { "type": "integer", "minimum": 1, "maximum": 500 }
            },
            "additionalProperties": false
        }),
        // Both take a single node id.
        "node.read" | "node.archive" => json!({
            "type": "object",
            "properties": {
                "node_id": { "type": "string" }
            },
            "required": ["node_id"],
            "additionalProperties": false
        }),
        "node.annotate" => json!({
            "type": "object",
            "properties": {
                "node_id": { "type": "string" },
                "body": { "type": "string" },
                "label": { "type": "string" },
                "visible": { "type": "boolean" }
            },
            "required": ["node_id", "body"],
            "additionalProperties": false
        }),
        "note.quick" => json!({
            "type": "object",
            "properties": {
                "frontier_id": { "type": "string" },
                "title": { "type": "string" },
                "body": { "type": "string" },
                "annotations": { "type": "array", "items": annotation_schema() },
                "parents": { "type": "array", "items": { "type": "string" } }
            },
            "required": ["title", "body"],
            "additionalProperties": false
        }),
        "research.record" => json!({
            "type": "object",
            "properties": {
                "frontier_id": { "type": "string" },
                "title": { "type": "string" },
                "summary": { "type": "string" },
                "body": { "type": "string" },
                "annotations": { "type": "array", "items": annotation_schema() },
                "parents": { "type": "array", "items": { "type": "string" } }
            },
            "required": ["title", "body"],
            "additionalProperties": false
        }),
        // Atomic experiment closure: run description, measured metrics,
        // note, verdict, and decision record in one call.
        "experiment.close" => json!({
            "type": "object",
            "properties": {
                "frontier_id": { "type": "string" },
                "base_checkpoint_id": { "type": "string" },
                "change_node_id": { "type": "string" },
                "candidate_summary": { "type": "string" },
                "run": run_schema(),
                "primary_metric": metric_observation_schema(),
                "supporting_metrics": { "type": "array", "items": metric_observation_schema() },
                "note": note_schema(),
                "verdict": verdict_schema(),
                "decision_title": { "type": "string" },
                "decision_rationale": { "type": "string" },
                "analysis_node_id": { "type": "string" }
            },
            "required": [
                "frontier_id",
                "base_checkpoint_id",
                "change_node_id",
                "candidate_summary",
                "run",
                "primary_metric",
                "note",
                "verdict",
                "decision_title",
                "decision_rationale"
            ],
            "additionalProperties": false
        }),
        // Unknown tool: fall back to a no-argument schema.
        _ => json!({"type":"object","additionalProperties":false}),
    }
}
+
/// Schema for a metric *specification* (key/unit/objective, no value);
/// used when declaring a frontier's metrics in `frontier.init`.
fn metric_spec_schema() -> Value {
    json!({
        "type": "object",
        "properties": {
            "key": { "type": "string" },
            "unit": metric_unit_schema(),
            "objective": optimization_objective_schema()
        },
        "required": ["key", "unit", "objective"],
        "additionalProperties": false
    })
}

/// Schema for a metric *observation*: a metric spec plus the measured
/// numeric `value`; used in `experiment.close`.
fn metric_observation_schema() -> Value {
    json!({
        "type": "object",
        "properties": {
            "key": { "type": "string" },
            "unit": metric_unit_schema(),
            "objective": optimization_objective_schema(),
            "value": { "type": "number" }
        },
        "required": ["key", "unit", "objective", "value"],
        "additionalProperties": false
    })
}

/// Schema for a free-form node annotation (only `body` is required).
fn annotation_schema() -> Value {
    json!({
        "type": "object",
        "properties": {
            "body": { "type": "string" },
            "label": { "type": "string" },
            "visible": { "type": "boolean" }
        },
        "required": ["body"],
        "additionalProperties": false
    })
}
+
// The string-enum schemas below must stay in lockstep with the
// corresponding domain enums (snake_case wire names).

/// Wire names for the node classes.
fn node_class_schema() -> Value {
    json!({
        "type": "string",
        "enum": ["contract", "change", "run", "analysis", "decision", "research", "enabling", "note"]
    })
}

/// Wire names for the metric units.
fn metric_unit_schema() -> Value {
    json!({
        "type": "string",
        "enum": ["seconds", "bytes", "count", "ratio", "custom"]
    })
}

/// Wire names for the optimization objectives.
fn optimization_objective_schema() -> Value {
    json!({
        "type": "string",
        "enum": ["minimize", "maximize", "target"]
    })
}

/// Wire names for the frontier verdicts used by `experiment.close`.
fn verdict_schema() -> Value {
    json!({
        "type": "string",
        "enum": [
            "promote_to_champion",
            "keep_on_frontier",
            "revert_to_champion",
            "archive_dead_end",
            "needs_more_evidence"
        ]
    })
}
+
/// Schema for the `run` argument of `experiment.close`: what was executed,
/// on which backend, and the exact command (argv is the only required
/// command field; working directory and env are optional).
fn run_schema() -> Value {
    json!({
        "type": "object",
        "properties": {
            "title": { "type": "string" },
            "summary": { "type": "string" },
            "backend": {
                "type": "string",
                "enum": ["local_process", "worktree_process", "ssh_process"]
            },
            "benchmark_suite": { "type": "string" },
            "command": {
                "type": "object",
                "properties": {
                    "working_directory": { "type": "string" },
                    "argv": { "type": "array", "items": { "type": "string" } },
                    "env": {
                        "type": "object",
                        "additionalProperties": { "type": "string" }
                    }
                },
                "required": ["argv"],
                "additionalProperties": false
            }
        },
        "required": ["title", "backend", "benchmark_suite", "command"],
        "additionalProperties": false
    })
}

/// Schema for the closing note: a required summary plus optional follow-up
/// hypotheses.
fn note_schema() -> Value {
    json!({
        "type": "object",
        "properties": {
            "summary": { "type": "string" },
            "next_hypotheses": { "type": "array", "items": { "type": "string" } }
        },
        "required": ["summary"],
        "additionalProperties": false
    })
}
diff --git a/crates/fidget-spinner-cli/src/mcp/fault.rs b/crates/fidget-spinner-cli/src/mcp/fault.rs
new file mode 100644
index 0000000..e9d1fce
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/fault.rs
@@ -0,0 +1,99 @@
+use serde::{Deserialize, Serialize};
+use serde_json::{Value, json};
+use time::OffsetDateTime;
+
/// Coarse fault classification; maps onto a JSON-RPC error code via
/// [`FaultRecord::jsonrpc_code`].
#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) enum FaultKind {
    InvalidInput,
    NotInitialized,
    Unavailable,
    Transient,
    Internal,
}

/// Which layer of the host/worker pipeline produced the fault.
#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) enum FaultStage {
    Host,
    Worker,
    Store,
    Transport,
    Protocol,
    Rollout,
}

/// One recorded fault, serializable so it can cross the host/worker pipe
/// and be embedded in JSON-RPC error payloads and tool results.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct FaultRecord {
    pub kind: FaultKind,
    pub stage: FaultStage,
    /// Operation label, e.g. "worker.spawn" or a tool name.
    pub operation: String,
    /// Human-readable message surfaced to the client.
    pub message: String,
    /// Whether the host may retry the operation (set via `retryable`).
    pub retryable: bool,
    /// Whether a retry was already attempted (set via `mark_retried`).
    pub retried: bool,
    /// Worker generation at the time of the fault, when one was involved.
    pub worker_generation: Option<u64>,
    pub occurred_at: OffsetDateTime,
}
+
impl FaultRecord {
    /// Build a non-retryable fault timestamped now; use the builder methods
    /// below to flip the retry flags.
    #[must_use]
    pub fn new(
        kind: FaultKind,
        stage: FaultStage,
        operation: impl Into<String>,
        message: impl Into<String>,
    ) -> Self {
        Self {
            kind,
            stage,
            operation: operation.into(),
            message: message.into(),
            retryable: false,
            retried: false,
            worker_generation: None,
            occurred_at: OffsetDateTime::now_utc(),
        }
    }

    /// Mark this fault as retryable, recording the worker generation that
    /// was active when it occurred (if any).
    #[must_use]
    pub fn retryable(mut self, worker_generation: Option<u64>) -> Self {
        self.retryable = true;
        self.worker_generation = worker_generation;
        self
    }

    /// Record that a retry has already been attempted for this fault.
    #[must_use]
    pub fn mark_retried(mut self) -> Self {
        self.retried = true;
        self
    }

    /// Render as a JSON-RPC error object; the full record rides along in
    /// `data`. The message clone is needed because `self` is also
    /// serialized whole into the same object.
    #[must_use]
    pub fn into_jsonrpc_error(self) -> Value {
        json!({
            "code": self.jsonrpc_code(),
            "message": self.message.clone(),
            "data": self,
        })
    }

    /// Render as an MCP tool result with `isError: true`: the message as
    /// text content, the full record as `structuredContent`.
    #[must_use]
    pub fn into_tool_result(self) -> Value {
        json!({
            "content": [{
                "type": "text",
                "text": self.message,
            }],
            "structuredContent": self,
            "isError": true,
        })
    }

    /// JSON-RPC error code for this fault kind. -32602 is the standard
    /// "invalid params"; -32603 the standard "internal error"; the -320xx
    /// values are server-defined codes for not-initialized/unavailable.
    #[must_use]
    pub const fn jsonrpc_code(&self) -> i64 {
        match self.kind {
            FaultKind::InvalidInput => -32602,
            FaultKind::NotInitialized => -32002,
            FaultKind::Unavailable => -32004,
            FaultKind::Transient | FaultKind::Internal => -32603,
        }
    }
}
diff --git a/crates/fidget-spinner-cli/src/mcp/host/binary.rs b/crates/fidget-spinner-cli/src/mcp/host/binary.rs
new file mode 100644
index 0000000..2107461
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/host/binary.rs
@@ -0,0 +1,43 @@
+use std::fs;
+use std::io;
+use std::path::{Path, PathBuf};
+
+use fidget_spinner_store_sqlite::StoreError;
+
+use crate::mcp::protocol::BinaryFingerprint;
+
/// Tracks the host executable on disk so the runtime can notice when the
/// binary has been replaced (a pending rollout).
pub(super) struct BinaryRuntime {
    /// Path the host was launched from.
    pub(super) path: PathBuf,
    /// Size + mtime fingerprint captured at startup.
    startup_fingerprint: BinaryFingerprint,
    /// False when the launch path contains a `target` component, i.e. it
    /// looks like a cargo build directory whose artifacts churn.
    pub(super) launch_path_stable: bool,
}
+
+impl BinaryRuntime {
+ pub(super) fn new(path: PathBuf) -> Result<Self, StoreError> {
+ let startup_fingerprint = fingerprint_binary(&path)?;
+ Ok(Self {
+ launch_path_stable: !path
+ .components()
+ .any(|component| component.as_os_str().to_string_lossy() == "target"),
+ path,
+ startup_fingerprint,
+ })
+ }
+
+ pub(super) fn rollout_pending(&self) -> Result<bool, StoreError> {
+ Ok(fingerprint_binary(&self.path)? != self.startup_fingerprint)
+ }
+}
+
+fn fingerprint_binary(path: &Path) -> Result<BinaryFingerprint, StoreError> {
+ let metadata = fs::metadata(path)?;
+ let modified_unix_nanos = metadata
+ .modified()?
+ .duration_since(std::time::UNIX_EPOCH)
+ .map_err(|error| io::Error::other(format!("invalid binary mtime: {error}")))?
+ .as_nanos();
+ Ok(BinaryFingerprint {
+ length_bytes: metadata.len(),
+ modified_unix_nanos,
+ })
+}
diff --git a/crates/fidget-spinner-cli/src/mcp/host/config.rs b/crates/fidget-spinner-cli/src/mcp/host/config.rs
new file mode 100644
index 0000000..8d1ee4b
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/host/config.rs
@@ -0,0 +1,18 @@
+use std::path::PathBuf;
+
+use fidget_spinner_store_sqlite::StoreError;
+
/// Static configuration for the host process.
#[derive(Clone, Debug)]
pub(super) struct HostConfig {
    /// Path to this very executable; used to respawn workers.
    pub(super) executable: PathBuf,
    /// Project root supplied on the command line, if any.
    pub(super) initial_project: Option<PathBuf>,
}

impl HostConfig {
    /// Resolve the current executable path and capture the optional
    /// initial project binding.
    pub(super) fn new(initial_project: Option<PathBuf>) -> Result<Self, StoreError> {
        Ok(Self {
            executable: std::env::current_exe()?,
            initial_project,
        })
    }
}
diff --git a/crates/fidget-spinner-cli/src/mcp/host/mod.rs b/crates/fidget-spinner-cli/src/mcp/host/mod.rs
new file mode 100644
index 0000000..21c19ec
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/host/mod.rs
@@ -0,0 +1,8 @@
+//! Stable host process that owns the MCP session and routes store work to a disposable worker.
+
+mod binary;
+mod config;
+mod process;
+mod runtime;
+
+pub(crate) use runtime::run_host as serve;
diff --git a/crates/fidget-spinner-cli/src/mcp/host/process.rs b/crates/fidget-spinner-cli/src/mcp/host/process.rs
new file mode 100644
index 0000000..d4cbb4b
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/host/process.rs
@@ -0,0 +1,246 @@
+use std::io::{BufRead, BufReader, BufWriter, Write};
+use std::path::PathBuf;
+use std::process::{Child, ChildStdin, ChildStdout, Command, Stdio};
+
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
+
+use crate::mcp::fault::{FaultKind, FaultRecord, FaultStage};
+use crate::mcp::protocol::{
+ HostRequestId, WorkerOperation, WorkerOutcome, WorkerRequest, WorkerResponse, WorkerSpawnConfig,
+};
+
/// A resolved session binding: the path the client asked for and the
/// project root it resolved to.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(super) struct ProjectBinding {
    pub(super) requested_path: PathBuf,
    pub(super) project_root: PathBuf,
}

/// Owns the disposable worker child process and its stdio pipes, spawning
/// it lazily and restarting it across crashes and project rebinds.
pub(super) struct WorkerSupervisor {
    config: WorkerSpawnConfig,
    /// Monotonic generation counter, bumped on every spawn.
    generation: u64,
    /// Fault-injection flag: when set, the next `execute` kills the worker
    /// after writing the request, simulating a crash before the reply.
    crash_before_reply_once: bool,
    /// Project root the worker is (or will be) bound to; `None` until
    /// `rebind` is called.
    bound_project_root: Option<PathBuf>,
    // All three are `Some` only while a worker is running.
    child: Option<Child>,
    stdin: Option<BufWriter<ChildStdin>>,
    stdout: Option<BufReader<ChildStdout>>,
}
+
impl WorkerSupervisor {
    /// Create a supervisor with no worker running; `generation` seeds the
    /// spawn counter (presumably restored from prior host state — confirm
    /// against the runtime's HostStateSeed handling).
    pub(super) fn new(config: WorkerSpawnConfig, generation: u64) -> Self {
        Self {
            config,
            generation,
            crash_before_reply_once: false,
            bound_project_root: None,
            child: None,
            stdin: None,
            stdout: None,
        }
    }

    /// Current worker generation (bumped on every spawn).
    pub(super) fn generation(&self) -> u64 {
        self.generation
    }

    /// Bind (or rebind) the supervisor to a project root.
    ///
    /// Rebinding to the same root is a no-op; a different root kills the
    /// current worker so the next request spawns one for the new project.
    pub(super) fn rebind(&mut self, project_root: PathBuf) {
        if self
            .bound_project_root
            .as_ref()
            .is_some_and(|current| current == &project_root)
        {
            return;
        }
        self.kill_current_worker();
        self.bound_project_root = Some(project_root);
    }

    /// Run one operation on the worker: write a newline-framed JSON request
    /// to its stdin, then read a single JSON response line from its stdout.
    ///
    /// All transport failures are returned as retryable `Transient` faults
    /// tagged with the current generation, so the caller can restart the
    /// worker and retry safe operations.
    pub(super) fn execute(
        &mut self,
        request_id: HostRequestId,
        operation: WorkerOperation,
    ) -> Result<Value, FaultRecord> {
        self.ensure_worker()?;
        let request = WorkerRequest::Execute {
            id: request_id,
            operation,
        };
        let stdin = self.stdin.as_mut().ok_or_else(|| {
            FaultRecord::new(
                FaultKind::Transient,
                FaultStage::Transport,
                "worker.stdin",
                "worker stdin is not available",
            )
            .retryable(Some(self.generation))
        })?;
        // Serialize the request directly into the buffered pipe...
        serde_json::to_writer(&mut *stdin, &request).map_err(|error| {
            FaultRecord::new(
                FaultKind::Transient,
                FaultStage::Transport,
                "worker.write",
                format!("failed to write worker request: {error}"),
            )
            .retryable(Some(self.generation))
        })?;
        // ...then terminate the frame with a newline and flush so the
        // worker's line-oriented reader sees a complete message.
        stdin.write_all(b"\n").map_err(|error| {
            FaultRecord::new(
                FaultKind::Transient,
                FaultStage::Transport,
                "worker.write",
                format!("failed to frame worker request: {error}"),
            )
            .retryable(Some(self.generation))
        })?;
        stdin.flush().map_err(|error| {
            FaultRecord::new(
                FaultKind::Transient,
                FaultStage::Transport,
                "worker.write",
                format!("failed to flush worker request: {error}"),
            )
            .retryable(Some(self.generation))
        })?;

        // Fault injection: simulate the worker dying after it received the
        // request but before it replied (armed via `arm_crash_once`).
        if self.crash_before_reply_once {
            self.crash_before_reply_once = false;
            self.kill_current_worker();
            return Err(FaultRecord::new(
                FaultKind::Transient,
                FaultStage::Transport,
                "worker.read",
                "worker crashed before replying",
            )
            .retryable(Some(self.generation)));
        }

        let stdout = self.stdout.as_mut().ok_or_else(|| {
            FaultRecord::new(
                FaultKind::Transient,
                FaultStage::Transport,
                "worker.stdout",
                "worker stdout is not available",
            )
            .retryable(Some(self.generation))
        })?;
        let mut line = String::new();
        let bytes = stdout.read_line(&mut line).map_err(|error| {
            FaultRecord::new(
                FaultKind::Transient,
                FaultStage::Transport,
                "worker.read",
                format!("failed to read worker response: {error}"),
            )
            .retryable(Some(self.generation))
        })?;
        // Zero bytes means EOF: the worker exited without answering.
        if bytes == 0 {
            self.kill_current_worker();
            return Err(FaultRecord::new(
                FaultKind::Transient,
                FaultStage::Transport,
                "worker.read",
                "worker exited before replying",
            )
            .retryable(Some(self.generation)));
        }
        let response = serde_json::from_str::<WorkerResponse>(&line).map_err(|error| {
            FaultRecord::new(
                FaultKind::Transient,
                FaultStage::Protocol,
                "worker.read",
                format!("invalid worker response: {error}"),
            )
            .retryable(Some(self.generation))
        })?;
        // Worker-side faults come back structured and are passed through.
        match response.outcome {
            WorkerOutcome::Success { result } => Ok(result),
            WorkerOutcome::Fault { fault } => Err(fault),
        }
    }

    /// Kill the current worker (if any) and immediately spawn a fresh one.
    pub(super) fn restart(&mut self) -> Result<(), FaultRecord> {
        self.kill_current_worker();
        self.ensure_worker()
    }

    /// Non-blocking liveness probe. A worker that has exited (or errored on
    /// `try_wait`) has its handles dropped so the next request respawns.
    pub(super) fn is_alive(&mut self) -> bool {
        let Some(child) = self.child.as_mut() else {
            return false;
        };
        if let Ok(None) = child.try_wait() {
            true
        } else {
            self.child = None;
            self.stdin = None;
            self.stdout = None;
            false
        }
    }

    /// Arm the crash-before-reply fault injection for the next `execute`.
    pub(super) fn arm_crash_once(&mut self) {
        self.crash_before_reply_once = true;
    }

    /// Spawn a worker if none is alive. Requires a bound project root;
    /// bumps the generation on every spawn. The worker is this same
    /// executable invoked as `mcp worker --project <root>` with piped
    /// stdin/stdout and inherited stderr.
    fn ensure_worker(&mut self) -> Result<(), FaultRecord> {
        if self.is_alive() {
            return Ok(());
        }
        let Some(project_root) = self.bound_project_root.as_ref() else {
            return Err(FaultRecord::new(
                FaultKind::Unavailable,
                FaultStage::Host,
                "worker.spawn",
                "project is not bound; call project.bind before using project tools",
            ));
        };
        self.generation += 1;
        let mut child = Command::new(&self.config.executable)
            .arg("mcp")
            .arg("worker")
            .arg("--project")
            .arg(project_root)
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::inherit())
            .spawn()
            .map_err(|error| {
                FaultRecord::new(
                    FaultKind::Transient,
                    FaultStage::Transport,
                    "worker.spawn",
                    format!("failed to spawn worker: {error}"),
                )
                .retryable(Some(self.generation))
            })?;
        let stdin = child.stdin.take().ok_or_else(|| {
            FaultRecord::new(
                FaultKind::Internal,
                FaultStage::Transport,
                "worker.spawn",
                "worker stdin pipe was not created",
            )
        })?;
        let stdout = child.stdout.take().ok_or_else(|| {
            FaultRecord::new(
                FaultKind::Internal,
                FaultStage::Transport,
                "worker.spawn",
                "worker stdout pipe was not created",
            )
        })?;
        self.child = Some(child);
        self.stdin = Some(BufWriter::new(stdin));
        self.stdout = Some(BufReader::new(stdout));
        Ok(())
    }

    /// Best-effort kill + reap of the current worker, then drop all pipe
    /// handles. Errors are ignored: the child may already be gone.
    fn kill_current_worker(&mut self) {
        if let Some(child) = self.child.as_mut() {
            let _ = child.kill();
            let _ = child.wait();
        }
        self.child = None;
        self.stdin = None;
        self.stdout = None;
    }
}
diff --git a/crates/fidget-spinner-cli/src/mcp/host/runtime.rs b/crates/fidget-spinner-cli/src/mcp/host/runtime.rs
new file mode 100644
index 0000000..dd75544
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/host/runtime.rs
@@ -0,0 +1,719 @@
+use std::io::{self, BufRead, Write};
+#[cfg(unix)]
+use std::os::unix::process::CommandExt;
+use std::path::PathBuf;
+use std::process::Command;
+use std::time::Instant;
+
+use serde::Serialize;
+use serde_json::{Value, json};
+
+use super::{
+ binary::BinaryRuntime,
+ config::HostConfig,
+ process::{ProjectBinding, WorkerSupervisor},
+};
+use crate::mcp::catalog::{
+ DispatchTarget, ReplayContract, list_resources, resource_spec, tool_definitions, tool_spec,
+};
+use crate::mcp::fault::{FaultKind, FaultRecord, FaultStage};
+use crate::mcp::protocol::{
+ CRASH_ONCE_ENV, FORCE_ROLLOUT_ENV, HOST_STATE_ENV, HostRequestId, HostStateSeed,
+ PROTOCOL_VERSION, ProjectBindingSeed, SERVER_NAME, SessionSeed, WorkerOperation,
+ WorkerSpawnConfig,
+};
+use crate::mcp::telemetry::{
+ BinaryHealth, BindingHealth, HealthSnapshot, InitializationHealth, ServerTelemetry,
+ WorkerHealth,
+};
+
+pub(crate) fn run_host(
+ initial_project: Option<PathBuf>,
+) -> Result<(), fidget_spinner_store_sqlite::StoreError> {
+ let stdin = io::stdin();
+ let mut stdout = io::stdout().lock();
+ let mut host = HostRuntime::new(HostConfig::new(initial_project)?)?;
+
+ for line in stdin.lock().lines() {
+ let line = match line {
+ Ok(line) => line,
+ Err(error) => {
+ eprintln!("mcp stdin failure: {error}");
+ continue;
+ }
+ };
+ if line.trim().is_empty() {
+ continue;
+ }
+
+ let maybe_response = host.handle_line(&line);
+ if let Some(response) = maybe_response {
+ write_message(&mut stdout, &response)?;
+ }
+ host.maybe_roll_forward()?;
+ }
+
+ Ok(())
+}
+
/// Long-lived state for the MCP host process: session/initialization status,
/// the bound project, telemetry, the worker supervisor, and the one-shot
/// test-injection flags that survive exec-based rollouts via `HostStateSeed`.
struct HostRuntime {
    config: HostConfig,
    // Currently bound project, if any (restored from seed or initial_project).
    binding: Option<ProjectBinding>,
    // Captured initialize params + initialized flag for the MCP handshake.
    session: SessionSeed,
    telemetry: ServerTelemetry,
    // Monotonic id handed to worker requests; starts at 1.
    next_request_id: u64,
    worker: WorkerSupervisor,
    binary: BinaryRuntime,
    // Test-only: operation key that, once matched, requests a rollout.
    force_rollout_key: Option<String>,
    force_rollout_consumed: bool,
    rollout_requested: bool,
    // Test-only: operation key that arms a one-shot worker crash.
    crash_once_key: Option<String>,
    crash_once_consumed: bool,
}
+
impl HostRuntime {
    /// Build the runtime, restoring state seeded through `HOST_STATE_ENV`
    /// when this process was exec'd as part of a rollout; otherwise start
    /// from defaults and optionally bind `config.initial_project`.
    fn new(config: HostConfig) -> Result<Self, fidget_spinner_store_sqlite::StoreError> {
        let restored = restore_host_state();
        let session = restored
            .as_ref()
            .map_or_else(SessionSeed::default, |seed| seed.session.clone());
        let telemetry = restored
            .as_ref()
            .map_or_else(ServerTelemetry::default, |seed| seed.telemetry.clone());
        // Request ids are 1-based even if the seed carried 0.
        let next_request_id = restored
            .as_ref()
            .map_or(1, |seed| seed.next_request_id.max(1));
        let worker_generation = restored.as_ref().map_or(0, |seed| seed.worker_generation);
        let force_rollout_consumed = restored
            .as_ref()
            .is_some_and(|seed| seed.force_rollout_consumed);
        let crash_once_consumed = restored
            .as_ref()
            .is_some_and(|seed| seed.crash_once_consumed);
        // Seeded binding wins; otherwise resolve the CLI-supplied project.
        let binding = restored
            .as_ref()
            .and_then(|seed| seed.binding.clone().map(ProjectBinding::from))
            .or(config
                .initial_project
                .clone()
                .map(resolve_project_binding)
                .transpose()?
                .map(|resolved| resolved.binding));

        let worker = {
            let mut worker = WorkerSupervisor::new(
                WorkerSpawnConfig {
                    executable: config.executable.clone(),
                },
                worker_generation,
            );
            if let Some(project_root) = binding.as_ref().map(|binding| binding.project_root.clone())
            {
                worker.rebind(project_root);
            }
            worker
        };

        Ok(Self {
            config: config.clone(),
            binding,
            session,
            telemetry,
            next_request_id,
            worker,
            binary: BinaryRuntime::new(config.executable.clone())?,
            force_rollout_key: std::env::var(FORCE_ROLLOUT_ENV).ok(),
            force_rollout_consumed,
            rollout_requested: false,
            crash_once_key: std::env::var(CRASH_ONCE_ENV).ok(),
            crash_once_consumed,
        })
    }

    /// Parse one stdin line as JSON and dispatch it. A parse failure yields
    /// a JSON-RPC error with a null id (no request id is recoverable).
    fn handle_line(&mut self, line: &str) -> Option<Value> {
        let message = match serde_json::from_str::<Value>(line) {
            Ok(message) => message,
            Err(error) => {
                return Some(jsonrpc_error(
                    Value::Null,
                    FaultRecord::new(
                        FaultKind::InvalidInput,
                        FaultStage::Protocol,
                        "jsonrpc.parse",
                        format!("parse error: {error}"),
                    ),
                ));
            }
        };
        self.handle_message(message)
    }

    /// Route a parsed JSON-RPC message: record telemetry around dispatch and
    /// shape the reply. Tool-call faults become `isError` tool results rather
    /// than protocol errors; notifications (no id) produce no success reply.
    fn handle_message(&mut self, message: Value) -> Option<Value> {
        let Some(object) = message.as_object() else {
            return Some(jsonrpc_error(
                Value::Null,
                FaultRecord::new(
                    FaultKind::InvalidInput,
                    FaultStage::Protocol,
                    "jsonrpc.message",
                    "invalid request: expected JSON object",
                ),
            ));
        };

        // NOTE(review): a message without a string `method` is silently
        // dropped here (even if it carries an id) — confirm that is intended.
        let method = object.get("method").and_then(Value::as_str)?;
        let id = object.get("id").cloned();
        let params = object.get("params").cloned().unwrap_or_else(|| json!({}));
        let operation_key = operation_key(method, &params);
        let started_at = Instant::now();

        self.telemetry.record_request(&operation_key);
        let response = match self.dispatch(method, params, id.clone()) {
            Ok(Some(result)) => {
                self.telemetry
                    .record_success(&operation_key, started_at.elapsed().as_millis());
                id.map(|id| jsonrpc_result(id, result))
            }
            Ok(None) => {
                self.telemetry
                    .record_success(&operation_key, started_at.elapsed().as_millis());
                None
            }
            Err(fault) => {
                self.telemetry.record_error(
                    &operation_key,
                    fault.clone(),
                    started_at.elapsed().as_millis(),
                );
                Some(match id {
                    Some(id) => match method {
                        // MCP convention: tool failures are reported in-band.
                        "tools/call" => jsonrpc_result(id, fault.into_tool_result()),
                        _ => jsonrpc_error(id, fault),
                    },
                    None => jsonrpc_error(Value::Null, fault),
                })
            }
        };

        // Test hook: a matching operation key requests one forced rollout.
        if self.should_force_rollout(&operation_key) {
            self.force_rollout_consumed = true;
            self.telemetry.record_rollout();
            self.rollout_requested = true;
        }

        response
    }

    /// Core method dispatch. Lifecycle methods (`initialize`, the
    /// `initialized` notification, `ping`) are always available; everything
    /// else is gated behind a completed handshake.
    fn dispatch(
        &mut self,
        method: &str,
        params: Value,
        request_id: Option<Value>,
    ) -> Result<Option<Value>, FaultRecord> {
        match method {
            "initialize" => {
                // Capture the client's params so they survive rollouts;
                // a re-initialize resets the handshake.
                self.session.initialize_params = Some(params.clone());
                self.session.initialized = false;
                Ok(Some(json!({
                    "protocolVersion": PROTOCOL_VERSION,
                    "capabilities": {
                        "tools": { "listChanged": false },
                        "resources": { "listChanged": false, "subscribe": false }
                    },
                    "serverInfo": {
                        "name": SERVER_NAME,
                        "version": env!("CARGO_PKG_VERSION")
                    },
                    "instructions": "The DAG is canonical truth. Frontier state is derived. Bind the session with project.bind before project-local DAG operations when the MCP is running unbound."
                })))
            }
            "notifications/initialized" => {
                if self.session.initialize_params.is_none() {
                    return Err(FaultRecord::new(
                        FaultKind::NotInitialized,
                        FaultStage::Host,
                        "notifications/initialized",
                        "received initialized notification before initialize",
                    ));
                }
                self.session.initialized = true;
                Ok(None)
            }
            "notifications/cancelled" => Ok(None),
            "ping" => Ok(Some(json!({}))),
            other => {
                self.require_initialized(other)?;
                match other {
                    "tools/list" => Ok(Some(json!({ "tools": tool_definitions() }))),
                    "resources/list" => Ok(Some(json!({ "resources": list_resources() }))),
                    "tools/call" => Ok(Some(self.dispatch_tool_call(params, request_id)?)),
                    "resources/read" => Ok(Some(self.dispatch_resource_read(params)?)),
                    _ => Err(FaultRecord::new(
                        FaultKind::InvalidInput,
                        FaultStage::Protocol,
                        other,
                        format!("method `{other}` is not implemented"),
                    )),
                }
            }
        }
    }

    /// Resolve a `tools/call` against the catalog and route it to the host
    /// handler or the worker, per the tool's dispatch target.
    fn dispatch_tool_call(
        &mut self,
        params: Value,
        _request_id: Option<Value>,
    ) -> Result<Value, FaultRecord> {
        let envelope = deserialize::<ToolCallEnvelope>(params, "tools/call")?;
        let spec = tool_spec(&envelope.name).ok_or_else(|| {
            FaultRecord::new(
                FaultKind::InvalidInput,
                FaultStage::Host,
                format!("tools/call:{}", envelope.name),
                format!("unknown tool `{}`", envelope.name),
            )
        })?;
        match spec.dispatch {
            DispatchTarget::Host => self.handle_host_tool(&envelope.name, envelope.arguments),
            DispatchTarget::Worker => self.dispatch_worker_tool(spec, envelope.arguments),
        }
    }

    /// Resolve a `resources/read` against the catalog and route it to the
    /// host handler or the worker, per the resource's dispatch target.
    fn dispatch_resource_read(&mut self, params: Value) -> Result<Value, FaultRecord> {
        let args = deserialize::<ReadResourceArgs>(params, "resources/read")?;
        let spec = resource_spec(&args.uri).ok_or_else(|| {
            FaultRecord::new(
                FaultKind::InvalidInput,
                FaultStage::Host,
                format!("resources/read:{}", args.uri),
                format!("unknown resource `{}`", args.uri),
            )
        })?;
        match spec.dispatch {
            DispatchTarget::Host => Ok(Self::handle_host_resource(spec.uri)),
            DispatchTarget::Worker => self.dispatch_worker_operation(
                format!("resources/read:{}", args.uri),
                spec.replay,
                WorkerOperation::ReadResource { uri: args.uri },
            ),
        }
    }

    /// Wrap a worker-dispatched tool call into a `WorkerOperation`.
    fn dispatch_worker_tool(
        &mut self,
        spec: crate::mcp::catalog::ToolSpec,
        arguments: Value,
    ) -> Result<Value, FaultRecord> {
        let operation = format!("tools/call:{}", spec.name);
        self.dispatch_worker_operation(
            operation.clone(),
            spec.replay,
            WorkerOperation::CallTool {
                name: spec.name.to_owned(),
                arguments,
            },
        )
    }

    /// Execute an operation on the worker process. On a retryable fault,
    /// operations whose catalog contract is `SafeReplay` get exactly one
    /// restart-and-retry; all other faults propagate as-is.
    fn dispatch_worker_operation(
        &mut self,
        operation: String,
        replay: ReplayContract,
        worker_operation: WorkerOperation,
    ) -> Result<Value, FaultRecord> {
        let binding = self.require_bound_project(&operation)?;
        self.worker.rebind(binding.project_root.clone());

        // Test hook: crash the worker once for a matching operation key.
        if self.should_crash_worker_once(&operation) {
            self.worker.arm_crash_once();
        }

        let request_id = self.allocate_request_id();
        match self.worker.execute(request_id, worker_operation.clone()) {
            Ok(result) => Ok(result),
            Err(fault) => {
                if replay == ReplayContract::SafeReplay && fault.retryable {
                    self.telemetry.record_retry(&operation);
                    self.telemetry.record_worker_restart();
                    self.worker
                        .restart()
                        .map_err(|restart_fault| restart_fault.mark_retried())?;
                    // Retry reuses the same request id as the failed attempt.
                    match self.worker.execute(request_id, worker_operation) {
                        Ok(result) => Ok(result),
                        Err(retry_fault) => Err(retry_fault.mark_retried()),
                    }
                } else {
                    Err(fault)
                }
            }
        }
    }

    /// Tools served by the host itself (no worker round-trip): project
    /// binding, bundled skills, health, and telemetry.
    fn handle_host_tool(&mut self, name: &str, arguments: Value) -> Result<Value, FaultRecord> {
        match name {
            "project.bind" => {
                let args = deserialize::<ProjectBindArgs>(arguments, "tools/call:project.bind")?;
                let resolved = resolve_project_binding(PathBuf::from(args.path))
                    .map_err(host_store_fault("tools/call:project.bind"))?;
                // Rebind the worker before committing the new binding.
                self.worker.rebind(resolved.binding.project_root.clone());
                self.binding = Some(resolved.binding);
                tool_success(&resolved.status)
            }
            "skill.list" => tool_success(&json!({
                "skills": crate::bundled_skill::bundled_skill_summaries(),
            })),
            "skill.show" => {
                let args = deserialize::<SkillShowArgs>(arguments, "tools/call:skill.show")?;
                // Absent name => the default bundled skill.
                let skill = args.name.as_deref().map_or_else(
                    || Ok(crate::bundled_skill::default_bundled_skill()),
                    |name| {
                        crate::bundled_skill::bundled_skill(name).ok_or_else(|| {
                            FaultRecord::new(
                                FaultKind::InvalidInput,
                                FaultStage::Host,
                                "tools/call:skill.show",
                                format!("unknown bundled skill `{name}`"),
                            )
                        })
                    },
                )?;
                tool_success(&json!({
                    "name": skill.name,
                    "description": skill.description,
                    "resource_uri": skill.resource_uri,
                    "body": skill.body,
                }))
            }
            "system.health" => tool_success(&HealthSnapshot {
                initialization: InitializationHealth {
                    ready: self.session.initialized,
                    seed_captured: self.session.initialize_params.is_some(),
                },
                binding: binding_health(self.binding.as_ref()),
                worker: WorkerHealth {
                    worker_generation: self.worker.generation(),
                    alive: self.worker.is_alive(),
                },
                binary: BinaryHealth {
                    current_executable: self.binary.path.display().to_string(),
                    launch_path_stable: self.binary.launch_path_stable,
                    rollout_pending: self.binary.rollout_pending().unwrap_or(false),
                },
                last_fault: self.telemetry.last_fault.clone(),
            }),
            "system.telemetry" => tool_success(&self.telemetry),
            other => Err(FaultRecord::new(
                FaultKind::InvalidInput,
                FaultStage::Host,
                format!("tools/call:{other}"),
                format!("unknown host tool `{other}`"),
            )),
        }
    }

    /// Serve the two bundled-skill resources directly from the host. Any
    /// other URI is unreachable because the catalog routes only these two
    /// here.
    fn handle_host_resource(uri: &str) -> Value {
        match uri {
            "fidget-spinner://skill/fidget-spinner" => {
                skill_resource(uri, crate::bundled_skill::default_bundled_skill().body)
            }
            "fidget-spinner://skill/frontier-loop" => skill_resource(
                uri,
                crate::bundled_skill::frontier_loop_bundled_skill().body,
            ),
            _ => unreachable!("host resources are catalog-gated"),
        }
    }

    /// Guard: error unless the MCP handshake has fully completed.
    fn require_initialized(&self, operation: &str) -> Result<(), FaultRecord> {
        if self.session.initialized {
            return Ok(());
        }
        Err(FaultRecord::new(
            FaultKind::NotInitialized,
            FaultStage::Host,
            operation,
            "client must call initialize and notifications/initialized before normal operations",
        ))
    }

    /// Guard: return the current project binding or an `Unavailable` fault.
    fn require_bound_project(&self, operation: &str) -> Result<&ProjectBinding, FaultRecord> {
        self.binding.as_ref().ok_or_else(|| {
            FaultRecord::new(
                FaultKind::Unavailable,
                FaultStage::Host,
                operation,
                "project is not bound; call project.bind with the target project root or a nested path inside it",
            )
        })
    }

    /// Hand out the next monotonically increasing worker request id.
    fn allocate_request_id(&mut self) -> HostRequestId {
        let id = HostRequestId(self.next_request_id);
        self.next_request_id += 1;
        id
    }

    /// Roll forward if either a rollout was explicitly requested or the
    /// on-disk binary changed. Only the binary-change path still needs its
    /// rollout recorded here; the request path already recorded it.
    fn maybe_roll_forward(&mut self) -> Result<(), fidget_spinner_store_sqlite::StoreError> {
        let binary_pending = self.binary.rollout_pending()?;
        if !self.rollout_requested && !binary_pending {
            return Ok(());
        }
        if binary_pending && !self.rollout_requested {
            self.telemetry.record_rollout();
        }
        self.roll_forward()
    }

    /// Replace this process with a fresh `mcp serve` of the current binary,
    /// passing all host state through `HOST_STATE_ENV`. On unix this uses
    /// `exec` and only returns on failure; elsewhere rollout is unsupported.
    fn roll_forward(&mut self) -> Result<(), fidget_spinner_store_sqlite::StoreError> {
        let state = HostStateSeed {
            session: self.session.clone(),
            telemetry: self.telemetry.clone(),
            next_request_id: self.next_request_id,
            binding: self.binding.clone().map(ProjectBindingSeed::from),
            worker_generation: self.worker.generation(),
            force_rollout_consumed: self.force_rollout_consumed,
            crash_once_consumed: self.crash_once_consumed,
        };
        let serialized = serde_json::to_string(&state)?;
        let mut command = Command::new(&self.binary.path);
        let _ = command.arg("mcp").arg("serve");
        if let Some(project) = self.config.initial_project.as_ref() {
            let _ = command.arg("--project").arg(project);
        }
        let _ = command.env(HOST_STATE_ENV, serialized);
        #[cfg(unix)]
        {
            // exec() returning at all means it failed.
            let error = command.exec();
            Err(fidget_spinner_store_sqlite::StoreError::Io(error))
        }
        #[cfg(not(unix))]
        {
            return Err(fidget_spinner_store_sqlite::StoreError::Io(io::Error::new(
                io::ErrorKind::Unsupported,
                "host rollout requires unix exec support",
            )));
        }
    }

    /// True when the test-only forced-rollout key matches this operation and
    /// has not yet fired.
    fn should_force_rollout(&self, operation: &str) -> bool {
        self.force_rollout_key
            .as_deref()
            .is_some_and(|key| key == operation)
            && !self.force_rollout_consumed
    }

    /// One-shot check for the test-only worker-crash key; consuming it here
    /// guarantees at most one injected crash per host lifetime (the consumed
    /// flag also survives rollouts via the state seed).
    fn should_crash_worker_once(&mut self, operation: &str) -> bool {
        let should_crash = self
            .crash_once_key
            .as_deref()
            .is_some_and(|key| key == operation)
            && !self.crash_once_consumed;
        if should_crash {
            self.crash_once_consumed = true;
        }
        should_crash
    }
}
+
/// Serialized result returned to the client by `project.bind`, describing
/// what was bound and where its state lives.
#[derive(Debug, Serialize)]
struct ProjectBindStatus {
    // Path as the caller supplied it (may be nested inside the project).
    requested_path: String,
    // Canonical project root resolved by the store.
    project_root: String,
    state_root: String,
    display_name: fidget_spinner_core::NonEmptyText,
    schema: fidget_spinner_core::PayloadSchemaRef,
    // Whether `git rev-parse --show-toplevel` succeeded under the root.
    git_repo_detected: bool,
}
+
/// Pair of the internal binding kept by the host and the client-facing
/// status report, produced together by `resolve_project_binding`.
struct ResolvedProjectBinding {
    binding: ProjectBinding,
    status: ProjectBindStatus,
}
+
+fn resolve_project_binding(
+ requested_path: PathBuf,
+) -> Result<ResolvedProjectBinding, fidget_spinner_store_sqlite::StoreError> {
+ let store = crate::open_store(&requested_path)?;
+ Ok(ResolvedProjectBinding {
+ binding: ProjectBinding {
+ requested_path: requested_path.clone(),
+ project_root: PathBuf::from(store.project_root().as_str()),
+ },
+ status: ProjectBindStatus {
+ requested_path: requested_path.display().to_string(),
+ project_root: store.project_root().to_string(),
+ state_root: store.state_root().to_string(),
+ display_name: store.config().display_name.clone(),
+ schema: store.schema().schema_ref(),
+ git_repo_detected: crate::run_git(
+ store.project_root(),
+ &["rev-parse", "--show-toplevel"],
+ )?
+ .is_some(),
+ },
+ })
+}
+
+fn binding_health(binding: Option<&ProjectBinding>) -> BindingHealth {
+ match binding {
+ Some(binding) => BindingHealth {
+ bound: true,
+ requested_path: Some(binding.requested_path.display().to_string()),
+ project_root: Some(binding.project_root.display().to_string()),
+ state_root: Some(
+ binding
+ .project_root
+ .join(fidget_spinner_store_sqlite::STORE_DIR_NAME)
+ .display()
+ .to_string(),
+ ),
+ },
+ None => BindingHealth {
+ bound: false,
+ requested_path: None,
+ project_root: None,
+ state_root: None,
+ },
+ }
+}
+
+fn skill_resource(uri: &str, body: &str) -> Value {
+ json!({
+ "contents": [{
+ "uri": uri,
+ "mimeType": "text/markdown",
+ "text": body,
+ }]
+ })
+}
+
/// Rehydrate the in-memory binding from a rollout seed (field-for-field).
impl From<ProjectBindingSeed> for ProjectBinding {
    fn from(value: ProjectBindingSeed) -> Self {
        Self {
            requested_path: value.requested_path,
            project_root: value.project_root,
        }
    }
}
+
/// Convert the in-memory binding into its serializable rollout seed form.
impl From<ProjectBinding> for ProjectBindingSeed {
    fn from(value: ProjectBinding) -> Self {
        Self {
            requested_path: value.requested_path,
            project_root: value.project_root,
        }
    }
}
+
+fn restore_host_state() -> Option<HostStateSeed> {
+ let raw = std::env::var(HOST_STATE_ENV).ok()?;
+ serde_json::from_str::<HostStateSeed>(&raw).ok()
+}
+
+fn deserialize<T: for<'de> serde::Deserialize<'de>>(
+ value: Value,
+ operation: &str,
+) -> Result<T, FaultRecord> {
+ serde_json::from_value(value).map_err(|error| {
+ FaultRecord::new(
+ FaultKind::InvalidInput,
+ FaultStage::Protocol,
+ operation,
+ format!("invalid params: {error}"),
+ )
+ })
+}
+
+fn operation_key(method: &str, params: &Value) -> String {
+ match method {
+ "tools/call" => params.get("name").and_then(Value::as_str).map_or_else(
+ || "tools/call".to_owned(),
+ |name| format!("tools/call:{name}"),
+ ),
+ "resources/read" => params.get("uri").and_then(Value::as_str).map_or_else(
+ || "resources/read".to_owned(),
+ |uri| format!("resources/read:{uri}"),
+ ),
+ other => other.to_owned(),
+ }
+}
+
+fn tool_success(value: &impl Serialize) -> Result<Value, FaultRecord> {
+ Ok(json!({
+ "content": [{
+ "type": "text",
+ "text": crate::to_pretty_json(value).map_err(|error| {
+ FaultRecord::new(FaultKind::Internal, FaultStage::Host, "tool_success", error.to_string())
+ })?,
+ }],
+ "structuredContent": serde_json::to_value(value).map_err(|error| {
+ FaultRecord::new(FaultKind::Internal, FaultStage::Host, "tool_success", error.to_string())
+ })?,
+ "isError": false,
+ }))
+}
+
+fn host_store_fault(
+ operation: &'static str,
+) -> impl FnOnce(fidget_spinner_store_sqlite::StoreError) -> FaultRecord {
+ move |error| {
+ FaultRecord::new(
+ FaultKind::InvalidInput,
+ FaultStage::Host,
+ operation,
+ error.to_string(),
+ )
+ }
+}
+
/// Wrap a result value in a JSON-RPC 2.0 success envelope for `id`.
fn jsonrpc_result(id: Value, result: Value) -> Value {
    json!({
        "jsonrpc": "2.0",
        "id": id,
        "result": result,
    })
}
+
/// Wrap a fault in a JSON-RPC 2.0 error envelope for `id` (pass `Value::Null`
/// when no request id could be recovered).
fn jsonrpc_error(id: Value, fault: FaultRecord) -> Value {
    json!({
        "jsonrpc": "2.0",
        "id": id,
        "error": fault.into_jsonrpc_error(),
    })
}
+
+fn write_message(
+ stdout: &mut impl Write,
+ message: &Value,
+) -> Result<(), fidget_spinner_store_sqlite::StoreError> {
+ serde_json::to_writer(&mut *stdout, message)?;
+ stdout.write_all(b"\n")?;
+ stdout.flush()?;
+ Ok(())
+}
+
/// Wire shape of `tools/call` params: tool name plus its arguments object
/// (defaults to `{}` when the client omits `arguments`).
#[derive(Debug, serde::Deserialize)]
struct ToolCallEnvelope {
    name: String,
    #[serde(default = "empty_json_object")]
    arguments: Value,
}
+
+fn empty_json_object() -> Value {
+ json!({})
+}
+
/// Wire shape of `resources/read` params: the resource URI to fetch.
#[derive(Debug, serde::Deserialize)]
struct ReadResourceArgs {
    uri: String,
}
+
/// Arguments for the `project.bind` host tool: the project root (or a path
/// nested inside it) to bind the session to.
#[derive(Debug, serde::Deserialize)]
struct ProjectBindArgs {
    path: String,
}
+
/// Arguments for the `skill.show` host tool; `None` selects the default
/// bundled skill.
#[derive(Debug, serde::Deserialize)]
struct SkillShowArgs {
    name: Option<String>,
}
diff --git a/crates/fidget-spinner-cli/src/mcp/mod.rs b/crates/fidget-spinner-cli/src/mcp/mod.rs
new file mode 100644
index 0000000..adea066
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/mod.rs
@@ -0,0 +1,10 @@
+mod catalog;
+mod fault;
+mod host;
+mod protocol;
+mod service;
+mod telemetry;
+mod worker;
+
+pub(crate) use host::serve;
+pub(crate) use worker::serve as serve_worker;
diff --git a/crates/fidget-spinner-cli/src/mcp/protocol.rs b/crates/fidget-spinner-cli/src/mcp/protocol.rs
new file mode 100644
index 0000000..1f24f37
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/protocol.rs
@@ -0,0 +1,86 @@
+use std::path::PathBuf;
+
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
+
+use crate::mcp::telemetry::ServerTelemetry;
+
/// MCP protocol revision advertised in the `initialize` response.
pub(crate) const PROTOCOL_VERSION: &str = "2025-11-25";
/// Server name reported in `serverInfo`.
pub(crate) const SERVER_NAME: &str = "fidget-spinner";
/// Env var carrying the serialized `HostStateSeed` across an exec-based
/// host rollout.
pub(crate) const HOST_STATE_ENV: &str = "FIDGET_SPINNER_MCP_HOST_STATE";
/// Test-only: operation key that triggers one forced host rollout.
pub(crate) const FORCE_ROLLOUT_ENV: &str = "FIDGET_SPINNER_MCP_TEST_FORCE_ROLLOUT_KEY";
/// Test-only: operation key that arms a one-shot worker crash.
pub(crate) const CRASH_ONCE_ENV: &str = "FIDGET_SPINNER_MCP_TEST_HOST_CRASH_ONCE_KEY";
/// Test-only: operation key that injects a single transient worker fault.
pub(crate) const TRANSIENT_ONCE_ENV: &str = "FIDGET_SPINNER_MCP_TEST_WORKER_TRANSIENT_ONCE_KEY";
/// Test-only: presumably a marker used to make the transient injection
/// one-shot across worker restarts — confirm against the injection site.
pub(crate) const TRANSIENT_ONCE_MARKER_ENV: &str =
    "FIDGET_SPINNER_MCP_TEST_WORKER_TRANSIENT_ONCE_MARKER";
+
/// MCP handshake state that must survive a host rollout: the captured
/// `initialize` params and whether `notifications/initialized` arrived.
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct SessionSeed {
    pub initialize_params: Option<Value>,
    pub initialized: bool,
}
+
/// Complete host state serialized into `HOST_STATE_ENV` before exec'ing the
/// replacement binary, and restored by the new process on startup.
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct HostStateSeed {
    pub session: SessionSeed,
    pub telemetry: ServerTelemetry,
    // Next worker request id; the restorer clamps this to at least 1.
    pub next_request_id: u64,
    pub binding: Option<ProjectBindingSeed>,
    pub worker_generation: u64,
    // One-shot test-injection flags, carried so they fire at most once
    // across rollouts.
    pub force_rollout_consumed: bool,
    pub crash_once_consumed: bool,
}
+
/// Serializable form of the host's project binding, used inside
/// `HostStateSeed` for rollout handoff.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct ProjectBindingSeed {
    pub requested_path: PathBuf,
    pub project_root: PathBuf,
}
+
/// Monotonic id the host assigns to each worker request, serialized as a
/// bare integer (`#[serde(transparent)]`) on the wire.
#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
#[serde(transparent)]
pub(crate) struct HostRequestId(pub u64);
+
/// Host -> worker wire message, tagged by `kind`; currently the only variant
/// executes one operation under a host-assigned request id.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
#[serde(tag = "kind", rename_all = "snake_case")]
pub(crate) enum WorkerRequest {
    Execute {
        id: HostRequestId,
        operation: WorkerOperation,
    },
}
+
/// The two operations the host delegates to the worker process: a tool
/// invocation or a resource read.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
#[serde(tag = "kind", rename_all = "snake_case")]
pub(crate) enum WorkerOperation {
    CallTool { name: String, arguments: Value },
    ReadResource { uri: String },
}
+
/// Worker -> host wire reply: echoes the request id and carries the outcome.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct WorkerResponse {
    pub id: HostRequestId,
    pub outcome: WorkerOutcome,
}
+
/// Result of one worker operation, tagged by `status`: either a JSON result
/// value or a structured fault record.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
#[serde(tag = "status", rename_all = "snake_case")]
pub(crate) enum WorkerOutcome {
    Success {
        result: Value,
    },
    Fault {
        fault: crate::mcp::fault::FaultRecord,
    },
}
+
/// Cheap identity for the executable on disk (size + mtime); presumably
/// compared to detect a replaced binary for rollout — confirm against
/// `BinaryRuntime::rollout_pending`.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct BinaryFingerprint {
    pub length_bytes: u64,
    pub modified_unix_nanos: u128,
}
+
/// Configuration the supervisor needs to spawn a worker: the executable to
/// run with `mcp worker`.
#[derive(Clone, Debug)]
pub(crate) struct WorkerSpawnConfig {
    pub executable: PathBuf,
}
diff --git a/crates/fidget-spinner-cli/src/mcp/service.rs b/crates/fidget-spinner-cli/src/mcp/service.rs
new file mode 100644
index 0000000..a7cae10
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/service.rs
@@ -0,0 +1,813 @@
+use std::collections::BTreeMap;
+use std::fs;
+
+use camino::{Utf8Path, Utf8PathBuf};
+use fidget_spinner_core::{
+ AnnotationVisibility, CodeSnapshotRef, CommandRecipe, ExecutionBackend, FrontierContract,
+ FrontierNote, FrontierVerdict, MetricObservation, MetricSpec, MetricUnit, NodeAnnotation,
+ NodeClass, NodePayload, NonEmptyText,
+};
+use fidget_spinner_store_sqlite::{
+ CloseExperimentRequest, CreateFrontierRequest, CreateNodeRequest, EdgeAttachment,
+ EdgeAttachmentDirection, ListNodesQuery, ProjectStore, StoreError,
+};
+use serde::Deserialize;
+use serde_json::{Map, Value, json};
+
+use crate::mcp::fault::{FaultKind, FaultRecord, FaultStage};
+use crate::mcp::protocol::{TRANSIENT_ONCE_ENV, TRANSIENT_ONCE_MARKER_ENV, WorkerOperation};
+
/// Worker-side executor: owns the project store and performs the DAG
/// operations the host delegates over the worker protocol.
pub(crate) struct WorkerService {
    store: ProjectStore,
}
+
+impl WorkerService {
+ pub fn new(project: &Utf8Path) -> Result<Self, StoreError> {
+ Ok(Self {
+ store: crate::open_store(project.as_std_path())?,
+ })
+ }
+
+ pub fn execute(&mut self, operation: WorkerOperation) -> Result<Value, FaultRecord> {
+ let operation_key = match &operation {
+ WorkerOperation::CallTool { name, .. } => format!("tools/call:{name}"),
+ WorkerOperation::ReadResource { uri } => format!("resources/read:{uri}"),
+ };
+ Self::maybe_inject_transient(&operation_key)?;
+
+ match operation {
+ WorkerOperation::CallTool { name, arguments } => self.call_tool(&name, arguments),
+ WorkerOperation::ReadResource { uri } => self.read_resource(&uri),
+ }
+ }
+
+ fn call_tool(&mut self, name: &str, arguments: Value) -> Result<Value, FaultRecord> {
+ match name {
+ "project.status" => tool_success(&json!({
+ "project_root": self.store.project_root(),
+ "state_root": self.store.state_root(),
+ "display_name": self.store.config().display_name,
+ "schema": self.store.schema().schema_ref(),
+ "git_repo_detected": crate::run_git(self.store.project_root(), &["rev-parse", "--show-toplevel"])
+ .map_err(store_fault("tools/call:project.status"))?
+ .is_some(),
+ })),
+ "project.schema" => tool_success(self.store.schema()),
+ "frontier.list" => tool_success(
+ &self
+ .store
+ .list_frontiers()
+ .map_err(store_fault("tools/call:frontier.list"))?,
+ ),
+ "frontier.status" => {
+ let args = deserialize::<FrontierStatusToolArgs>(arguments)?;
+ tool_success(
+ &self
+ .store
+ .frontier_projection(
+ crate::parse_frontier_id(&args.frontier_id)
+ .map_err(store_fault("tools/call:frontier.status"))?,
+ )
+ .map_err(store_fault("tools/call:frontier.status"))?,
+ )
+ }
+ "frontier.init" => {
+ let args = deserialize::<FrontierInitToolArgs>(arguments)?;
+ let initial_checkpoint = self
+ .store
+ .auto_capture_checkpoint(
+ NonEmptyText::new(
+ args.seed_summary
+ .unwrap_or_else(|| "initial champion checkpoint".to_owned()),
+ )
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ )
+ .map_err(store_fault("tools/call:frontier.init"))?;
+ let projection = self
+ .store
+ .create_frontier(CreateFrontierRequest {
+ label: NonEmptyText::new(args.label)
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ contract_title: NonEmptyText::new(args.contract_title)
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ contract_summary: args
+ .contract_summary
+ .map(NonEmptyText::new)
+ .transpose()
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ contract: FrontierContract {
+ objective: NonEmptyText::new(args.objective)
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ evaluation: fidget_spinner_core::EvaluationProtocol {
+ benchmark_suites: crate::to_text_set(args.benchmark_suites)
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ primary_metric: MetricSpec {
+ metric_key: NonEmptyText::new(args.primary_metric.key)
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ unit: parse_metric_unit_name(&args.primary_metric.unit)
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ objective: crate::parse_optimization_objective(
+ &args.primary_metric.objective,
+ )
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ },
+ supporting_metrics: args
+ .supporting_metrics
+ .into_iter()
+ .map(metric_spec_from_wire)
+ .collect::<Result<_, _>>()
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ },
+ promotion_criteria: crate::to_text_vec(args.promotion_criteria)
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ },
+ initial_checkpoint,
+ })
+ .map_err(store_fault("tools/call:frontier.init"))?;
+ tool_success(&projection)
+ }
+ "node.create" => {
+ let args = deserialize::<NodeCreateToolArgs>(arguments)?;
+ let node = self
+ .store
+ .add_node(CreateNodeRequest {
+ class: parse_node_class_name(&args.class)
+ .map_err(store_fault("tools/call:node.create"))?,
+ frontier_id: args
+ .frontier_id
+ .as_deref()
+ .map(crate::parse_frontier_id)
+ .transpose()
+ .map_err(store_fault("tools/call:node.create"))?,
+ title: NonEmptyText::new(args.title)
+ .map_err(store_fault("tools/call:node.create"))?,
+ summary: args
+ .summary
+ .map(NonEmptyText::new)
+ .transpose()
+ .map_err(store_fault("tools/call:node.create"))?,
+ payload: NodePayload::with_schema(
+ self.store.schema().schema_ref(),
+ args.payload.unwrap_or_default(),
+ ),
+ annotations: tool_annotations(args.annotations)
+ .map_err(store_fault("tools/call:node.create"))?,
+ attachments: lineage_attachments(args.parents)
+ .map_err(store_fault("tools/call:node.create"))?,
+ })
+ .map_err(store_fault("tools/call:node.create"))?;
+ tool_success(&node)
+ }
+ "change.record" => {
+ let args = deserialize::<ChangeRecordToolArgs>(arguments)?;
+ let mut fields = Map::new();
+ let _ = fields.insert("body".to_owned(), Value::String(args.body));
+ if let Some(hypothesis) = args.hypothesis {
+ let _ = fields.insert("hypothesis".to_owned(), Value::String(hypothesis));
+ }
+ if let Some(base_checkpoint_id) = args.base_checkpoint_id {
+ let _ = fields.insert(
+ "base_checkpoint_id".to_owned(),
+ Value::String(base_checkpoint_id),
+ );
+ }
+ if let Some(benchmark_suite) = args.benchmark_suite {
+ let _ =
+ fields.insert("benchmark_suite".to_owned(), Value::String(benchmark_suite));
+ }
+ let node = self
+ .store
+ .add_node(CreateNodeRequest {
+ class: NodeClass::Change,
+ frontier_id: Some(
+ crate::parse_frontier_id(&args.frontier_id)
+ .map_err(store_fault("tools/call:change.record"))?,
+ ),
+ title: NonEmptyText::new(args.title)
+ .map_err(store_fault("tools/call:change.record"))?,
+ summary: args
+ .summary
+ .map(NonEmptyText::new)
+ .transpose()
+ .map_err(store_fault("tools/call:change.record"))?,
+ payload: NodePayload::with_schema(self.store.schema().schema_ref(), fields),
+ annotations: tool_annotations(args.annotations)
+ .map_err(store_fault("tools/call:change.record"))?,
+ attachments: lineage_attachments(args.parents)
+ .map_err(store_fault("tools/call:change.record"))?,
+ })
+ .map_err(store_fault("tools/call:change.record"))?;
+ tool_success(&node)
+ }
+ "node.list" => {
+ let args = deserialize::<NodeListToolArgs>(arguments)?;
+ let nodes = self
+ .store
+ .list_nodes(ListNodesQuery {
+ frontier_id: args
+ .frontier_id
+ .as_deref()
+ .map(crate::parse_frontier_id)
+ .transpose()
+ .map_err(store_fault("tools/call:node.list"))?,
+ class: args
+ .class
+ .as_deref()
+ .map(parse_node_class_name)
+ .transpose()
+ .map_err(store_fault("tools/call:node.list"))?,
+ include_archived: args.include_archived,
+ limit: args.limit.unwrap_or(20),
+ })
+ .map_err(store_fault("tools/call:node.list"))?;
+ tool_success(&nodes)
+ }
+ "node.read" => {
+ let args = deserialize::<NodeReadToolArgs>(arguments)?;
+ let node_id = crate::parse_node_id(&args.node_id)
+ .map_err(store_fault("tools/call:node.read"))?;
+ let node = self
+ .store
+ .get_node(node_id)
+ .map_err(store_fault("tools/call:node.read"))?
+ .ok_or_else(|| {
+ FaultRecord::new(
+ FaultKind::InvalidInput,
+ FaultStage::Store,
+ "tools/call:node.read",
+ format!("node {node_id} was not found"),
+ )
+ })?;
+ tool_success(&node)
+ }
+ "node.annotate" => {
+ let args = deserialize::<NodeAnnotateToolArgs>(arguments)?;
+ let annotation = NodeAnnotation {
+ id: fidget_spinner_core::AnnotationId::fresh(),
+ visibility: if args.visible {
+ AnnotationVisibility::Visible
+ } else {
+ AnnotationVisibility::HiddenByDefault
+ },
+ label: args
+ .label
+ .map(NonEmptyText::new)
+ .transpose()
+ .map_err(store_fault("tools/call:node.annotate"))?,
+ body: NonEmptyText::new(args.body)
+ .map_err(store_fault("tools/call:node.annotate"))?,
+ created_at: time::OffsetDateTime::now_utc(),
+ };
+ self.store
+ .annotate_node(
+ crate::parse_node_id(&args.node_id)
+ .map_err(store_fault("tools/call:node.annotate"))?,
+ annotation,
+ )
+ .map_err(store_fault("tools/call:node.annotate"))?;
+ tool_success(&json!({"annotated": args.node_id}))
+ }
+ "node.archive" => {
+ let args = deserialize::<NodeArchiveToolArgs>(arguments)?;
+ self.store
+ .archive_node(
+ crate::parse_node_id(&args.node_id)
+ .map_err(store_fault("tools/call:node.archive"))?,
+ )
+ .map_err(store_fault("tools/call:node.archive"))?;
+ tool_success(&json!({"archived": args.node_id}))
+ }
+ "note.quick" => {
+ let args = deserialize::<QuickNoteToolArgs>(arguments)?;
+ let node = self
+ .store
+ .add_node(CreateNodeRequest {
+ class: NodeClass::Note,
+ frontier_id: args
+ .frontier_id
+ .as_deref()
+ .map(crate::parse_frontier_id)
+ .transpose()
+ .map_err(store_fault("tools/call:note.quick"))?,
+ title: NonEmptyText::new(args.title)
+ .map_err(store_fault("tools/call:note.quick"))?,
+ summary: None,
+ payload: NodePayload::with_schema(
+ self.store.schema().schema_ref(),
+ crate::json_object(json!({ "body": args.body }))
+ .map_err(store_fault("tools/call:note.quick"))?,
+ ),
+ annotations: tool_annotations(args.annotations)
+ .map_err(store_fault("tools/call:note.quick"))?,
+ attachments: lineage_attachments(args.parents)
+ .map_err(store_fault("tools/call:note.quick"))?,
+ })
+ .map_err(store_fault("tools/call:note.quick"))?;
+ tool_success(&node)
+ }
+ "research.record" => {
+ let args = deserialize::<ResearchRecordToolArgs>(arguments)?;
+ let node = self
+ .store
+ .add_node(CreateNodeRequest {
+ class: NodeClass::Research,
+ frontier_id: args
+ .frontier_id
+ .as_deref()
+ .map(crate::parse_frontier_id)
+ .transpose()
+ .map_err(store_fault("tools/call:research.record"))?,
+ title: NonEmptyText::new(args.title)
+ .map_err(store_fault("tools/call:research.record"))?,
+ summary: args
+ .summary
+ .map(NonEmptyText::new)
+ .transpose()
+ .map_err(store_fault("tools/call:research.record"))?,
+ payload: NodePayload::with_schema(
+ self.store.schema().schema_ref(),
+ crate::json_object(json!({ "body": args.body }))
+ .map_err(store_fault("tools/call:research.record"))?,
+ ),
+ annotations: tool_annotations(args.annotations)
+ .map_err(store_fault("tools/call:research.record"))?,
+ attachments: lineage_attachments(args.parents)
+ .map_err(store_fault("tools/call:research.record"))?,
+ })
+ .map_err(store_fault("tools/call:research.record"))?;
+ tool_success(&node)
+ }
+ "experiment.close" => {
+ let args = deserialize::<ExperimentCloseToolArgs>(arguments)?;
+ let frontier_id = crate::parse_frontier_id(&args.frontier_id)
+ .map_err(store_fault("tools/call:experiment.close"))?;
+ let snapshot = self
+ .store
+ .auto_capture_checkpoint(
+ NonEmptyText::new(args.candidate_summary.clone())
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ )
+ .map_err(store_fault("tools/call:experiment.close"))?
+ .map(|seed| seed.snapshot)
+ .ok_or_else(|| {
+ FaultRecord::new(
+ FaultKind::Internal,
+ FaultStage::Store,
+ "tools/call:experiment.close",
+ format!(
+ "git repository inspection failed for {}",
+ self.store.project_root()
+ ),
+ )
+ })?;
+ let receipt = self
+ .store
+ .close_experiment(CloseExperimentRequest {
+ frontier_id,
+ base_checkpoint_id: crate::parse_checkpoint_id(&args.base_checkpoint_id)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ change_node_id: crate::parse_node_id(&args.change_node_id)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ candidate_summary: NonEmptyText::new(args.candidate_summary)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ candidate_snapshot: snapshot,
+ run_title: NonEmptyText::new(args.run.title)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ run_summary: args
+ .run
+ .summary
+ .map(NonEmptyText::new)
+ .transpose()
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ backend: parse_backend_name(&args.run.backend)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ benchmark_suite: NonEmptyText::new(args.run.benchmark_suite)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ command: command_recipe_from_wire(
+ args.run.command,
+ self.store.project_root(),
+ )
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ code_snapshot: Some(
+ capture_code_snapshot(self.store.project_root())
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ ),
+ primary_metric: metric_observation_from_wire(args.primary_metric)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ supporting_metrics: args
+ .supporting_metrics
+ .into_iter()
+ .map(metric_observation_from_wire)
+ .collect::<Result<Vec<_>, _>>()
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ note: FrontierNote {
+ summary: NonEmptyText::new(args.note.summary)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ next_hypotheses: crate::to_text_vec(args.note.next_hypotheses)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ },
+ verdict: parse_verdict_name(&args.verdict)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ decision_title: NonEmptyText::new(args.decision_title)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ decision_rationale: NonEmptyText::new(args.decision_rationale)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ analysis_node_id: args
+ .analysis_node_id
+ .as_deref()
+ .map(crate::parse_node_id)
+ .transpose()
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ })
+ .map_err(store_fault("tools/call:experiment.close"))?;
+ tool_success(&receipt)
+ }
+ other => Err(FaultRecord::new(
+ FaultKind::InvalidInput,
+ FaultStage::Worker,
+ format!("tools/call:{other}"),
+ format!("unknown tool `{other}`"),
+ )),
+ }
+ }
+
+ fn read_resource(&mut self, uri: &str) -> Result<Value, FaultRecord> {
+ match uri {
+ "fidget-spinner://project/config" => Ok(json!({
+ "contents": [{
+ "uri": uri,
+ "mimeType": "application/json",
+ "text": crate::to_pretty_json(self.store.config())
+ .map_err(store_fault("resources/read:fidget-spinner://project/config"))?,
+ }]
+ })),
+ "fidget-spinner://project/schema" => Ok(json!({
+ "contents": [{
+ "uri": uri,
+ "mimeType": "application/json",
+ "text": crate::to_pretty_json(self.store.schema())
+ .map_err(store_fault("resources/read:fidget-spinner://project/schema"))?,
+ }]
+ })),
+ _ => Err(FaultRecord::new(
+ FaultKind::InvalidInput,
+ FaultStage::Worker,
+ format!("resources/read:{uri}"),
+ format!("unknown resource `{uri}`"),
+ )),
+ }
+ }
+
+ fn maybe_inject_transient(operation: &str) -> Result<(), FaultRecord> {
+ let Some(target_operation) = std::env::var_os(TRANSIENT_ONCE_ENV) else {
+ return Ok(());
+ };
+ let target_operation = target_operation.to_string_lossy();
+ if target_operation != operation {
+ return Ok(());
+ }
+ let Some(marker_path) = std::env::var_os(TRANSIENT_ONCE_MARKER_ENV) else {
+ return Ok(());
+ };
+ if Utf8PathBuf::from(marker_path.to_string_lossy().into_owned()).exists() {
+ return Ok(());
+ }
+ fs::write(&marker_path, b"triggered").map_err(|error| {
+ FaultRecord::new(
+ FaultKind::Internal,
+ FaultStage::Worker,
+ operation,
+ format!("failed to write transient marker: {error}"),
+ )
+ })?;
+ Err(FaultRecord::new(
+ FaultKind::Transient,
+ FaultStage::Worker,
+ operation,
+ format!("injected transient fault for {operation}"),
+ )
+ .retryable(None))
+ }
+}
+
+fn deserialize<T: for<'de> Deserialize<'de>>(value: Value) -> Result<T, FaultRecord> {
+ serde_json::from_value(value).map_err(|error| {
+ FaultRecord::new(
+ FaultKind::InvalidInput,
+ FaultStage::Protocol,
+ "worker.deserialize",
+ format!("invalid params: {error}"),
+ )
+ })
+}
+
+fn tool_success(value: &impl serde::Serialize) -> Result<Value, FaultRecord> {
+ Ok(json!({
+ "content": [{
+ "type": "text",
+ "text": crate::to_pretty_json(value).map_err(store_fault("worker.tool_success"))?,
+ }],
+ "structuredContent": serde_json::to_value(value)
+ .map_err(store_fault("worker.tool_success"))?,
+ "isError": false,
+ }))
+}
+
+fn store_fault<E>(operation: &'static str) -> impl FnOnce(E) -> FaultRecord
+where
+ E: std::fmt::Display,
+{
+ move |error| {
+ FaultRecord::new(
+ classify_fault_kind(&error.to_string()),
+ FaultStage::Store,
+ operation,
+ error.to_string(),
+ )
+ }
+}
+
+fn classify_fault_kind(message: &str) -> FaultKind {
+ if message.contains("was not found")
+ || message.contains("invalid")
+ || message.contains("unknown")
+ || message.contains("empty")
+ {
+ FaultKind::InvalidInput
+ } else {
+ FaultKind::Internal
+ }
+}
+
+fn tool_annotations(raw: Vec<WireAnnotation>) -> Result<Vec<NodeAnnotation>, StoreError> {
+ raw.into_iter()
+ .map(|annotation| {
+ Ok(NodeAnnotation {
+ id: fidget_spinner_core::AnnotationId::fresh(),
+ visibility: if annotation.visible {
+ AnnotationVisibility::Visible
+ } else {
+ AnnotationVisibility::HiddenByDefault
+ },
+ label: annotation.label.map(NonEmptyText::new).transpose()?,
+ body: NonEmptyText::new(annotation.body)?,
+ created_at: time::OffsetDateTime::now_utc(),
+ })
+ })
+ .collect()
+}
+
+fn lineage_attachments(parents: Vec<String>) -> Result<Vec<EdgeAttachment>, StoreError> {
+ parents
+ .into_iter()
+ .map(|parent| {
+ Ok(EdgeAttachment {
+ node_id: crate::parse_node_id(&parent)?,
+ kind: fidget_spinner_core::EdgeKind::Lineage,
+ direction: EdgeAttachmentDirection::ExistingToNew,
+ })
+ })
+ .collect()
+}
+
+fn metric_spec_from_wire(raw: WireMetricSpec) -> Result<MetricSpec, StoreError> {
+ Ok(MetricSpec {
+ metric_key: NonEmptyText::new(raw.key)?,
+ unit: parse_metric_unit_name(&raw.unit)?,
+ objective: crate::parse_optimization_objective(&raw.objective)?,
+ })
+}
+
+fn metric_observation_from_wire(
+ raw: WireMetricObservation,
+) -> Result<MetricObservation, StoreError> {
+ Ok(MetricObservation {
+ metric_key: NonEmptyText::new(raw.key)?,
+ unit: parse_metric_unit_name(&raw.unit)?,
+ objective: crate::parse_optimization_objective(&raw.objective)?,
+ value: raw.value,
+ })
+}
+
+fn command_recipe_from_wire(
+ raw: WireRunCommand,
+ project_root: &Utf8Path,
+) -> Result<CommandRecipe, StoreError> {
+ let working_directory = raw
+ .working_directory
+ .map(Utf8PathBuf::from)
+ .unwrap_or_else(|| project_root.to_path_buf());
+ CommandRecipe::new(
+ working_directory,
+ crate::to_text_vec(raw.argv)?,
+ raw.env.into_iter().collect::<BTreeMap<_, _>>(),
+ )
+ .map_err(StoreError::from)
+}
+
/// Captures a snapshot reference for the project's current code state.
///
/// Thin module-local alias for [`crate::capture_code_snapshot`], kept so the
/// worker code reads uniformly alongside the other `parse_*`/`*_from_wire`
/// helpers in this module.
fn capture_code_snapshot(project_root: &Utf8Path) -> Result<CodeSnapshotRef, StoreError> {
    crate::capture_code_snapshot(project_root)
}
+
+fn parse_node_class_name(raw: &str) -> Result<NodeClass, StoreError> {
+ match raw {
+ "contract" => Ok(NodeClass::Contract),
+ "change" => Ok(NodeClass::Change),
+ "run" => Ok(NodeClass::Run),
+ "analysis" => Ok(NodeClass::Analysis),
+ "decision" => Ok(NodeClass::Decision),
+ "research" => Ok(NodeClass::Research),
+ "enabling" => Ok(NodeClass::Enabling),
+ "note" => Ok(NodeClass::Note),
+ other => Err(crate::invalid_input(format!(
+ "unknown node class `{other}`"
+ ))),
+ }
+}
+
/// Parses a wire metric-unit name into `MetricUnit`.
///
/// Thin module-local alias for [`crate::parse_metric_unit`], kept for naming
/// symmetry with the other `parse_*_name` helpers in this module.
fn parse_metric_unit_name(raw: &str) -> Result<MetricUnit, StoreError> {
    crate::parse_metric_unit(raw)
}
+
+fn parse_backend_name(raw: &str) -> Result<ExecutionBackend, StoreError> {
+ match raw {
+ "local_process" => Ok(ExecutionBackend::LocalProcess),
+ "worktree_process" => Ok(ExecutionBackend::WorktreeProcess),
+ "ssh_process" => Ok(ExecutionBackend::SshProcess),
+ other => Err(crate::invalid_input(format!("unknown backend `{other}`"))),
+ }
+}
+
+fn parse_verdict_name(raw: &str) -> Result<FrontierVerdict, StoreError> {
+ match raw {
+ "promote_to_champion" => Ok(FrontierVerdict::PromoteToChampion),
+ "keep_on_frontier" => Ok(FrontierVerdict::KeepOnFrontier),
+ "revert_to_champion" => Ok(FrontierVerdict::RevertToChampion),
+ "archive_dead_end" => Ok(FrontierVerdict::ArchiveDeadEnd),
+ "needs_more_evidence" => Ok(FrontierVerdict::NeedsMoreEvidence),
+ other => Err(crate::invalid_input(format!("unknown verdict `{other}`"))),
+ }
+}
+
/// Wire arguments for the `frontier.status` tool call.
#[derive(Debug, Deserialize)]
struct FrontierStatusToolArgs {
    // Frontier id in its wire (string) form; parsed by the handler.
    frontier_id: String,
}
+
/// Wire arguments for the `frontier.init` tool call (handler is above this
/// view); fields are validated/parsed into store types by the handler.
#[derive(Debug, Deserialize)]
struct FrontierInitToolArgs {
    label: String,
    objective: String,
    contract_title: String,
    contract_summary: Option<String>,
    benchmark_suites: Vec<String>,
    promotion_criteria: Vec<String>,
    primary_metric: WireMetricSpec,
    // Optional in the wire payload; defaults to an empty list.
    #[serde(default)]
    supporting_metrics: Vec<WireMetricSpec>,
    seed_summary: Option<String>,
}
+
/// Wire arguments for the `node.create` tool call (handler is above this
/// view).
#[derive(Debug, Deserialize)]
struct NodeCreateToolArgs {
    // Node class in its wire form; see `parse_node_class_name`.
    class: String,
    frontier_id: Option<String>,
    title: String,
    summary: Option<String>,
    // Optional free-form JSON payload; defaults to absent.
    #[serde(default)]
    payload: Option<Map<String, Value>>,
    #[serde(default)]
    annotations: Vec<WireAnnotation>,
    // Parent node ids; turned into lineage edges via `lineage_attachments`.
    #[serde(default)]
    parents: Vec<String>,
}
+
/// Wire arguments for the `change.record` tool call (handler is above this
/// view).
#[derive(Debug, Deserialize)]
struct ChangeRecordToolArgs {
    frontier_id: String,
    title: String,
    summary: Option<String>,
    body: String,
    hypothesis: Option<String>,
    base_checkpoint_id: Option<String>,
    benchmark_suite: Option<String>,
    #[serde(default)]
    annotations: Vec<WireAnnotation>,
    // Parent node ids; turned into lineage edges via `lineage_attachments`.
    #[serde(default)]
    parents: Vec<String>,
}
+
/// Wire arguments for the `node.list` tool call. All filters are optional;
/// the handler defaults `limit` to 20 when absent.
#[derive(Debug, Deserialize)]
struct NodeListToolArgs {
    frontier_id: Option<String>,
    // Node class filter in wire form; see `parse_node_class_name`.
    class: Option<String>,
    // Defaults to false: archived nodes are excluded unless requested.
    #[serde(default)]
    include_archived: bool,
    limit: Option<u32>,
}
+
/// Wire arguments for the `node.read` tool call.
#[derive(Debug, Deserialize)]
struct NodeReadToolArgs {
    // Node id in its wire (string) form; parsed by the handler.
    node_id: String,
}
+
/// Wire arguments for the `node.annotate` tool call.
#[derive(Debug, Deserialize)]
struct NodeAnnotateToolArgs {
    node_id: String,
    body: String,
    label: Option<String>,
    // Defaults to false: annotations are hidden by default unless `visible`
    // is set (see the `node.annotate` handler).
    #[serde(default)]
    visible: bool,
}
+
/// Wire arguments for the `node.archive` tool call.
#[derive(Debug, Deserialize)]
struct NodeArchiveToolArgs {
    // Node id in its wire (string) form; parsed by the handler.
    node_id: String,
}
+
/// Wire arguments for the `note.quick` tool call, which creates a
/// `NodeClass::Note` node with `{ "body": ... }` as its payload.
#[derive(Debug, Deserialize)]
struct QuickNoteToolArgs {
    frontier_id: Option<String>,
    title: String,
    body: String,
    #[serde(default)]
    annotations: Vec<WireAnnotation>,
    // Parent node ids; turned into lineage edges via `lineage_attachments`.
    #[serde(default)]
    parents: Vec<String>,
}
+
/// Wire arguments for the `research.record` tool call, which creates a
/// `NodeClass::Research` node with `{ "body": ... }` as its payload.
#[derive(Debug, Deserialize)]
struct ResearchRecordToolArgs {
    frontier_id: Option<String>,
    title: String,
    summary: Option<String>,
    body: String,
    #[serde(default)]
    annotations: Vec<WireAnnotation>,
    // Parent node ids; turned into lineage edges via `lineage_attachments`.
    #[serde(default)]
    parents: Vec<String>,
}
+
/// Wire arguments for the `experiment.close` tool call, which captures a
/// checkpoint and submits a `CloseExperimentRequest` to the store.
#[derive(Debug, Deserialize)]
struct ExperimentCloseToolArgs {
    frontier_id: String,
    base_checkpoint_id: String,
    change_node_id: String,
    candidate_summary: String,
    run: WireRun,
    primary_metric: WireMetricObservation,
    #[serde(default)]
    supporting_metrics: Vec<WireMetricObservation>,
    note: WireFrontierNote,
    // Verdict name in wire form; see `parse_verdict_name` for accepted values.
    verdict: String,
    decision_title: String,
    decision_rationale: String,
    analysis_node_id: Option<String>,
}
+
/// Wire form of a node annotation; converted to `NodeAnnotation` by
/// `tool_annotations`.
#[derive(Debug, Deserialize)]
struct WireAnnotation {
    body: String,
    label: Option<String>,
    // Defaults to false: hidden-by-default unless explicitly visible.
    #[serde(default)]
    visible: bool,
}
+
/// Wire form of a metric specification; converted to `MetricSpec` by
/// `metric_spec_from_wire`.
#[derive(Debug, Deserialize)]
struct WireMetricSpec {
    key: String,
    // Unit name in wire form; see `parse_metric_unit_name`.
    unit: String,
    // Optimization objective in wire form; parsed by
    // `crate::parse_optimization_objective`.
    objective: String,
}
+
/// Wire form of a metric observation (a spec plus its measured value);
/// converted to `MetricObservation` by `metric_observation_from_wire`.
#[derive(Debug, Deserialize)]
struct WireMetricObservation {
    key: String,
    // Unit name in wire form; see `parse_metric_unit_name`.
    unit: String,
    objective: String,
    value: f64,
}
+
/// Wire description of an experiment run, embedded in
/// `ExperimentCloseToolArgs`.
#[derive(Debug, Deserialize)]
struct WireRun {
    title: String,
    summary: Option<String>,
    // Backend name in wire form; see `parse_backend_name`.
    backend: String,
    benchmark_suite: String,
    command: WireRunCommand,
}
+
/// Wire form of the command executed for a run; converted to `CommandRecipe`
/// by `command_recipe_from_wire`.
#[derive(Debug, Deserialize)]
struct WireRunCommand {
    // Absent means "run from the project root" (defaulted by the converter).
    working_directory: Option<String>,
    argv: Vec<String>,
    // Extra environment variables; defaults to empty.
    #[serde(default)]
    env: BTreeMap<String, String>,
}
+
/// Wire form of a frontier note; converted to `FrontierNote` by the
/// `experiment.close` handler.
#[derive(Debug, Deserialize)]
struct WireFrontierNote {
    summary: String,
    // Follow-up hypotheses; defaults to empty.
    #[serde(default)]
    next_hypotheses: Vec<String>,
}
diff --git a/crates/fidget-spinner-cli/src/mcp/telemetry.rs b/crates/fidget-spinner-cli/src/mcp/telemetry.rs
new file mode 100644
index 0000000..7206f76
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/telemetry.rs
@@ -0,0 +1,103 @@
+use std::collections::BTreeMap;
+
+use serde::{Deserialize, Serialize};
+
+use crate::mcp::fault::FaultRecord;
+
/// Per-operation request counters plus the most recent observed latency.
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct OperationTelemetry {
    // Total requests routed to this operation.
    pub requests: u64,
    // Requests that completed successfully.
    pub successes: u64,
    // Requests that ended in a fault.
    pub errors: u64,
    // Retry attempts recorded for this operation.
    pub retries: u64,
    // Latency of the most recently completed request, in milliseconds.
    pub last_latency_ms: Option<u128>,
}
+
/// Server-wide telemetry: global counters, the last observed fault, and a
/// per-operation breakdown keyed by operation name.
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct ServerTelemetry {
    pub requests: u64,
    pub successes: u64,
    pub errors: u64,
    pub retries: u64,
    // Times the worker process was restarted.
    pub worker_restarts: u64,
    // Times a host binary rollout was recorded.
    pub host_rollouts: u64,
    // Most recent fault recorded by `record_error`, if any.
    pub last_fault: Option<FaultRecord>,
    // Per-operation counters; BTreeMap keeps output deterministically ordered.
    pub operations: BTreeMap<String, OperationTelemetry>,
}
+
+impl ServerTelemetry {
+ pub fn record_request(&mut self, operation: &str) {
+ self.requests += 1;
+ self.operations
+ .entry(operation.to_owned())
+ .or_default()
+ .requests += 1;
+ }
+
+ pub fn record_success(&mut self, operation: &str, latency_ms: u128) {
+ self.successes += 1;
+ let entry = self.operations.entry(operation.to_owned()).or_default();
+ entry.successes += 1;
+ entry.last_latency_ms = Some(latency_ms);
+ }
+
+ pub fn record_retry(&mut self, operation: &str) {
+ self.retries += 1;
+ self.operations
+ .entry(operation.to_owned())
+ .or_default()
+ .retries += 1;
+ }
+
+ pub fn record_error(&mut self, operation: &str, fault: FaultRecord, latency_ms: u128) {
+ self.errors += 1;
+ self.last_fault = Some(fault.clone());
+ let entry = self.operations.entry(operation.to_owned()).or_default();
+ entry.errors += 1;
+ entry.last_latency_ms = Some(latency_ms);
+ }
+
+ pub fn record_worker_restart(&mut self) {
+ self.worker_restarts += 1;
+ }
+
+ pub fn record_rollout(&mut self) {
+ self.host_rollouts += 1;
+ }
+}
+
/// Health of server initialization, reported in `HealthSnapshot`.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct InitializationHealth {
    pub ready: bool,
    // Whether a seed checkpoint was captured — TODO(review): confirm against
    // the initialization code that populates this.
    pub seed_captured: bool,
}
+
/// Health of the worker process, reported in `HealthSnapshot`.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct WorkerHealth {
    // Generation counter that presumably increments on worker restart —
    // verify against the host runtime that populates this.
    pub worker_generation: u64,
    pub alive: bool,
}
+
/// Health of the host binary (rollout/launch-path state), reported in
/// `HealthSnapshot`.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct BinaryHealth {
    pub current_executable: String,
    pub launch_path_stable: bool,
    pub rollout_pending: bool,
}
+
/// Health of the project binding (which project/state paths the server is
/// attached to), reported in `HealthSnapshot`.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct BindingHealth {
    pub bound: bool,
    pub requested_path: Option<String>,
    pub project_root: Option<String>,
    pub state_root: Option<String>,
}
+
/// Aggregated point-in-time health report combining initialization, binding,
/// worker, and binary status plus the most recent fault.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct HealthSnapshot {
    pub initialization: InitializationHealth,
    pub binding: BindingHealth,
    pub worker: WorkerHealth,
    pub binary: BinaryHealth,
    pub last_fault: Option<FaultRecord>,
}
diff --git a/crates/fidget-spinner-cli/src/mcp/worker.rs b/crates/fidget-spinner-cli/src/mcp/worker.rs
new file mode 100644
index 0000000..91c6db9
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/worker.rs
@@ -0,0 +1,66 @@
+use std::io::{self, BufRead, Write};
+use std::path::PathBuf;
+
+use camino::Utf8PathBuf;
+
+use crate::mcp::fault::{FaultKind, FaultRecord, FaultStage};
+use crate::mcp::protocol::{WorkerOutcome, WorkerRequest, WorkerResponse};
+use crate::mcp::service::WorkerService;
+
+pub(crate) fn serve(project: PathBuf) -> Result<(), fidget_spinner_store_sqlite::StoreError> {
+ let project = Utf8PathBuf::from(project.to_string_lossy().into_owned());
+ let mut service = WorkerService::new(&project)?;
+ let stdin = io::stdin();
+ let mut stdout = io::stdout().lock();
+
+ for line in stdin.lock().lines() {
+ let line = match line {
+ Ok(line) => line,
+ Err(error) => {
+ eprintln!("worker stdin failure: {error}");
+ continue;
+ }
+ };
+ if line.trim().is_empty() {
+ continue;
+ }
+
+ let request = match serde_json::from_str::<WorkerRequest>(&line) {
+ Ok(request) => request,
+ Err(error) => {
+ let response = WorkerResponse {
+ id: crate::mcp::protocol::HostRequestId(0),
+ outcome: WorkerOutcome::Fault {
+ fault: FaultRecord::new(
+ FaultKind::InvalidInput,
+ FaultStage::Protocol,
+ "worker.parse",
+ format!("invalid worker request: {error}"),
+ ),
+ },
+ };
+ write_message(&mut stdout, &response)?;
+ continue;
+ }
+ };
+
+ let WorkerRequest::Execute { id, operation } = request;
+ let outcome = match service.execute(operation) {
+ Ok(result) => WorkerOutcome::Success { result },
+ Err(fault) => WorkerOutcome::Fault { fault },
+ };
+ write_message(&mut stdout, &WorkerResponse { id, outcome })?;
+ }
+
+ Ok(())
+}
+
+fn write_message(
+ stdout: &mut impl Write,
+ response: &WorkerResponse,
+) -> Result<(), fidget_spinner_store_sqlite::StoreError> {
+ serde_json::to_writer(&mut *stdout, response)?;
+ stdout.write_all(b"\n")?;
+ stdout.flush()?;
+ Ok(())
+}