swarm repositories / source
aboutsummaryrefslogtreecommitdiff
path: root/crates
diff options
context:
space:
mode:
Diffstat (limited to 'crates')
-rw-r--r--crates/fidget-spinner-cli/Cargo.toml22
-rw-r--r--crates/fidget-spinner-cli/src/bundled_skill.rs69
-rw-r--r--crates/fidget-spinner-cli/src/main.rs958
-rw-r--r--crates/fidget-spinner-cli/src/mcp/catalog.rs541
-rw-r--r--crates/fidget-spinner-cli/src/mcp/fault.rs99
-rw-r--r--crates/fidget-spinner-cli/src/mcp/host/binary.rs43
-rw-r--r--crates/fidget-spinner-cli/src/mcp/host/config.rs18
-rw-r--r--crates/fidget-spinner-cli/src/mcp/host/mod.rs8
-rw-r--r--crates/fidget-spinner-cli/src/mcp/host/process.rs246
-rw-r--r--crates/fidget-spinner-cli/src/mcp/host/runtime.rs719
-rw-r--r--crates/fidget-spinner-cli/src/mcp/mod.rs10
-rw-r--r--crates/fidget-spinner-cli/src/mcp/protocol.rs86
-rw-r--r--crates/fidget-spinner-cli/src/mcp/service.rs813
-rw-r--r--crates/fidget-spinner-cli/src/mcp/telemetry.rs103
-rw-r--r--crates/fidget-spinner-cli/src/mcp/worker.rs66
-rw-r--r--crates/fidget-spinner-cli/tests/mcp_hardening.rs424
-rw-r--r--crates/fidget-spinner-core/Cargo.toml19
-rw-r--r--crates/fidget-spinner-core/src/error.rs9
-rw-r--r--crates/fidget-spinner-core/src/id.rs46
-rw-r--r--crates/fidget-spinner-core/src/lib.rs26
-rw-r--r--crates/fidget-spinner-core/src/model.rs693
-rw-r--r--crates/fidget-spinner-store-sqlite/Cargo.toml21
-rw-r--r--crates/fidget-spinner-store-sqlite/src/lib.rs1810
23 files changed, 6849 insertions, 0 deletions
diff --git a/crates/fidget-spinner-cli/Cargo.toml b/crates/fidget-spinner-cli/Cargo.toml
new file mode 100644
index 0000000..4ca70e9
--- /dev/null
+++ b/crates/fidget-spinner-cli/Cargo.toml
@@ -0,0 +1,22 @@
+[package]
+name = "fidget-spinner-cli"
+description = "Thin local entrypoint for Fidget Spinner"
+edition.workspace = true
+license.workspace = true
+publish = false
+rust-version.workspace = true
+version.workspace = true
+
+[dependencies]
+camino.workspace = true
+clap.workspace = true
+dirs.workspace = true
+fidget-spinner-core = { path = "../fidget-spinner-core" }
+fidget-spinner-store-sqlite = { path = "../fidget-spinner-store-sqlite" }
+serde.workspace = true
+serde_json.workspace = true
+time.workspace = true
+uuid.workspace = true
+
+[lints]
+workspace = true
diff --git a/crates/fidget-spinner-cli/src/bundled_skill.rs b/crates/fidget-spinner-cli/src/bundled_skill.rs
new file mode 100644
index 0000000..85760bc
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/bundled_skill.rs
@@ -0,0 +1,69 @@
+use serde::Serialize;
+
/// A skill document compiled into the binary at build time.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub(crate) struct BundledSkill {
    // Stable identifier used for lookup and as the install directory name.
    pub name: &'static str,
    // One-line human-readable description surfaced in listings.
    pub description: &'static str,
    // Resource URI (`fidget-spinner://skill/...`) under which the body is exposed.
    pub resource_uri: &'static str,
    // Full SKILL.md markdown body, embedded via `include_str!`.
    pub body: &'static str,
}

/// Metadata-only view of a [`BundledSkill`] (everything except the large
/// `body`); serializable so it can be emitted in JSON listings.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize)]
pub(crate) struct BundledSkillSummary {
    pub name: &'static str,
    pub description: &'static str,
    pub resource_uri: &'static str,
}
+
+impl BundledSkill {
+ #[must_use]
+ pub const fn summary(self) -> BundledSkillSummary {
+ BundledSkillSummary {
+ name: self.name,
+ description: self.description,
+ resource_uri: self.resource_uri,
+ }
+ }
+}
+
// Every skill shipped with the binary. Index 0 is the default skill; the
// accessor functions below rely on this ordering, so keep them in sync.
const BUNDLED_SKILLS: [BundledSkill; 2] = [
    BundledSkill {
        name: "fidget-spinner",
        description: "Base skill for working inside a Fidget Spinner project through the local DAG and MCP surface.",
        resource_uri: "fidget-spinner://skill/fidget-spinner",
        // Embedded at compile time from the repository's assets tree.
        body: include_str!("../../../assets/codex-skills/fidget-spinner/SKILL.md"),
    },
    BundledSkill {
        name: "frontier-loop",
        description: "Aggressive autonomous frontier-push specialization for Fidget Spinner.",
        resource_uri: "fidget-spinner://skill/frontier-loop",
        body: include_str!("../../../assets/codex-skills/frontier-loop/SKILL.md"),
    },
];
+
/// The skill used when the caller does not name one (bundle index 0).
#[must_use]
pub(crate) const fn default_bundled_skill() -> BundledSkill {
    BUNDLED_SKILLS[0]
}

/// The frontier-loop specialization skill (bundle index 1).
#[must_use]
pub(crate) const fn frontier_loop_bundled_skill() -> BundledSkill {
    BUNDLED_SKILLS[1]
}
+
+#[must_use]
+pub(crate) fn bundled_skill(name: &str) -> Option<BundledSkill> {
+ BUNDLED_SKILLS
+ .iter()
+ .copied()
+ .find(|skill| skill.name == name)
+}
+
+#[must_use]
+pub(crate) fn bundled_skill_summaries() -> Vec<BundledSkillSummary> {
+ BUNDLED_SKILLS
+ .iter()
+ .copied()
+ .map(BundledSkill::summary)
+ .collect()
+}
diff --git a/crates/fidget-spinner-cli/src/main.rs b/crates/fidget-spinner-cli/src/main.rs
new file mode 100644
index 0000000..9b2b8ae
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/main.rs
@@ -0,0 +1,958 @@
+mod bundled_skill;
+mod mcp;
+
+use std::collections::{BTreeMap, BTreeSet};
+use std::fs;
+use std::path::{Path, PathBuf};
+
+use camino::{Utf8Path, Utf8PathBuf};
+use clap::{Args, Parser, Subcommand, ValueEnum};
+use fidget_spinner_core::{
+ AnnotationVisibility, CodeSnapshotRef, CommandRecipe, ExecutionBackend, FrontierContract,
+ FrontierNote, FrontierVerdict, GitCommitHash, MetricObservation, MetricSpec, MetricUnit,
+ NodeAnnotation, NodeClass, NodePayload, NonEmptyText, OptimizationObjective,
+};
+use fidget_spinner_store_sqlite::{
+ CloseExperimentRequest, CreateFrontierRequest, CreateNodeRequest, EdgeAttachment,
+ EdgeAttachmentDirection, ListNodesQuery, ProjectStore, StoreError,
+};
+use serde::Serialize;
+use serde_json::{Map, Value, json};
+use uuid::Uuid;
+
/// Top-level clap parser for the `fidget-spinner` binary.
#[derive(Parser)]
#[command(author, version, about = "Fidget Spinner local project CLI")]
struct Cli {
    #[command(subcommand)]
    command: Command,
}

/// First-level subcommands. Tuple variants carry their own `Args` structs;
/// braced variants fan out into the nested subcommand enums defined below.
#[derive(Subcommand)]
enum Command {
    Init(InitArgs),
    Schema {
        #[command(subcommand)]
        command: SchemaCommand,
    },
    Frontier {
        #[command(subcommand)]
        command: FrontierCommand,
    },
    Node {
        #[command(subcommand)]
        command: NodeCommand,
    },
    Note(NoteCommand),
    Research(ResearchCommand),
    Experiment {
        #[command(subcommand)]
        command: ExperimentCommand,
    },
    Mcp {
        #[command(subcommand)]
        command: McpCommand,
    },
    Skill {
        #[command(subcommand)]
        command: SkillCommand,
    },
}
+
/// Arguments for `init`.
#[derive(Args)]
struct InitArgs {
    // Directory to initialize; defaults to the current working directory.
    #[arg(long, default_value = ".")]
    project: PathBuf,
    // Display name; falls back to the project directory's file name.
    #[arg(long)]
    name: Option<String>,
    #[arg(long, default_value = "local.project")]
    namespace: String,
}

/// `schema` subcommands.
#[derive(Subcommand)]
enum SchemaCommand {
    Show(ProjectArg),
}

/// `frontier` subcommands.
#[derive(Subcommand)]
enum FrontierCommand {
    Init(FrontierInitArgs),
    Status(FrontierStatusArgs),
}

/// Arguments for `frontier init`: the contract (objective, evaluation
/// protocol, promotion criteria) plus the seed checkpoint summary.
#[derive(Args)]
struct FrontierInitArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long)]
    label: String,
    #[arg(long)]
    objective: String,
    #[arg(long, default_value = "frontier contract")]
    contract_title: String,
    #[arg(long)]
    contract_summary: Option<String>,
    // Repeatable: one value per `--benchmark-suite` flag.
    #[arg(long = "benchmark-suite")]
    benchmark_suites: Vec<String>,
    // Repeatable: one value per `--promotion-criterion` flag.
    #[arg(long = "promotion-criterion")]
    promotion_criteria: Vec<String>,
    #[arg(long = "primary-metric-key")]
    primary_metric_key: String,
    #[arg(long = "primary-metric-unit", value_enum)]
    primary_metric_unit: CliMetricUnit,
    #[arg(long = "primary-metric-objective", value_enum)]
    primary_metric_objective: CliOptimizationObjective,
    // Summary attached to the checkpoint captured before frontier creation.
    #[arg(long = "seed-summary", default_value = "initial champion checkpoint")]
    seed_summary: String,
}

/// Arguments for `frontier status`.
#[derive(Args)]
struct FrontierStatusArgs {
    #[command(flatten)]
    project: ProjectArg,
    // Frontier id (UUID); when omitted and exactly one frontier exists,
    // that frontier's projection is shown directly.
    #[arg(long)]
    frontier: Option<String>,
}
+
/// `node` subcommands.
#[derive(Subcommand)]
enum NodeCommand {
    Add(NodeAddArgs),
    List(NodeListArgs),
    Show(NodeShowArgs),
    Annotate(NodeAnnotateArgs),
    Archive(NodeArchiveArgs),
}

/// Arguments for `node add`.
#[derive(Args)]
struct NodeAddArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long, value_enum)]
    class: CliNodeClass,
    // Optional frontier id (UUID) the node belongs to.
    #[arg(long)]
    frontier: Option<String>,
    #[arg(long)]
    title: String,
    #[arg(long)]
    summary: Option<String>,
    // Inline JSON object merged into the node payload.
    #[arg(long = "payload-json")]
    payload_json: Option<String>,
    // Path to a file containing a JSON object merged into the payload.
    #[arg(long = "payload-file")]
    payload_file: Option<PathBuf>,
    // Repeatable `key=value` payload overrides; values parsed as JSON when possible.
    #[arg(long = "field")]
    fields: Vec<String>,
    // Repeatable annotation bodies, stored hidden by default.
    #[arg(long = "annotation")]
    annotations: Vec<String>,
    // Repeatable parent node ids (UUIDs) used to create lineage edges.
    #[arg(long = "parent")]
    parents: Vec<String>,
}

/// Arguments for `node list`.
#[derive(Args)]
struct NodeListArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long)]
    frontier: Option<String>,
    #[arg(long, value_enum)]
    class: Option<CliNodeClass>,
    // Flag: include archived nodes in the listing.
    #[arg(long)]
    include_archived: bool,
    #[arg(long, default_value_t = 20)]
    limit: u32,
}

/// Arguments for `node show`.
#[derive(Args)]
struct NodeShowArgs {
    #[command(flatten)]
    project: ProjectArg,
    // Node id (UUID).
    #[arg(long)]
    node: String,
}

/// Arguments for `node annotate`.
#[derive(Args)]
struct NodeAnnotateArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long)]
    node: String,
    #[arg(long)]
    body: String,
    #[arg(long)]
    label: Option<String>,
    // Flag: make the annotation visible instead of hidden-by-default.
    #[arg(long)]
    visible: bool,
}

/// Arguments for `node archive`.
#[derive(Args)]
struct NodeArchiveArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long)]
    node: String,
}
+
/// `note` command wrapper (single `quick` subcommand).
#[derive(Args)]
struct NoteCommand {
    #[command(subcommand)]
    command: NoteSubcommand,
}

#[derive(Subcommand)]
enum NoteSubcommand {
    Quick(QuickNoteArgs),
}

/// `research` command wrapper (single `add` subcommand).
#[derive(Args)]
struct ResearchCommand {
    #[command(subcommand)]
    command: ResearchSubcommand,
}

#[derive(Subcommand)]
enum ResearchSubcommand {
    Add(QuickResearchArgs),
}

/// Arguments for `note quick`.
#[derive(Args)]
struct QuickNoteArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long)]
    frontier: Option<String>,
    #[arg(long)]
    title: String,
    // Free-form body; becomes the payload's `body` field.
    #[arg(long)]
    body: String,
    // Repeatable parent node ids (UUIDs) for lineage edges.
    #[arg(long = "parent")]
    parents: Vec<String>,
}

/// Arguments for `research add`.
#[derive(Args)]
struct QuickResearchArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long)]
    frontier: Option<String>,
    #[arg(long)]
    title: String,
    // Free-form body; becomes the payload's `body` field.
    #[arg(long)]
    body: String,
    #[arg(long)]
    summary: Option<String>,
    #[arg(long = "parent")]
    parents: Vec<String>,
}
+
/// `experiment` subcommands.
#[derive(Subcommand)]
enum ExperimentCommand {
    Close(ExperimentCloseArgs),
}

/// `mcp` subcommands. `Worker` is hidden from `--help`; it is an internal
/// entry point (see the `mcp` module).
#[derive(Subcommand)]
enum McpCommand {
    Serve(McpServeArgs),
    #[command(hide = true)]
    Worker(McpWorkerArgs),
}

/// Arguments for `experiment close`: everything needed to record a full
/// experiment round — candidate checkpoint, benchmark run, metrics,
/// frontier note, and verdict — in one invocation.
#[derive(Args)]
struct ExperimentCloseArgs {
    #[command(flatten)]
    project: ProjectArg,
    #[arg(long)]
    frontier: String,
    // Checkpoint id (UUID) the candidate is compared against.
    #[arg(long = "base-checkpoint")]
    base_checkpoint: String,
    // Node id (UUID) of the change being evaluated.
    #[arg(long = "change-node")]
    change_node: String,
    #[arg(long = "candidate-summary")]
    candidate_summary: String,
    #[arg(long = "run-title")]
    run_title: String,
    #[arg(long = "run-summary")]
    run_summary: Option<String>,
    #[arg(long = "benchmark-suite")]
    benchmark_suite: String,
    #[arg(long = "backend", value_enum, default_value_t = CliExecutionBackend::Worktree)]
    backend: CliExecutionBackend,
    // Working directory for the command recipe; defaults to the project root.
    #[arg(long = "cwd")]
    working_directory: Option<PathBuf>,
    // Repeatable: benchmark command argv, one token per flag.
    #[arg(long = "argv")]
    argv: Vec<String>,
    // Repeatable `KEY=VALUE` environment entries; malformed entries are skipped.
    #[arg(long = "env")]
    env: Vec<String>,
    #[arg(long = "primary-metric-key")]
    primary_metric_key: String,
    #[arg(long = "primary-metric-unit", value_enum)]
    primary_metric_unit: CliMetricUnit,
    #[arg(long = "primary-metric-objective", value_enum)]
    primary_metric_objective: CliOptimizationObjective,
    #[arg(long = "primary-metric-value")]
    primary_metric_value: f64,
    // Repeatable supporting metrics, each formatted `key:unit:objective:value`.
    #[arg(long = "metric")]
    metrics: Vec<String>,
    #[arg(long)]
    note: String,
    #[arg(long = "next-hypothesis")]
    next_hypotheses: Vec<String>,
    #[arg(long = "verdict", value_enum)]
    verdict: CliFrontierVerdict,
    #[arg(long = "decision-title")]
    decision_title: String,
    #[arg(long = "decision-rationale")]
    decision_rationale: String,
}
+
/// `skill` subcommands operating on the skills bundled into the binary.
#[derive(Subcommand)]
enum SkillCommand {
    List,
    Install(SkillInstallArgs),
    Show(SkillShowArgs),
}

/// Arguments for `skill install`.
#[derive(Args)]
struct SkillInstallArgs {
    // Bundled skill name; when omitted, every bundled skill is installed.
    #[arg(long)]
    name: Option<String>,
    // Install directory; defaults under `~/.codex/skills`.
    #[arg(long)]
    destination: Option<PathBuf>,
}

/// Arguments for `skill show`.
#[derive(Args)]
struct SkillShowArgs {
    // Bundled skill name; defaults to the primary bundled skill.
    #[arg(long)]
    name: Option<String>,
}

/// Shared `--project` flag, flattened into most subcommand arg structs.
#[derive(Args)]
struct ProjectArg {
    #[arg(long, default_value = ".")]
    project: PathBuf,
}

/// Arguments for `mcp serve`.
#[derive(Args)]
struct McpServeArgs {
    #[arg(long)]
    project: Option<PathBuf>,
}

/// Arguments for the hidden `mcp worker` subcommand.
#[derive(Args)]
struct McpWorkerArgs {
    #[arg(long)]
    project: PathBuf,
}
+
// CLI-facing mirrors of core enums. They exist so clap's `ValueEnum` derive
// stays out of the core crate; the `From` impls at the bottom of this file
// convert each to its core counterpart.

#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)]
enum CliNodeClass {
    Contract,
    Change,
    Run,
    Analysis,
    Decision,
    Research,
    Enabling,
    Note,
}

#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)]
enum CliMetricUnit {
    Seconds,
    Bytes,
    Count,
    Ratio,
    Custom,
}

#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)]
enum CliOptimizationObjective {
    Minimize,
    Maximize,
    Target,
}

#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)]
enum CliExecutionBackend {
    Local,
    Worktree,
    Ssh,
}

#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)]
enum CliFrontierVerdict {
    PromoteToChampion,
    KeepOnFrontier,
    RevertToChampion,
    ArchiveDeadEnd,
    NeedsMoreEvidence,
}
+
+fn main() {
+ if let Err(error) = run() {
+ eprintln!("error: {error}");
+ std::process::exit(1);
+ }
+}
+
/// Parses the command line and dispatches to the matching handler.
///
/// Every handler funnels failures into `StoreError`, which `main` reports.
fn run() -> Result<(), StoreError> {
    let cli = Cli::parse();
    match cli.command {
        Command::Init(args) => run_init(args),
        // `schema show` is small enough to handle inline.
        Command::Schema { command } => match command {
            SchemaCommand::Show(project) => {
                let store = open_store(&project.project)?;
                print_json(store.schema())
            }
        },
        Command::Frontier { command } => match command {
            FrontierCommand::Init(args) => run_frontier_init(args),
            FrontierCommand::Status(args) => run_frontier_status(args),
        },
        Command::Node { command } => match command {
            NodeCommand::Add(args) => run_node_add(args),
            NodeCommand::List(args) => run_node_list(args),
            NodeCommand::Show(args) => run_node_show(args),
            NodeCommand::Annotate(args) => run_node_annotate(args),
            NodeCommand::Archive(args) => run_node_archive(args),
        },
        Command::Note(command) => match command.command {
            NoteSubcommand::Quick(args) => run_quick_note(args),
        },
        Command::Research(command) => match command.command {
            ResearchSubcommand::Add(args) => run_quick_research(args),
        },
        Command::Experiment { command } => match command {
            ExperimentCommand::Close(args) => run_experiment_close(args),
        },
        Command::Mcp { command } => match command {
            McpCommand::Serve(args) => mcp::serve(args.project),
            McpCommand::Worker(args) => mcp::serve_worker(args.project),
        },
        Command::Skill { command } => match command {
            SkillCommand::List => print_json(&bundled_skill::bundled_skill_summaries()),
            SkillCommand::Install(args) => run_skill_install(args),
            // `skill show` prints the raw markdown body, not JSON.
            SkillCommand::Show(args) => {
                println!("{}", resolve_bundled_skill(args.name.as_deref())?.body);
                Ok(())
            }
        },
    }
}
+
+fn run_init(args: InitArgs) -> Result<(), StoreError> {
+ let project_root = utf8_path(args.project);
+ let display_name = NonEmptyText::new(args.name.unwrap_or_else(|| {
+ project_root
+ .file_name()
+ .map_or_else(|| "fidget-spinner-project".to_owned(), ToOwned::to_owned)
+ }))?;
+ let namespace = NonEmptyText::new(args.namespace)?;
+ let store = ProjectStore::init(&project_root, display_name, namespace)?;
+ println!("initialized {}", store.state_root());
+ println!("project: {}", store.config().display_name);
+ println!("schema: {}", store.state_root().join("schema.json"));
+ Ok(())
+}
+
/// `frontier init`: capture a seed checkpoint, then create a frontier with
/// its contract and print the resulting projection as JSON.
fn run_frontier_init(args: FrontierInitArgs) -> Result<(), StoreError> {
    let mut store = open_store(&args.project.project)?;
    // Capture the champion state first so the frontier starts from a
    // concrete checkpoint. NOTE(review): the result appears to be optional
    // (see `run_experiment_close`) — a frontier may start without one.
    let initial_checkpoint =
        store.auto_capture_checkpoint(NonEmptyText::new(args.seed_summary)?)?;
    let projection = store.create_frontier(CreateFrontierRequest {
        label: NonEmptyText::new(args.label)?,
        contract_title: NonEmptyText::new(args.contract_title)?,
        contract_summary: args.contract_summary.map(NonEmptyText::new).transpose()?,
        contract: FrontierContract {
            objective: NonEmptyText::new(args.objective)?,
            evaluation: fidget_spinner_core::EvaluationProtocol {
                benchmark_suites: to_text_set(args.benchmark_suites)?,
                primary_metric: MetricSpec {
                    metric_key: NonEmptyText::new(args.primary_metric_key)?,
                    unit: args.primary_metric_unit.into(),
                    objective: args.primary_metric_objective.into(),
                },
                // Supporting metrics cannot be specified from this command.
                supporting_metrics: BTreeSet::new(),
            },
            promotion_criteria: to_text_vec(args.promotion_criteria)?,
        },
        initial_checkpoint,
    })?;
    print_json(&projection)
}
+
+fn run_frontier_status(args: FrontierStatusArgs) -> Result<(), StoreError> {
+ let store = open_store(&args.project.project)?;
+ if let Some(frontier) = args.frontier {
+ let projection = store.frontier_projection(parse_frontier_id(&frontier)?)?;
+ return print_json(&projection);
+ }
+ let frontiers = store.list_frontiers()?;
+ if frontiers.len() == 1 {
+ return print_json(&store.frontier_projection(frontiers[0].id)?);
+ }
+ print_json(&frontiers)
+}
+
/// `node add`: create a DAG node of the requested class with a payload
/// assembled from the `--payload-json`/`--payload-file`/`--field` sources,
/// hidden annotations, and lineage edges to each `--parent` node.
fn run_node_add(args: NodeAddArgs) -> Result<(), StoreError> {
    let mut store = open_store(&args.project.project)?;
    let frontier_id = args
        .frontier
        .as_deref()
        .map(parse_frontier_id)
        .transpose()?;
    let payload = load_payload(
        store.schema().schema_ref(),
        args.payload_json,
        args.payload_file,
        args.fields,
    )?;
    // CLI-supplied annotations are always created hidden-by-default.
    let annotations = args
        .annotations
        .into_iter()
        .map(|body| Ok(NodeAnnotation::hidden(NonEmptyText::new(body)?)))
        .collect::<Result<Vec<_>, StoreError>>()?;
    let node = store.add_node(CreateNodeRequest {
        class: args.class.into(),
        frontier_id,
        title: NonEmptyText::new(args.title)?,
        summary: args.summary.map(NonEmptyText::new).transpose()?,
        payload,
        annotations,
        attachments: lineage_attachments(args.parents)?,
    })?;
    print_json(&node)
}
+
+fn run_node_list(args: NodeListArgs) -> Result<(), StoreError> {
+ let store = open_store(&args.project.project)?;
+ let items = store.list_nodes(ListNodesQuery {
+ frontier_id: args
+ .frontier
+ .as_deref()
+ .map(parse_frontier_id)
+ .transpose()?,
+ class: args.class.map(Into::into),
+ include_archived: args.include_archived,
+ limit: args.limit,
+ })?;
+ print_json(&items)
+}
+
+fn run_node_show(args: NodeShowArgs) -> Result<(), StoreError> {
+ let store = open_store(&args.project.project)?;
+ let node_id = parse_node_id(&args.node)?;
+ let node = store
+ .get_node(node_id)?
+ .ok_or(StoreError::NodeNotFound(node_id))?;
+ print_json(&node)
+}
+
+fn run_node_annotate(args: NodeAnnotateArgs) -> Result<(), StoreError> {
+ let mut store = open_store(&args.project.project)?;
+ let annotation = NodeAnnotation {
+ id: fidget_spinner_core::AnnotationId::fresh(),
+ visibility: if args.visible {
+ AnnotationVisibility::Visible
+ } else {
+ AnnotationVisibility::HiddenByDefault
+ },
+ label: args.label.map(NonEmptyText::new).transpose()?,
+ body: NonEmptyText::new(args.body)?,
+ created_at: time::OffsetDateTime::now_utc(),
+ };
+ store.annotate_node(parse_node_id(&args.node)?, annotation)?;
+ println!("annotated {}", args.node);
+ Ok(())
+}
+
+fn run_node_archive(args: NodeArchiveArgs) -> Result<(), StoreError> {
+ let mut store = open_store(&args.project.project)?;
+ store.archive_node(parse_node_id(&args.node)?)?;
+ println!("archived {}", args.node);
+ Ok(())
+}
+
/// `note quick`: shorthand for adding a `Note`-class node whose payload is
/// just `{ "body": <text> }`, with optional frontier and parent lineage.
fn run_quick_note(args: QuickNoteArgs) -> Result<(), StoreError> {
    let mut store = open_store(&args.project.project)?;
    let payload = NodePayload::with_schema(
        store.schema().schema_ref(),
        json_object(json!({ "body": args.body }))?,
    );
    let node = store.add_node(CreateNodeRequest {
        class: NodeClass::Note,
        frontier_id: args
            .frontier
            .as_deref()
            .map(parse_frontier_id)
            .transpose()?,
        title: NonEmptyText::new(args.title)?,
        // Quick notes never carry a summary.
        summary: None,
        payload,
        annotations: Vec::new(),
        attachments: lineage_attachments(args.parents)?,
    })?;
    print_json(&node)
}
+
/// `research add`: shorthand for adding a `Research`-class node whose
/// payload is `{ "body": <text> }`, with optional summary, frontier, and
/// parent lineage. Mirrors `run_quick_note` apart from class and summary.
fn run_quick_research(args: QuickResearchArgs) -> Result<(), StoreError> {
    let mut store = open_store(&args.project.project)?;
    let payload = NodePayload::with_schema(
        store.schema().schema_ref(),
        json_object(json!({ "body": args.body }))?,
    );
    let node = store.add_node(CreateNodeRequest {
        class: NodeClass::Research,
        frontier_id: args
            .frontier
            .as_deref()
            .map(parse_frontier_id)
            .transpose()?,
        title: NonEmptyText::new(args.title)?,
        summary: args.summary.map(NonEmptyText::new).transpose()?,
        payload,
        annotations: Vec::new(),
        attachments: lineage_attachments(args.parents)?,
    })?;
    print_json(&node)
}
+
/// `experiment close`: record a full experiment round in one call and print
/// the resulting receipt as JSON.
///
/// Captures the candidate checkpoint, builds the benchmark command recipe
/// and code snapshot, then hands everything to
/// `ProjectStore::close_experiment` along with metrics, the frontier note,
/// and the verdict/decision.
fn run_experiment_close(args: ExperimentCloseArgs) -> Result<(), StoreError> {
    let mut store = open_store(&args.project.project)?;
    let frontier_id = parse_frontier_id(&args.frontier)?;
    // A candidate snapshot is mandatory here: if no checkpoint can be
    // captured, surface it as a git-inspection failure.
    let snapshot = store
        .auto_capture_checkpoint(NonEmptyText::new(args.candidate_summary.clone())?)?
        .map(|seed| seed.snapshot)
        .ok_or(StoreError::GitInspectionFailed(
            store.project_root().to_path_buf(),
        ))?;
    // Command recipe defaults its working directory to the project root.
    let command = CommandRecipe::new(
        args.working_directory
            .map(utf8_path)
            .unwrap_or_else(|| store.project_root().to_path_buf()),
        to_text_vec(args.argv)?,
        parse_env(args.env),
    )?;
    let receipt = store.close_experiment(CloseExperimentRequest {
        frontier_id,
        base_checkpoint_id: parse_checkpoint_id(&args.base_checkpoint)?,
        change_node_id: parse_node_id(&args.change_node)?,
        candidate_summary: NonEmptyText::new(args.candidate_summary)?,
        candidate_snapshot: snapshot,
        run_title: NonEmptyText::new(args.run_title)?,
        run_summary: args.run_summary.map(NonEmptyText::new).transpose()?,
        backend: args.backend.into(),
        benchmark_suite: NonEmptyText::new(args.benchmark_suite)?,
        command,
        code_snapshot: Some(capture_code_snapshot(store.project_root())?),
        primary_metric: MetricObservation {
            metric_key: NonEmptyText::new(args.primary_metric_key)?,
            unit: args.primary_metric_unit.into(),
            objective: args.primary_metric_objective.into(),
            value: args.primary_metric_value,
        },
        // Each `--metric` is `key:unit:objective:value`.
        supporting_metrics: args
            .metrics
            .into_iter()
            .map(parse_metric_observation)
            .collect::<Result<Vec<_>, _>>()?,
        note: FrontierNote {
            summary: NonEmptyText::new(args.note)?,
            next_hypotheses: to_text_vec(args.next_hypotheses)?,
        },
        verdict: args.verdict.into(),
        decision_title: NonEmptyText::new(args.decision_title)?,
        decision_rationale: NonEmptyText::new(args.decision_rationale)?,
        // No analysis node is linked from this CLI path.
        analysis_node_id: None,
    })?;
    print_json(&receipt)
}
+
+fn run_skill_install(args: SkillInstallArgs) -> Result<(), StoreError> {
+ if let Some(name) = args.name.as_deref() {
+ let skill = resolve_bundled_skill(Some(name))?;
+ let destination = args
+ .destination
+ .unwrap_or(default_skill_root()?.join(skill.name));
+ install_skill(skill, &destination)?;
+ println!("{}", destination.display());
+ } else {
+ let destination_root = args.destination.unwrap_or(default_skill_root()?);
+ for skill in bundled_skill::bundled_skill_summaries() {
+ let destination = destination_root.join(skill.name);
+ install_skill(resolve_bundled_skill(Some(skill.name))?, &destination)?;
+ println!("{}", destination.display());
+ }
+ }
+ Ok(())
+}
+
+fn resolve_bundled_skill(
+ requested_name: Option<&str>,
+) -> Result<bundled_skill::BundledSkill, StoreError> {
+ requested_name.map_or_else(
+ || Ok(bundled_skill::default_bundled_skill()),
+ |name| {
+ bundled_skill::bundled_skill(name)
+ .ok_or_else(|| invalid_input(format!("unknown bundled skill `{name}`")))
+ },
+ )
+}
+
+fn default_skill_root() -> Result<PathBuf, StoreError> {
+ dirs::home_dir()
+ .map(|home| home.join(".codex/skills"))
+ .ok_or_else(|| invalid_input("home directory not found"))
+}
+
/// Writes a skill's SKILL.md into `destination`, creating the directory
/// (and any parents) first.
fn install_skill(skill: bundled_skill::BundledSkill, destination: &Path) -> Result<(), StoreError> {
    fs::create_dir_all(destination)?;
    fs::write(destination.join("SKILL.md"), skill.body)?;
    Ok(())
}

/// Opens the project store rooted at `path`.
fn open_store(path: &Path) -> Result<ProjectStore, StoreError> {
    ProjectStore::open(utf8_path(path.to_path_buf()))
}

/// Converts an OS path to a UTF-8 path.
/// NOTE(review): the conversion is lossy — non-UTF-8 path bytes are
/// replaced rather than rejected; confirm that is acceptable.
fn utf8_path(path: impl Into<PathBuf>) -> Utf8PathBuf {
    Utf8PathBuf::from(path.into().to_string_lossy().into_owned())
}
+
+fn to_text_vec(values: Vec<String>) -> Result<Vec<NonEmptyText>, StoreError> {
+ values
+ .into_iter()
+ .map(NonEmptyText::new)
+ .collect::<Result<Vec<_>, _>>()
+ .map_err(StoreError::from)
+}
+
+fn to_text_set(values: Vec<String>) -> Result<BTreeSet<NonEmptyText>, StoreError> {
+ to_text_vec(values).map(BTreeSet::from_iter)
+}
+
/// Parses `KEY=VALUE` entries into an environment map.
///
/// Entries without `=` are silently dropped; when a key repeats, the last
/// occurrence wins. The value may itself contain `=` (split at the first).
fn parse_env(values: Vec<String>) -> BTreeMap<String, String> {
    let mut env = BTreeMap::new();
    for entry in values {
        if let Some((key, value)) = entry.split_once('=') {
            env.insert(key.to_owned(), value.to_owned());
        }
    }
    env
}
+
+fn lineage_attachments(parents: Vec<String>) -> Result<Vec<EdgeAttachment>, StoreError> {
+ parents
+ .into_iter()
+ .map(|parent| {
+ Ok(EdgeAttachment {
+ node_id: parse_node_id(&parent)?,
+ kind: fidget_spinner_core::EdgeKind::Lineage,
+ direction: EdgeAttachmentDirection::ExistingToNew,
+ })
+ })
+ .collect()
+}
+
/// Assembles a node payload by merging three sources; later sources win on
/// key collisions: inline `--payload-json`, then `--payload-file` contents,
/// then individual `--field key=value` overrides.
///
/// A `--field` value is parsed as JSON when possible and falls back to a
/// plain string; entries without `=` are silently skipped.
fn load_payload(
    schema: fidget_spinner_core::PayloadSchemaRef,
    payload_json: Option<String>,
    payload_file: Option<PathBuf>,
    fields: Vec<String>,
) -> Result<NodePayload, StoreError> {
    let mut map = Map::new();
    if let Some(text) = payload_json {
        map.extend(json_object(serde_json::from_str::<Value>(&text)?)?);
    }
    if let Some(path) = payload_file {
        let text = fs::read_to_string(path)?;
        map.extend(json_object(serde_json::from_str::<Value>(&text)?)?);
    }
    for field in fields {
        let Some((key, raw_value)) = field.split_once('=') else {
            continue;
        };
        // Accept any JSON literal; treat unparseable input as a raw string.
        let value = serde_json::from_str::<Value>(raw_value).unwrap_or_else(|_| json!(raw_value));
        let _ = map.insert(key.to_owned(), value);
    }
    Ok(NodePayload::with_schema(schema, map))
}
+
+fn json_object(value: Value) -> Result<Map<String, Value>, StoreError> {
+ match value {
+ Value::Object(map) => Ok(map),
+ other => Err(invalid_input(format!(
+ "expected JSON object, got {other:?}"
+ ))),
+ }
+}
+
/// Best-effort snapshot of the project's git state. Every git query
/// degrades to `None`/empty (via `run_git`) instead of failing the command,
/// so this also works outside a git repository.
fn capture_code_snapshot(project_root: &Utf8Path) -> Result<CodeSnapshotRef, StoreError> {
    let head_commit = run_git(project_root, &["rev-parse", "HEAD"])?;
    // Porcelain lines are `XY <path>`: the path starts at byte 3.
    // NOTE(review): rename entries look like `old -> new` and would be
    // recorded verbatim as a single path — confirm downstream tolerates it.
    let dirty_paths = run_git(project_root, &["status", "--porcelain"])?
        .map(|status| {
            status
                .lines()
                .filter_map(|line| line.get(3..).map(str::trim))
                .filter(|line| !line.is_empty())
                .map(Utf8PathBuf::from)
                .collect::<BTreeSet<_>>()
        })
        .unwrap_or_default();
    Ok(CodeSnapshotRef {
        // Repo root falls back to the project root when git is unavailable.
        repo_root: run_git(project_root, &["rev-parse", "--show-toplevel"])?
            .map(Utf8PathBuf::from)
            .unwrap_or_else(|| project_root.to_path_buf()),
        worktree_root: project_root.to_path_buf(),
        // Current branch name (or "HEAD" when detached).
        worktree_name: run_git(project_root, &["rev-parse", "--abbrev-ref", "HEAD"])?
            .map(NonEmptyText::new)
            .transpose()?,
        head_commit: head_commit.map(GitCommitHash::new).transpose()?,
        dirty_paths,
    })
}
+
+fn run_git(project_root: &Utf8Path, args: &[&str]) -> Result<Option<String>, StoreError> {
+ let output = std::process::Command::new("git")
+ .arg("-C")
+ .arg(project_root.as_str())
+ .args(args)
+ .output()?;
+ if !output.status.success() {
+ return Ok(None);
+ }
+ let text = String::from_utf8_lossy(&output.stdout).trim().to_owned();
+ if text.is_empty() {
+ return Ok(None);
+ }
+ Ok(Some(text))
+}
+
+fn parse_metric_observation(raw: String) -> Result<MetricObservation, StoreError> {
+ let parts = raw.split(':').collect::<Vec<_>>();
+ if parts.len() != 4 {
+ return Err(invalid_input(
+ "metrics must look like key:unit:objective:value",
+ ));
+ }
+ Ok(MetricObservation {
+ metric_key: NonEmptyText::new(parts[0])?,
+ unit: parse_metric_unit(parts[1])?,
+ objective: parse_optimization_objective(parts[2])?,
+ value: parts[3]
+ .parse::<f64>()
+ .map_err(|error| invalid_input(format!("invalid metric value: {error}")))?,
+ })
+}
+
/// Parses the unit segment of a `--metric` argument.
fn parse_metric_unit(raw: &str) -> Result<MetricUnit, StoreError> {
    match raw {
        "seconds" => Ok(MetricUnit::Seconds),
        "bytes" => Ok(MetricUnit::Bytes),
        "count" => Ok(MetricUnit::Count),
        "ratio" => Ok(MetricUnit::Ratio),
        "custom" => Ok(MetricUnit::Custom),
        other => Err(invalid_input(format!("unknown metric unit `{other}`"))),
    }
}

/// Parses the objective segment of a `--metric` argument.
fn parse_optimization_objective(raw: &str) -> Result<OptimizationObjective, StoreError> {
    match raw {
        "minimize" => Ok(OptimizationObjective::Minimize),
        "maximize" => Ok(OptimizationObjective::Maximize),
        "target" => Ok(OptimizationObjective::Target),
        other => Err(invalid_input(format!(
            "unknown optimization objective `{other}`"
        ))),
    }
}
+
// UUID-string parsers for the typed id wrappers. UUID parse failures
// convert into `StoreError` via `?`.

/// Parses a UUID string into a typed node id.
fn parse_node_id(raw: &str) -> Result<fidget_spinner_core::NodeId, StoreError> {
    Ok(fidget_spinner_core::NodeId::from_uuid(Uuid::parse_str(
        raw,
    )?))
}

/// Parses a UUID string into a typed frontier id.
fn parse_frontier_id(raw: &str) -> Result<fidget_spinner_core::FrontierId, StoreError> {
    Ok(fidget_spinner_core::FrontierId::from_uuid(Uuid::parse_str(
        raw,
    )?))
}

/// Parses a UUID string into a typed checkpoint id.
fn parse_checkpoint_id(raw: &str) -> Result<fidget_spinner_core::CheckpointId, StoreError> {
    Ok(fidget_spinner_core::CheckpointId::from_uuid(
        Uuid::parse_str(raw)?,
    ))
}
+
/// Prints any serializable value to stdout as pretty JSON.
fn print_json<T: Serialize>(value: &T) -> Result<(), StoreError> {
    println!("{}", to_pretty_json(value)?);
    Ok(())
}

/// Serializes a value to pretty JSON, mapping serde failures to `StoreError`.
fn to_pretty_json<T: Serialize>(value: &T) -> Result<String, StoreError> {
    serde_json::to_string_pretty(value).map_err(StoreError::from)
}

/// Builds a `StoreError` carrying an arbitrary message by smuggling it
/// through a `serde_json` I/O error. NOTE(review): `StoreError` apparently
/// lacks a dedicated invalid-input variant, so the `Json` variant is reused.
fn invalid_input(message: impl Into<String>) -> StoreError {
    StoreError::Json(serde_json::Error::io(std::io::Error::new(
        std::io::ErrorKind::InvalidInput,
        message.into(),
    )))
}
+
// Conversions from the CLI-facing `ValueEnum` mirrors into the core types.
// Each match is exhaustive on purpose: adding a CLI variant without a core
// mapping becomes a compile error here.

impl From<CliNodeClass> for NodeClass {
    fn from(value: CliNodeClass) -> Self {
        match value {
            CliNodeClass::Contract => Self::Contract,
            CliNodeClass::Change => Self::Change,
            CliNodeClass::Run => Self::Run,
            CliNodeClass::Analysis => Self::Analysis,
            CliNodeClass::Decision => Self::Decision,
            CliNodeClass::Research => Self::Research,
            CliNodeClass::Enabling => Self::Enabling,
            CliNodeClass::Note => Self::Note,
        }
    }
}

impl From<CliMetricUnit> for MetricUnit {
    fn from(value: CliMetricUnit) -> Self {
        match value {
            CliMetricUnit::Seconds => Self::Seconds,
            CliMetricUnit::Bytes => Self::Bytes,
            CliMetricUnit::Count => Self::Count,
            CliMetricUnit::Ratio => Self::Ratio,
            CliMetricUnit::Custom => Self::Custom,
        }
    }
}

impl From<CliOptimizationObjective> for OptimizationObjective {
    fn from(value: CliOptimizationObjective) -> Self {
        match value {
            CliOptimizationObjective::Minimize => Self::Minimize,
            CliOptimizationObjective::Maximize => Self::Maximize,
            CliOptimizationObjective::Target => Self::Target,
        }
    }
}

// Note: the CLI names are shorter than the core ones (`Local` maps to
// `LocalProcess`, etc.).
impl From<CliExecutionBackend> for ExecutionBackend {
    fn from(value: CliExecutionBackend) -> Self {
        match value {
            CliExecutionBackend::Local => Self::LocalProcess,
            CliExecutionBackend::Worktree => Self::WorktreeProcess,
            CliExecutionBackend::Ssh => Self::SshProcess,
        }
    }
}

impl From<CliFrontierVerdict> for FrontierVerdict {
    fn from(value: CliFrontierVerdict) -> Self {
        match value {
            CliFrontierVerdict::PromoteToChampion => Self::PromoteToChampion,
            CliFrontierVerdict::KeepOnFrontier => Self::KeepOnFrontier,
            CliFrontierVerdict::RevertToChampion => Self::RevertToChampion,
            CliFrontierVerdict::ArchiveDeadEnd => Self::ArchiveDeadEnd,
            CliFrontierVerdict::NeedsMoreEvidence => Self::NeedsMoreEvidence,
        }
    }
}
diff --git a/crates/fidget-spinner-cli/src/mcp/catalog.rs b/crates/fidget-spinner-cli/src/mcp/catalog.rs
new file mode 100644
index 0000000..178b980
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/catalog.rs
@@ -0,0 +1,541 @@
+use serde_json::{Value, json};
+
+/// Which process answers a request: the long-lived host or the disposable
+/// store worker it supervises.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub(crate) enum DispatchTarget {
+    Host,
+    Worker,
+}
+
+/// Whether a request may be transparently re-sent after a worker restart:
+/// `SafeReplay` marks read-only operations, `NeverReplay` marks mutations.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub(crate) enum ReplayContract {
+    SafeReplay,
+    NeverReplay,
+}
+
+/// Static metadata for one MCP tool: its name, human-readable description,
+/// dispatch target, and replay contract.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub(crate) struct ToolSpec {
+    pub name: &'static str,
+    pub description: &'static str,
+    pub dispatch: DispatchTarget,
+    pub replay: ReplayContract,
+}
+
+/// Static metadata for one MCP resource URI: dispatch target and replay
+/// contract (names/descriptions live in `list_resources`).
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub(crate) struct ResourceSpec {
+    pub uri: &'static str,
+    pub dispatch: DispatchTarget,
+    pub replay: ReplayContract,
+}
+
+impl ToolSpec {
+    /// Render this spec as the MCP `annotations` object for `tools/list`,
+    /// including a `fidgetSpinner` extension describing dispatch and replay.
+    #[must_use]
+    pub fn annotation_json(self) -> Value {
+        let read_only = self.replay == ReplayContract::SafeReplay;
+        let destructive = self.replay == ReplayContract::NeverReplay;
+        let dispatch = match self.dispatch {
+            DispatchTarget::Host => "host",
+            DispatchTarget::Worker => "worker",
+        };
+        let replay_contract = match self.replay {
+            ReplayContract::SafeReplay => "safe_replay",
+            ReplayContract::NeverReplay => "never_replay",
+        };
+        json!({
+            "title": self.name,
+            "readOnlyHint": read_only,
+            "destructiveHint": destructive,
+            "fidgetSpinner": {
+                "dispatch": dispatch,
+                "replayContract": replay_contract,
+            }
+        })
+    }
+}
+
+/// Look up the static [`ToolSpec`] for a tool name; returns `None` for
+/// unknown names.
+///
+/// This table is the single source of truth for each tool's description,
+/// dispatch target (host vs. worker), and replay contract. Read-only tools
+/// are `SafeReplay`; anything that mutates state is `NeverReplay`.
+#[must_use]
+pub(crate) fn tool_spec(name: &str) -> Option<ToolSpec> {
+    match name {
+        "project.bind" => Some(ToolSpec {
+            name: "project.bind",
+            description: "Bind this MCP session to a project root or nested path inside a project store.",
+            dispatch: DispatchTarget::Host,
+            replay: ReplayContract::NeverReplay,
+        }),
+        "project.status" => Some(ToolSpec {
+            name: "project.status",
+            description: "Read local project status, store paths, and git availability for the currently bound project.",
+            dispatch: DispatchTarget::Worker,
+            replay: ReplayContract::SafeReplay,
+        }),
+        "project.schema" => Some(ToolSpec {
+            name: "project.schema",
+            description: "Read the project-local payload schema and field validation tiers.",
+            dispatch: DispatchTarget::Worker,
+            replay: ReplayContract::SafeReplay,
+        }),
+        "frontier.list" => Some(ToolSpec {
+            name: "frontier.list",
+            description: "List frontiers for the current project.",
+            dispatch: DispatchTarget::Worker,
+            replay: ReplayContract::SafeReplay,
+        }),
+        "frontier.status" => Some(ToolSpec {
+            name: "frontier.status",
+            description: "Read one frontier projection, including champion and active candidates.",
+            dispatch: DispatchTarget::Worker,
+            replay: ReplayContract::SafeReplay,
+        }),
+        "frontier.init" => Some(ToolSpec {
+            name: "frontier.init",
+            description: "Create a new frontier rooted in a contract node. If the project is a git repo, the current HEAD becomes the initial champion when possible.",
+            dispatch: DispatchTarget::Worker,
+            replay: ReplayContract::NeverReplay,
+        }),
+        "node.create" => Some(ToolSpec {
+            name: "node.create",
+            description: "Create a generic DAG node with project payload fields and optional lineage parents.",
+            dispatch: DispatchTarget::Worker,
+            replay: ReplayContract::NeverReplay,
+        }),
+        "change.record" => Some(ToolSpec {
+            name: "change.record",
+            description: "Record a core-path change hypothesis with low ceremony.",
+            dispatch: DispatchTarget::Worker,
+            replay: ReplayContract::NeverReplay,
+        }),
+        "node.list" => Some(ToolSpec {
+            name: "node.list",
+            description: "List recent nodes. Archived nodes are hidden unless explicitly requested.",
+            dispatch: DispatchTarget::Worker,
+            replay: ReplayContract::SafeReplay,
+        }),
+        "node.read" => Some(ToolSpec {
+            name: "node.read",
+            description: "Read one node including payload, diagnostics, and hidden annotations.",
+            dispatch: DispatchTarget::Worker,
+            replay: ReplayContract::SafeReplay,
+        }),
+        "node.annotate" => Some(ToolSpec {
+            name: "node.annotate",
+            description: "Attach a free-form annotation to any node.",
+            dispatch: DispatchTarget::Worker,
+            replay: ReplayContract::NeverReplay,
+        }),
+        "node.archive" => Some(ToolSpec {
+            name: "node.archive",
+            description: "Archive a node so it falls out of default enumeration without being deleted.",
+            dispatch: DispatchTarget::Worker,
+            replay: ReplayContract::NeverReplay,
+        }),
+        "note.quick" => Some(ToolSpec {
+            name: "note.quick",
+            description: "Push a quick off-path note without bureaucratic experiment closure.",
+            dispatch: DispatchTarget::Worker,
+            replay: ReplayContract::NeverReplay,
+        }),
+        "research.record" => Some(ToolSpec {
+            name: "research.record",
+            description: "Record off-path research or enabling work that should live in the DAG but not on the bureaucratic core path.",
+            dispatch: DispatchTarget::Worker,
+            replay: ReplayContract::NeverReplay,
+        }),
+        "experiment.close" => Some(ToolSpec {
+            name: "experiment.close",
+            description: "Atomically close a core-path experiment with candidate checkpoint capture, measured result, note, and verdict.",
+            dispatch: DispatchTarget::Worker,
+            replay: ReplayContract::NeverReplay,
+        }),
+        // Skill and system tools are answered by the host itself and never
+        // touch the project store, so they all dispatch to Host.
+        "skill.list" => Some(ToolSpec {
+            name: "skill.list",
+            description: "List bundled skills shipped with this package.",
+            dispatch: DispatchTarget::Host,
+            replay: ReplayContract::SafeReplay,
+        }),
+        "skill.show" => Some(ToolSpec {
+            name: "skill.show",
+            description: "Return one bundled skill text shipped with this package. Defaults to `fidget-spinner` when name is omitted.",
+            dispatch: DispatchTarget::Host,
+            replay: ReplayContract::SafeReplay,
+        }),
+        "system.health" => Some(ToolSpec {
+            name: "system.health",
+            description: "Read MCP host health, session binding, worker generation, rollout state, and the last fault.",
+            dispatch: DispatchTarget::Host,
+            replay: ReplayContract::SafeReplay,
+        }),
+        "system.telemetry" => Some(ToolSpec {
+            name: "system.telemetry",
+            description: "Read aggregate request, retry, restart, and per-operation telemetry for this MCP session.",
+            dispatch: DispatchTarget::Host,
+            replay: ReplayContract::SafeReplay,
+        }),
+        _ => None,
+    }
+}
+
+/// Look up the static [`ResourceSpec`] for a resource URI; `None` for unknown
+/// URIs. Every bundled resource is a read-only lookup, so the replay contract
+/// is uniformly `SafeReplay`; only the dispatch target varies.
+#[must_use]
+pub(crate) fn resource_spec(uri: &str) -> Option<ResourceSpec> {
+    let (uri, dispatch) = match uri {
+        "fidget-spinner://project/config" => {
+            ("fidget-spinner://project/config", DispatchTarget::Worker)
+        }
+        "fidget-spinner://project/schema" => {
+            ("fidget-spinner://project/schema", DispatchTarget::Worker)
+        }
+        "fidget-spinner://skill/fidget-spinner" => {
+            ("fidget-spinner://skill/fidget-spinner", DispatchTarget::Host)
+        }
+        "fidget-spinner://skill/frontier-loop" => {
+            ("fidget-spinner://skill/frontier-loop", DispatchTarget::Host)
+        }
+        _ => return None,
+    };
+    Some(ResourceSpec {
+        uri,
+        dispatch,
+        replay: ReplayContract::SafeReplay,
+    })
+}
+
+/// Build the tool definitions served by `tools/list`, in canonical order.
+#[must_use]
+pub(crate) fn tool_definitions() -> Vec<Value> {
+    // Single source of display ordering; every name must resolve via `tool_spec`.
+    const TOOL_NAMES: [&str; 19] = [
+        "project.bind",
+        "project.status",
+        "project.schema",
+        "frontier.list",
+        "frontier.status",
+        "frontier.init",
+        "node.create",
+        "change.record",
+        "node.list",
+        "node.read",
+        "node.annotate",
+        "node.archive",
+        "note.quick",
+        "research.record",
+        "experiment.close",
+        "skill.list",
+        "skill.show",
+        "system.health",
+        "system.telemetry",
+    ];
+    let mut definitions = Vec::with_capacity(TOOL_NAMES.len());
+    for name in TOOL_NAMES {
+        // Names without a spec are silently skipped (filter_map semantics).
+        let Some(spec) = tool_spec(name) else { continue };
+        definitions.push(json!({
+            "name": spec.name,
+            "description": spec.description,
+            "inputSchema": input_schema(spec.name),
+            "annotations": spec.annotation_json(),
+        }));
+    }
+    definitions
+}
+
+/// Build the resource descriptors served by `resources/list`.
+#[must_use]
+pub(crate) fn list_resources() -> Vec<Value> {
+    // (uri, name, description, mimeType) for each bundled resource.
+    let entries = [
+        (
+            "fidget-spinner://project/config",
+            "project-config",
+            "Project-local store configuration",
+            "application/json",
+        ),
+        (
+            "fidget-spinner://project/schema",
+            "project-schema",
+            "Project-local payload schema and validation tiers",
+            "application/json",
+        ),
+        (
+            "fidget-spinner://skill/fidget-spinner",
+            "fidget-spinner-skill",
+            "Bundled base Fidget Spinner skill text for this package",
+            "text/markdown",
+        ),
+        (
+            "fidget-spinner://skill/frontier-loop",
+            "frontier-loop-skill",
+            "Bundled frontier-loop specialization skill text for this package",
+            "text/markdown",
+        ),
+    ];
+    entries
+        .into_iter()
+        .map(|(uri, name, description, mime_type)| {
+            json!({
+                "uri": uri,
+                "name": name,
+                "description": description,
+                "mimeType": mime_type,
+            })
+        })
+        .collect()
+}
+
+/// JSON Schema for a tool's input object, keyed by tool name.
+///
+/// Every schema is a closed object (`additionalProperties: false`); unknown
+/// tool names fall through to an empty closed object.
+fn input_schema(name: &str) -> Value {
+    match name {
+        // Tools that take no arguments at all.
+        "project.status" | "project.schema" | "skill.list" | "system.health"
+        | "system.telemetry" => json!({"type":"object","additionalProperties":false}),
+        "project.bind" => json!({
+            "type": "object",
+            "properties": {
+                "path": { "type": "string", "description": "Project root or any nested path inside a project with .fidget_spinner state." }
+            },
+            "required": ["path"],
+            "additionalProperties": false
+        }),
+        "skill.show" => json!({
+            "type": "object",
+            "properties": {
+                "name": { "type": "string", "description": "Bundled skill name. Defaults to `fidget-spinner`." }
+            },
+            "additionalProperties": false
+        }),
+        "frontier.list" => json!({"type":"object","additionalProperties":false}),
+        "frontier.status" => json!({
+            "type": "object",
+            "properties": {
+                "frontier_id": { "type": "string", "description": "Frontier UUID" }
+            },
+            "required": ["frontier_id"],
+            "additionalProperties": false
+        }),
+        "frontier.init" => json!({
+            "type": "object",
+            "properties": {
+                "label": { "type": "string" },
+                "objective": { "type": "string" },
+                "contract_title": { "type": "string" },
+                "contract_summary": { "type": "string" },
+                "benchmark_suites": { "type": "array", "items": { "type": "string" } },
+                "promotion_criteria": { "type": "array", "items": { "type": "string" } },
+                "primary_metric": metric_spec_schema(),
+                "supporting_metrics": { "type": "array", "items": metric_spec_schema() },
+                "seed_summary": { "type": "string" }
+            },
+            "required": ["label", "objective", "contract_title", "benchmark_suites", "promotion_criteria", "primary_metric"],
+            "additionalProperties": false
+        }),
+        "node.create" => json!({
+            "type": "object",
+            "properties": {
+                "class": node_class_schema(),
+                "frontier_id": { "type": "string" },
+                "title": { "type": "string" },
+                "summary": { "type": "string" },
+                "payload": { "type": "object" },
+                "annotations": { "type": "array", "items": annotation_schema() },
+                "parents": { "type": "array", "items": { "type": "string" } }
+            },
+            "required": ["class", "title"],
+            "additionalProperties": false
+        }),
+        "change.record" => json!({
+            "type": "object",
+            "properties": {
+                "frontier_id": { "type": "string" },
+                "title": { "type": "string" },
+                "summary": { "type": "string" },
+                "body": { "type": "string" },
+                "hypothesis": { "type": "string" },
+                "base_checkpoint_id": { "type": "string" },
+                "benchmark_suite": { "type": "string" },
+                "annotations": { "type": "array", "items": annotation_schema() },
+                "parents": { "type": "array", "items": { "type": "string" } }
+            },
+            "required": ["frontier_id", "title", "body"],
+            "additionalProperties": false
+        }),
+        "node.list" => json!({
+            "type": "object",
+            "properties": {
+                "frontier_id": { "type": "string" },
+                "class": node_class_schema(),
+                "include_archived": { "type": "boolean" },
+                "limit": { "type": "integer", "minimum": 1, "maximum": 500 }
+            },
+            "additionalProperties": false
+        }),
+        // Both take a single node id.
+        "node.read" | "node.archive" => json!({
+            "type": "object",
+            "properties": {
+                "node_id": { "type": "string" }
+            },
+            "required": ["node_id"],
+            "additionalProperties": false
+        }),
+        "node.annotate" => json!({
+            "type": "object",
+            "properties": {
+                "node_id": { "type": "string" },
+                "body": { "type": "string" },
+                "label": { "type": "string" },
+                "visible": { "type": "boolean" }
+            },
+            "required": ["node_id", "body"],
+            "additionalProperties": false
+        }),
+        "note.quick" => json!({
+            "type": "object",
+            "properties": {
+                "frontier_id": { "type": "string" },
+                "title": { "type": "string" },
+                "body": { "type": "string" },
+                "annotations": { "type": "array", "items": annotation_schema() },
+                "parents": { "type": "array", "items": { "type": "string" } }
+            },
+            "required": ["title", "body"],
+            "additionalProperties": false
+        }),
+        "research.record" => json!({
+            "type": "object",
+            "properties": {
+                "frontier_id": { "type": "string" },
+                "title": { "type": "string" },
+                "summary": { "type": "string" },
+                "body": { "type": "string" },
+                "annotations": { "type": "array", "items": annotation_schema() },
+                "parents": { "type": "array", "items": { "type": "string" } }
+            },
+            "required": ["title", "body"],
+            "additionalProperties": false
+        }),
+        "experiment.close" => json!({
+            "type": "object",
+            "properties": {
+                "frontier_id": { "type": "string" },
+                "base_checkpoint_id": { "type": "string" },
+                "change_node_id": { "type": "string" },
+                "candidate_summary": { "type": "string" },
+                "run": run_schema(),
+                "primary_metric": metric_observation_schema(),
+                "supporting_metrics": { "type": "array", "items": metric_observation_schema() },
+                "note": note_schema(),
+                "verdict": verdict_schema(),
+                "decision_title": { "type": "string" },
+                "decision_rationale": { "type": "string" },
+                "analysis_node_id": { "type": "string" }
+            },
+            "required": [
+                "frontier_id",
+                "base_checkpoint_id",
+                "change_node_id",
+                "candidate_summary",
+                "run",
+                "primary_metric",
+                "note",
+                "verdict",
+                "decision_title",
+                "decision_rationale"
+            ],
+            "additionalProperties": false
+        }),
+        _ => json!({"type":"object","additionalProperties":false}),
+    }
+}
+
+/// Schema for a metric declaration: key, unit, and optimization objective.
+fn metric_spec_schema() -> Value {
+    json!({
+        "type": "object",
+        "properties": {
+            "key": { "type": "string" },
+            "unit": metric_unit_schema(),
+            "objective": optimization_objective_schema()
+        },
+        "required": ["key", "unit", "objective"],
+        "additionalProperties": false
+    })
+}
+
+/// Schema for a measured metric: a metric declaration plus its numeric value.
+fn metric_observation_schema() -> Value {
+    json!({
+        "type": "object",
+        "properties": {
+            "key": { "type": "string" },
+            "unit": metric_unit_schema(),
+            "objective": optimization_objective_schema(),
+            "value": { "type": "number" }
+        },
+        "required": ["key", "unit", "objective", "value"],
+        "additionalProperties": false
+    })
+}
+
+/// Schema for a free-form annotation: required body, optional label and
+/// visibility flag.
+fn annotation_schema() -> Value {
+    json!({
+        "type": "object",
+        "properties": {
+            "body": { "type": "string" },
+            "label": { "type": "string" },
+            "visible": { "type": "boolean" }
+        },
+        "required": ["body"],
+        "additionalProperties": false
+    })
+}
+
+/// Enum schema for node classes; snake_case names mirroring the core
+/// `NodeClass` variants.
+fn node_class_schema() -> Value {
+    json!({
+        "type": "string",
+        "enum": ["contract", "change", "run", "analysis", "decision", "research", "enabling", "note"]
+    })
+}
+
+/// Enum schema for metric units; mirrors the core `MetricUnit` variants.
+fn metric_unit_schema() -> Value {
+    json!({
+        "type": "string",
+        "enum": ["seconds", "bytes", "count", "ratio", "custom"]
+    })
+}
+
+/// Enum schema for optimization objectives; mirrors `OptimizationObjective`.
+fn optimization_objective_schema() -> Value {
+    json!({
+        "type": "string",
+        "enum": ["minimize", "maximize", "target"]
+    })
+}
+
+/// Enum schema for experiment verdicts; snake_case names mirroring the core
+/// `FrontierVerdict` variants.
+fn verdict_schema() -> Value {
+    json!({
+        "type": "string",
+        "enum": [
+            "promote_to_champion",
+            "keep_on_frontier",
+            "revert_to_champion",
+            "archive_dead_end",
+            "needs_more_evidence"
+        ]
+    })
+}
+
+/// Schema for the run descriptor used by `experiment.close`: title, backend,
+/// benchmark suite, and the command (argv plus optional cwd/env).
+fn run_schema() -> Value {
+    json!({
+        "type": "object",
+        "properties": {
+            "title": { "type": "string" },
+            "summary": { "type": "string" },
+            "backend": {
+                "type": "string",
+                "enum": ["local_process", "worktree_process", "ssh_process"]
+            },
+            "benchmark_suite": { "type": "string" },
+            "command": {
+                "type": "object",
+                "properties": {
+                    "working_directory": { "type": "string" },
+                    "argv": { "type": "array", "items": { "type": "string" } },
+                    "env": {
+                        "type": "object",
+                        "additionalProperties": { "type": "string" }
+                    }
+                },
+                "required": ["argv"],
+                "additionalProperties": false
+            }
+        },
+        "required": ["title", "backend", "benchmark_suite", "command"],
+        "additionalProperties": false
+    })
+}
+
+/// Schema for the closure note used by `experiment.close`: a summary plus
+/// optional follow-up hypotheses.
+fn note_schema() -> Value {
+    json!({
+        "type": "object",
+        "properties": {
+            "summary": { "type": "string" },
+            "next_hypotheses": { "type": "array", "items": { "type": "string" } }
+        },
+        "required": ["summary"],
+        "additionalProperties": false
+    })
+}
diff --git a/crates/fidget-spinner-cli/src/mcp/fault.rs b/crates/fidget-spinner-cli/src/mcp/fault.rs
new file mode 100644
index 0000000..e9d1fce
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/fault.rs
@@ -0,0 +1,99 @@
+use serde::{Deserialize, Serialize};
+use serde_json::{Value, json};
+use time::OffsetDateTime;
+
+/// Broad classification of a fault; mapped onto a JSON-RPC error code by
+/// `FaultRecord::jsonrpc_code`. Serialized with the derived (PascalCase)
+/// variant names.
+#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)]
+pub(crate) enum FaultKind {
+    InvalidInput,
+    NotInitialized,
+    Unavailable,
+    Transient,
+    Internal,
+}
+
+/// Which layer of the host/worker pipeline produced the fault.
+#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)]
+pub(crate) enum FaultStage {
+    Host,
+    Worker,
+    Store,
+    Transport,
+    Protocol,
+    Rollout,
+}
+
+/// One recorded fault: classification, origin, and retry bookkeeping.
+#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
+pub(crate) struct FaultRecord {
+    pub kind: FaultKind,
+    pub stage: FaultStage,
+    /// Logical operation name (e.g. "worker.read") that hit the fault.
+    pub operation: String,
+    /// Human-readable message; also used as the JSON-RPC error message.
+    pub message: String,
+    /// Whether the host may transparently retry the operation.
+    pub retryable: bool,
+    /// Whether a retry has already been performed for this fault.
+    pub retried: bool,
+    /// Worker generation that observed the fault, when retryable.
+    pub worker_generation: Option<u64>,
+    pub occurred_at: OffsetDateTime,
+}
+
+impl FaultRecord {
+    /// Build a fault with conservative defaults: not retryable, not retried,
+    /// no worker generation, timestamped with the current UTC time.
+    #[must_use]
+    pub fn new(
+        kind: FaultKind,
+        stage: FaultStage,
+        operation: impl Into<String>,
+        message: impl Into<String>,
+    ) -> Self {
+        Self {
+            kind,
+            stage,
+            operation: operation.into(),
+            message: message.into(),
+            retryable: false,
+            retried: false,
+            worker_generation: None,
+            occurred_at: OffsetDateTime::now_utc(),
+        }
+    }
+
+    /// Mark this fault as retryable, recording the worker generation that
+    /// observed it (if any).
+    #[must_use]
+    pub fn retryable(mut self, worker_generation: Option<u64>) -> Self {
+        self.retryable = true;
+        self.worker_generation = worker_generation;
+        self
+    }
+
+    /// Mark that the host has already retried the operation for this fault.
+    #[must_use]
+    pub fn mark_retried(mut self) -> Self {
+        self.retried = true;
+        self
+    }
+
+    /// Render this fault as a JSON-RPC `error` object, carrying the full
+    /// record in `data`.
+    #[must_use]
+    pub fn into_jsonrpc_error(self) -> Value {
+        // `json!` serializes each leaf expression by reference (the same
+        // reason `into_tool_result` can use `self.message` and then `self`),
+        // so no clone of the message is needed here.
+        json!({
+            "code": self.jsonrpc_code(),
+            "message": self.message,
+            "data": self,
+        })
+    }
+
+    /// Render this fault as an MCP tool result with `isError: true`, keeping
+    /// the structured record alongside the plain-text message.
+    #[must_use]
+    pub fn into_tool_result(self) -> Value {
+        json!({
+            "content": [{
+                "type": "text",
+                "text": self.message,
+            }],
+            "structuredContent": self,
+            "isError": true,
+        })
+    }
+
+    /// Map the fault kind onto a JSON-RPC error code: `-32602` (invalid
+    /// params), implementation-defined server errors for not-initialized /
+    /// unavailable states, and `-32603` (internal error) otherwise.
+    #[must_use]
+    pub const fn jsonrpc_code(&self) -> i64 {
+        match self.kind {
+            FaultKind::InvalidInput => -32602,
+            FaultKind::NotInitialized => -32002,
+            FaultKind::Unavailable => -32004,
+            FaultKind::Transient | FaultKind::Internal => -32603,
+        }
+    }
+}
diff --git a/crates/fidget-spinner-cli/src/mcp/host/binary.rs b/crates/fidget-spinner-cli/src/mcp/host/binary.rs
new file mode 100644
index 0000000..2107461
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/host/binary.rs
@@ -0,0 +1,43 @@
+use std::fs;
+use std::io;
+use std::path::{Path, PathBuf};
+
+use fidget_spinner_store_sqlite::StoreError;
+
+use crate::mcp::protocol::BinaryFingerprint;
+
+/// Tracks the host binary on disk so a rebuilt executable can be detected
+/// and rolled out mid-session.
+pub(super) struct BinaryRuntime {
+    pub(super) path: PathBuf,
+    /// Fingerprint captured at startup; compared against the current on-disk
+    /// fingerprint to detect a pending rollout.
+    startup_fingerprint: BinaryFingerprint,
+    pub(super) launch_path_stable: bool,
+}
+
+impl BinaryRuntime {
+    /// Fingerprint the binary at `path` and remember whether the launch path
+    /// looks stable, i.e. not inside a Cargo `target` build directory (a dev
+    /// build there may be recompiled and replaced mid-session).
+    pub(super) fn new(path: PathBuf) -> Result<Self, StoreError> {
+        let startup_fingerprint = fingerprint_binary(&path)?;
+        // Compare path components as OsStr directly; no lossy allocation is
+        // needed, and only an exact UTF-8 "target" component can match either way.
+        let launch_path_stable = !path
+            .components()
+            .any(|component| component.as_os_str() == "target");
+        Ok(Self {
+            path,
+            startup_fingerprint,
+            launch_path_stable,
+        })
+    }
+
+    /// True when the on-disk binary no longer matches the fingerprint taken
+    /// at startup, i.e. a new build is ready to roll out.
+    pub(super) fn rollout_pending(&self) -> Result<bool, StoreError> {
+        Ok(fingerprint_binary(&self.path)? != self.startup_fingerprint)
+    }
+}
+
+/// Fingerprint a binary by file length and mtime — cheap change detection;
+/// no content hash is computed.
+fn fingerprint_binary(path: &Path) -> Result<BinaryFingerprint, StoreError> {
+    let metadata = fs::metadata(path)?;
+    // A pre-epoch mtime is surfaced as an I/O error rather than a panic.
+    let modified_unix_nanos = metadata
+        .modified()?
+        .duration_since(std::time::UNIX_EPOCH)
+        .map_err(|error| io::Error::other(format!("invalid binary mtime: {error}")))?
+        .as_nanos();
+    Ok(BinaryFingerprint {
+        length_bytes: metadata.len(),
+        modified_unix_nanos,
+    })
+}
diff --git a/crates/fidget-spinner-cli/src/mcp/host/config.rs b/crates/fidget-spinner-cli/src/mcp/host/config.rs
new file mode 100644
index 0000000..8d1ee4b
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/host/config.rs
@@ -0,0 +1,18 @@
+use std::path::PathBuf;
+
+use fidget_spinner_store_sqlite::StoreError;
+
+/// Static host configuration captured at startup.
+#[derive(Clone, Debug)]
+pub(super) struct HostConfig {
+    /// Path of the running executable; workers are respawned from it.
+    pub(super) executable: PathBuf,
+    /// Project root supplied on the command line, if any.
+    pub(super) initial_project: Option<PathBuf>,
+}
+
+impl HostConfig {
+    /// Capture the currently running executable path (so workers can be
+    /// spawned from the same binary) plus an optional initial project root.
+    pub(super) fn new(initial_project: Option<PathBuf>) -> Result<Self, StoreError> {
+        Ok(Self {
+            executable: std::env::current_exe()?,
+            initial_project,
+        })
+    }
+}
diff --git a/crates/fidget-spinner-cli/src/mcp/host/mod.rs b/crates/fidget-spinner-cli/src/mcp/host/mod.rs
new file mode 100644
index 0000000..21c19ec
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/host/mod.rs
@@ -0,0 +1,8 @@
+//! Stable host process that owns the MCP session and routes store work to a disposable worker.
+
+mod binary;
+mod config;
+mod process;
+mod runtime;
+
+pub(crate) use runtime::run_host as serve;
diff --git a/crates/fidget-spinner-cli/src/mcp/host/process.rs b/crates/fidget-spinner-cli/src/mcp/host/process.rs
new file mode 100644
index 0000000..d4cbb4b
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/host/process.rs
@@ -0,0 +1,246 @@
+use std::io::{BufRead, BufReader, BufWriter, Write};
+use std::path::PathBuf;
+use std::process::{Child, ChildStdin, ChildStdout, Command, Stdio};
+
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
+
+use crate::mcp::fault::{FaultKind, FaultRecord, FaultStage};
+use crate::mcp::protocol::{
+ HostRequestId, WorkerOperation, WorkerOutcome, WorkerRequest, WorkerResponse, WorkerSpawnConfig,
+};
+
+/// A resolved project binding: the path the client asked for and the project
+/// root it resolved to.
+#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
+pub(super) struct ProjectBinding {
+    pub(super) requested_path: PathBuf,
+    pub(super) project_root: PathBuf,
+}
+
+/// Supervises the single disposable worker process: lazy spawning,
+/// request/response framing over line-delimited JSON, and teardown.
+pub(super) struct WorkerSupervisor {
+    config: WorkerSpawnConfig,
+    /// Incremented on every spawn; stamped onto faults so the host can tell
+    /// which worker instance observed them.
+    generation: u64,
+    /// Test-only fault injection: kill the worker once right after a request
+    /// is written, before any reply is read.
+    crash_before_reply_once: bool,
+    bound_project_root: Option<PathBuf>,
+    child: Option<Child>,
+    stdin: Option<BufWriter<ChildStdin>>,
+    stdout: Option<BufReader<ChildStdout>>,
+}
+
+impl WorkerSupervisor {
+    /// Create a supervisor with no live worker; one is spawned lazily on the
+    /// first `execute` after a project has been bound.
+    pub(super) fn new(config: WorkerSpawnConfig, generation: u64) -> Self {
+        Self {
+            config,
+            generation,
+            crash_before_reply_once: false,
+            bound_project_root: None,
+            child: None,
+            stdin: None,
+            stdout: None,
+        }
+    }
+
+    /// Generation of the most recently spawned worker.
+    pub(super) fn generation(&self) -> u64 {
+        self.generation
+    }
+
+    /// Bind to `project_root`. Rebinding to the same root is a no-op; a new
+    /// root kills the current worker so the next request spawns against it.
+    pub(super) fn rebind(&mut self, project_root: PathBuf) {
+        if self
+            .bound_project_root
+            .as_ref()
+            .is_some_and(|current| current == &project_root)
+        {
+            return;
+        }
+        self.kill_current_worker();
+        self.bound_project_root = Some(project_root);
+    }
+
+    /// Send one operation to the worker and read its single-line JSON reply.
+    ///
+    /// Transport failures come back as retryable transient faults stamped
+    /// with the current worker generation; a worker that died mid-request is
+    /// reaped so the next attempt respawns it.
+    pub(super) fn execute(
+        &mut self,
+        request_id: HostRequestId,
+        operation: WorkerOperation,
+    ) -> Result<Value, FaultRecord> {
+        self.ensure_worker()?;
+        let generation = self.generation;
+        let request = WorkerRequest::Execute {
+            id: request_id,
+            operation,
+        };
+
+        let stdin = self.stdin.as_mut().ok_or_else(|| {
+            transport_fault(generation, "worker.stdin", "worker stdin is not available")
+        })?;
+        serde_json::to_writer(&mut *stdin, &request).map_err(|error| {
+            transport_fault(
+                generation,
+                "worker.write",
+                format!("failed to write worker request: {error}"),
+            )
+        })?;
+        // Requests are newline-framed; the worker replies one line per request.
+        stdin.write_all(b"\n").map_err(|error| {
+            transport_fault(
+                generation,
+                "worker.write",
+                format!("failed to frame worker request: {error}"),
+            )
+        })?;
+        stdin.flush().map_err(|error| {
+            transport_fault(
+                generation,
+                "worker.write",
+                format!("failed to flush worker request: {error}"),
+            )
+        })?;
+
+        // Test-only fault injection: drop the worker after the request went
+        // out but before any reply is read.
+        if self.crash_before_reply_once {
+            self.crash_before_reply_once = false;
+            self.kill_current_worker();
+            return Err(transport_fault(
+                generation,
+                "worker.read",
+                "worker crashed before replying",
+            ));
+        }
+
+        let stdout = self.stdout.as_mut().ok_or_else(|| {
+            transport_fault(generation, "worker.stdout", "worker stdout is not available")
+        })?;
+        let mut line = String::new();
+        let bytes = stdout.read_line(&mut line).map_err(|error| {
+            transport_fault(
+                generation,
+                "worker.read",
+                format!("failed to read worker response: {error}"),
+            )
+        })?;
+        if bytes == 0 {
+            // EOF: the worker died without replying; reap it so the next
+            // request spawns a fresh one.
+            self.kill_current_worker();
+            return Err(transport_fault(
+                generation,
+                "worker.read",
+                "worker exited before replying",
+            ));
+        }
+        let response = serde_json::from_str::<WorkerResponse>(&line).map_err(|error| {
+            // A malformed reply is a protocol fault, not a transport fault.
+            FaultRecord::new(
+                FaultKind::Transient,
+                FaultStage::Protocol,
+                "worker.read",
+                format!("invalid worker response: {error}"),
+            )
+            .retryable(Some(generation))
+        })?;
+        match response.outcome {
+            WorkerOutcome::Success { result } => Ok(result),
+            WorkerOutcome::Fault { fault } => Err(fault),
+        }
+    }
+
+    /// Kill the current worker (if any) and immediately spawn a fresh one.
+    pub(super) fn restart(&mut self) -> Result<(), FaultRecord> {
+        self.kill_current_worker();
+        self.ensure_worker()
+    }
+
+    /// True when a spawned worker is still running. A worker that exited (or
+    /// whose status cannot be read) is reaped and its pipes dropped.
+    pub(super) fn is_alive(&mut self) -> bool {
+        let Some(child) = self.child.as_mut() else {
+            return false;
+        };
+        if matches!(child.try_wait(), Ok(None)) {
+            return true;
+        }
+        // Kill + wait instead of just dropping the handle, so no zombie
+        // process is left behind on the exit/error paths.
+        self.kill_current_worker();
+        false
+    }
+
+    /// Arm the test-only crash-before-reply fault injection for exactly one
+    /// upcoming `execute` call.
+    pub(super) fn arm_crash_once(&mut self) {
+        self.crash_before_reply_once = true;
+    }
+
+    /// Spawn a worker for the bound project if none is alive. Fails with a
+    /// non-retryable fault when no project has been bound yet.
+    fn ensure_worker(&mut self) -> Result<(), FaultRecord> {
+        if self.is_alive() {
+            return Ok(());
+        }
+        let Some(project_root) = self.bound_project_root.as_ref() else {
+            return Err(FaultRecord::new(
+                FaultKind::Unavailable,
+                FaultStage::Host,
+                "worker.spawn",
+                "project is not bound; call project.bind before using project tools",
+            ));
+        };
+        self.generation += 1;
+        let mut child = Command::new(&self.config.executable)
+            .arg("mcp")
+            .arg("worker")
+            .arg("--project")
+            .arg(project_root)
+            .stdin(Stdio::piped())
+            .stdout(Stdio::piped())
+            .stderr(Stdio::inherit())
+            .spawn()
+            .map_err(|error| {
+                transport_fault(
+                    self.generation,
+                    "worker.spawn",
+                    format!("failed to spawn worker: {error}"),
+                )
+            })?;
+        let stdin = child.stdin.take().ok_or_else(|| {
+            FaultRecord::new(
+                FaultKind::Internal,
+                FaultStage::Transport,
+                "worker.spawn",
+                "worker stdin pipe was not created",
+            )
+        })?;
+        let stdout = child.stdout.take().ok_or_else(|| {
+            FaultRecord::new(
+                FaultKind::Internal,
+                FaultStage::Transport,
+                "worker.spawn",
+                "worker stdout pipe was not created",
+            )
+        })?;
+        self.child = Some(child);
+        self.stdin = Some(BufWriter::new(stdin));
+        self.stdout = Some(BufReader::new(stdout));
+        Ok(())
+    }
+
+    /// Best-effort kill + reap of the current worker, dropping its pipes.
+    fn kill_current_worker(&mut self) {
+        if let Some(child) = self.child.as_mut() {
+            let _ = child.kill();
+            let _ = child.wait();
+        }
+        self.child = None;
+        self.stdin = None;
+        self.stdout = None;
+    }
+}
+
+/// Build a retryable transient transport-stage fault stamped with the worker
+/// generation that observed it.
+fn transport_fault(generation: u64, operation: &str, message: impl Into<String>) -> FaultRecord {
+    FaultRecord::new(FaultKind::Transient, FaultStage::Transport, operation, message)
+        .retryable(Some(generation))
+}
diff --git a/crates/fidget-spinner-cli/src/mcp/host/runtime.rs b/crates/fidget-spinner-cli/src/mcp/host/runtime.rs
new file mode 100644
index 0000000..dd75544
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/host/runtime.rs
@@ -0,0 +1,719 @@
+use std::io::{self, BufRead, Write};
+#[cfg(unix)]
+use std::os::unix::process::CommandExt;
+use std::path::PathBuf;
+use std::process::Command;
+use std::time::Instant;
+
+use serde::Serialize;
+use serde_json::{Value, json};
+
+use super::{
+ binary::BinaryRuntime,
+ config::HostConfig,
+ process::{ProjectBinding, WorkerSupervisor},
+};
+use crate::mcp::catalog::{
+ DispatchTarget, ReplayContract, list_resources, resource_spec, tool_definitions, tool_spec,
+};
+use crate::mcp::fault::{FaultKind, FaultRecord, FaultStage};
+use crate::mcp::protocol::{
+ CRASH_ONCE_ENV, FORCE_ROLLOUT_ENV, HOST_STATE_ENV, HostRequestId, HostStateSeed,
+ PROTOCOL_VERSION, ProjectBindingSeed, SERVER_NAME, SessionSeed, WorkerOperation,
+ WorkerSpawnConfig,
+};
+use crate::mcp::telemetry::{
+ BinaryHealth, BindingHealth, HealthSnapshot, InitializationHealth, ServerTelemetry,
+ WorkerHealth,
+};
+
+/// Runs the MCP host loop: reads newline-delimited JSON-RPC from stdin,
+/// writes replies to stdout, and honors pending binary rollouts between
+/// messages.
+///
+/// stdin read failures are logged to stderr and skipped so a single bad
+/// read does not take down the server.
+pub(crate) fn run_host(
+    initial_project: Option<PathBuf>,
+) -> Result<(), fidget_spinner_store_sqlite::StoreError> {
+    let stdin = io::stdin();
+    let mut stdout = io::stdout().lock();
+    let mut host = HostRuntime::new(HostConfig::new(initial_project)?)?;
+
+    for line in stdin.lock().lines() {
+        let line = match line {
+            Ok(line) => line,
+            Err(error) => {
+                eprintln!("mcp stdin failure: {error}");
+                continue;
+            }
+        };
+        // Blank lines carry no message; skip them.
+        if line.trim().is_empty() {
+            continue;
+        }
+
+        let maybe_response = host.handle_line(&line);
+        if let Some(response) = maybe_response {
+            write_message(&mut stdout, &response)?;
+        }
+        // Roll forward only after the reply (if any) has been flushed.
+        host.maybe_roll_forward()?;
+    }
+
+    Ok(())
+}
+
+/// In-process state for one MCP host session.
+struct HostRuntime {
+    config: HostConfig,
+    /// Currently bound project, if any; required for worker-dispatched tools.
+    binding: Option<ProjectBinding>,
+    /// Initialize-handshake state, preserved across rollout re-execs.
+    session: SessionSeed,
+    telemetry: ServerTelemetry,
+    /// Monotonic id allocator for host->worker requests (starts at 1).
+    next_request_id: u64,
+    worker: WorkerSupervisor,
+    binary: BinaryRuntime,
+    // Test-only fault-injection keys read from the environment at startup;
+    // the matching *_consumed flags make each fire at most once per lineage.
+    force_rollout_key: Option<String>,
+    force_rollout_consumed: bool,
+    /// Set when a forced rollout should happen after the current message.
+    rollout_requested: bool,
+    crash_once_key: Option<String>,
+    crash_once_consumed: bool,
+}
+
+impl HostRuntime {
+    /// Builds the runtime, restoring state handed over by a previous
+    /// incarnation via `HOST_STATE_ENV` when present; otherwise starts from
+    /// defaults plus the optional initial project in `config`.
+    fn new(config: HostConfig) -> Result<Self, fidget_spinner_store_sqlite::StoreError> {
+        let restored = restore_host_state();
+        let session = restored
+            .as_ref()
+            .map_or_else(SessionSeed::default, |seed| seed.session.clone());
+        let telemetry = restored
+            .as_ref()
+            .map_or_else(ServerTelemetry::default, |seed| seed.telemetry.clone());
+        // Never restore an id of 0: request ids stay strictly positive.
+        let next_request_id = restored
+            .as_ref()
+            .map_or(1, |seed| seed.next_request_id.max(1));
+        let worker_generation = restored.as_ref().map_or(0, |seed| seed.worker_generation);
+        let force_rollout_consumed = restored
+            .as_ref()
+            .is_some_and(|seed| seed.force_rollout_consumed);
+        let crash_once_consumed = restored
+            .as_ref()
+            .is_some_and(|seed| seed.crash_once_consumed);
+        // A restored binding takes precedence over the CLI-provided project.
+        let binding = restored
+            .as_ref()
+            .and_then(|seed| seed.binding.clone().map(ProjectBinding::from))
+            .or(config
+                .initial_project
+                .clone()
+                .map(resolve_project_binding)
+                .transpose()?
+                .map(|resolved| resolved.binding));
+
+        let worker = {
+            let mut worker = WorkerSupervisor::new(
+                WorkerSpawnConfig {
+                    executable: config.executable.clone(),
+                },
+                worker_generation,
+            );
+            if let Some(project_root) = binding.as_ref().map(|binding| binding.project_root.clone())
+            {
+                worker.rebind(project_root);
+            }
+            worker
+        };
+
+        // Construct the binary watcher before moving `config` into the
+        // struct; this avoids the full `HostConfig` clone the literal-order
+        // version required.
+        let binary = BinaryRuntime::new(config.executable.clone())?;
+
+        Ok(Self {
+            config,
+            binding,
+            session,
+            telemetry,
+            next_request_id,
+            worker,
+            binary,
+            force_rollout_key: std::env::var(FORCE_ROLLOUT_ENV).ok(),
+            force_rollout_consumed,
+            rollout_requested: false,
+            crash_once_key: std::env::var(CRASH_ONCE_ENV).ok(),
+            crash_once_consumed,
+        })
+    }
+
+    /// Parses one JSON-RPC line and dispatches it.
+    ///
+    /// Returns `None` when no reply is owed (notifications); malformed JSON
+    /// is answered with a protocol-stage parse fault addressed to a null id.
+    fn handle_line(&mut self, line: &str) -> Option<Value> {
+        match serde_json::from_str::<Value>(line) {
+            Ok(message) => self.handle_message(message),
+            Err(error) => Some(jsonrpc_error(
+                Value::Null,
+                FaultRecord::new(
+                    FaultKind::InvalidInput,
+                    FaultStage::Protocol,
+                    "jsonrpc.parse",
+                    format!("parse error: {error}"),
+                ),
+            )),
+        }
+    }
+
+    /// Validates the JSON-RPC envelope, dispatches it, and records
+    /// per-operation telemetry around the call.
+    ///
+    /// Messages without a string `method` (e.g. stray responses) are dropped
+    /// silently via the `?` on `as_str`. For `tools/call` requests that carry
+    /// an id, faults are reported as MCP tool results instead of JSON-RPC
+    /// errors.
+    fn handle_message(&mut self, message: Value) -> Option<Value> {
+        let Some(object) = message.as_object() else {
+            return Some(jsonrpc_error(
+                Value::Null,
+                FaultRecord::new(
+                    FaultKind::InvalidInput,
+                    FaultStage::Protocol,
+                    "jsonrpc.message",
+                    "invalid request: expected JSON object",
+                ),
+            ));
+        };
+
+        // `?`: envelopes with no method are not requests; ignore them.
+        let method = object.get("method").and_then(Value::as_str)?;
+        let id = object.get("id").cloned();
+        let params = object.get("params").cloned().unwrap_or_else(|| json!({}));
+        let operation_key = operation_key(method, &params);
+        let started_at = Instant::now();
+
+        self.telemetry.record_request(&operation_key);
+        let response = match self.dispatch(method, params, id.clone()) {
+            Ok(Some(result)) => {
+                self.telemetry
+                    .record_success(&operation_key, started_at.elapsed().as_millis());
+                // Notifications (no id) owe no reply even on success.
+                id.map(|id| jsonrpc_result(id, result))
+            }
+            Ok(None) => {
+                self.telemetry
+                    .record_success(&operation_key, started_at.elapsed().as_millis());
+                None
+            }
+            Err(fault) => {
+                self.telemetry.record_error(
+                    &operation_key,
+                    fault.clone(),
+                    started_at.elapsed().as_millis(),
+                );
+                Some(match id {
+                    Some(id) => match method {
+                        "tools/call" => jsonrpc_result(id, fault.into_tool_result()),
+                        _ => jsonrpc_error(id, fault),
+                    },
+                    None => jsonrpc_error(Value::Null, fault),
+                })
+            }
+        };
+
+        // Test hook: queue a rollout once the matching operation completes.
+        if self.should_force_rollout(&operation_key) {
+            self.force_rollout_consumed = true;
+            self.telemetry.record_rollout();
+            self.rollout_requested = true;
+        }
+
+        response
+    }
+
+    /// Routes one JSON-RPC method. `Ok(None)` means no reply is owed
+    /// (notifications). Handshake methods and `ping` are always allowed;
+    /// everything else requires the initialize sequence to be complete.
+    fn dispatch(
+        &mut self,
+        method: &str,
+        params: Value,
+        request_id: Option<Value>,
+    ) -> Result<Option<Value>, FaultRecord> {
+        match method {
+            "initialize" => {
+                // Capture params for rollout handover; the session is not
+                // ready until notifications/initialized arrives.
+                self.session.initialize_params = Some(params.clone());
+                self.session.initialized = false;
+                Ok(Some(json!({
+                    "protocolVersion": PROTOCOL_VERSION,
+                    "capabilities": {
+                        "tools": { "listChanged": false },
+                        "resources": { "listChanged": false, "subscribe": false }
+                    },
+                    "serverInfo": {
+                        "name": SERVER_NAME,
+                        "version": env!("CARGO_PKG_VERSION")
+                    },
+                    "instructions": "The DAG is canonical truth. Frontier state is derived. Bind the session with project.bind before project-local DAG operations when the MCP is running unbound."
+                })))
+            }
+            "notifications/initialized" => {
+                // Reject out-of-order confirmation: initialize must come first.
+                if self.session.initialize_params.is_none() {
+                    return Err(FaultRecord::new(
+                        FaultKind::NotInitialized,
+                        FaultStage::Host,
+                        "notifications/initialized",
+                        "received initialized notification before initialize",
+                    ));
+                }
+                self.session.initialized = true;
+                Ok(None)
+            }
+            "notifications/cancelled" => Ok(None),
+            "ping" => Ok(Some(json!({}))),
+            other => {
+                self.require_initialized(other)?;
+                match other {
+                    "tools/list" => Ok(Some(json!({ "tools": tool_definitions() }))),
+                    "resources/list" => Ok(Some(json!({ "resources": list_resources() }))),
+                    "tools/call" => Ok(Some(self.dispatch_tool_call(params, request_id)?)),
+                    "resources/read" => Ok(Some(self.dispatch_resource_read(params)?)),
+                    _ => Err(FaultRecord::new(
+                        FaultKind::InvalidInput,
+                        FaultStage::Protocol,
+                        other,
+                        format!("method `{other}` is not implemented"),
+                    )),
+                }
+            }
+        }
+    }
+
+    /// Resolves the named tool against the catalog and routes it to either
+    /// the host (in-process) or the project worker.
+    fn dispatch_tool_call(
+        &mut self,
+        params: Value,
+        _request_id: Option<Value>,
+    ) -> Result<Value, FaultRecord> {
+        let envelope = deserialize::<ToolCallEnvelope>(params, "tools/call")?;
+        let Some(spec) = tool_spec(&envelope.name) else {
+            return Err(FaultRecord::new(
+                FaultKind::InvalidInput,
+                FaultStage::Host,
+                format!("tools/call:{}", envelope.name),
+                format!("unknown tool `{}`", envelope.name),
+            ));
+        };
+        match spec.dispatch {
+            DispatchTarget::Host => self.handle_host_tool(&envelope.name, envelope.arguments),
+            DispatchTarget::Worker => self.dispatch_worker_tool(spec, envelope.arguments),
+        }
+    }
+
+    /// Reads a catalog resource, serving host-owned ones inline and routing
+    /// project-scoped ones to the worker.
+    fn dispatch_resource_read(&mut self, params: Value) -> Result<Value, FaultRecord> {
+        let args = deserialize::<ReadResourceArgs>(params, "resources/read")?;
+        let spec = resource_spec(&args.uri).ok_or_else(|| {
+            FaultRecord::new(
+                FaultKind::InvalidInput,
+                FaultStage::Host,
+                format!("resources/read:{}", args.uri),
+                format!("unknown resource `{}`", args.uri),
+            )
+        })?;
+        match spec.dispatch {
+            DispatchTarget::Host => Ok(Self::handle_host_resource(spec.uri)),
+            DispatchTarget::Worker => self.dispatch_worker_operation(
+                format!("resources/read:{}", args.uri),
+                spec.replay,
+                WorkerOperation::ReadResource { uri: args.uri },
+            ),
+        }
+    }
+
+    /// Routes a catalog tool call to the worker process.
+    ///
+    /// The operation key is `tools/call:<tool>`, matching `operation_key`,
+    /// so retries recorded downstream line up with the request metrics.
+    fn dispatch_worker_tool(
+        &mut self,
+        spec: crate::mcp::catalog::ToolSpec,
+        arguments: Value,
+    ) -> Result<Value, FaultRecord> {
+        // The original bound the key to a local and cloned it; the local was
+        // never reused, so the clone was pure overhead.
+        self.dispatch_worker_operation(
+            format!("tools/call:{}", spec.name),
+            spec.replay,
+            WorkerOperation::CallTool {
+                name: spec.name.to_owned(),
+                arguments,
+            },
+        )
+    }
+
+    /// Executes an operation on the project worker, with one automatic retry
+    /// (after a worker restart) when the catalog marks the operation
+    /// `SafeReplay` and the fault is flagged retryable.
+    fn dispatch_worker_operation(
+        &mut self,
+        operation: String,
+        replay: ReplayContract,
+        worker_operation: WorkerOperation,
+    ) -> Result<Value, FaultRecord> {
+        let binding = self.require_bound_project(&operation)?;
+        self.worker.rebind(binding.project_root.clone());
+
+        // Test hook: simulate a worker crash before it can reply.
+        if self.should_crash_worker_once(&operation) {
+            self.worker.arm_crash_once();
+        }
+
+        // The same request id is reused for the retry attempt.
+        let request_id = self.allocate_request_id();
+        match self.worker.execute(request_id, worker_operation.clone()) {
+            Ok(result) => Ok(result),
+            Err(fault) => {
+                if replay == ReplayContract::SafeReplay && fault.retryable {
+                    self.telemetry.record_retry(&operation);
+                    self.telemetry.record_worker_restart();
+                    // Restart failures surface as already-retried faults.
+                    self.worker
+                        .restart()
+                        .map_err(|restart_fault| restart_fault.mark_retried())?;
+                    match self.worker.execute(request_id, worker_operation) {
+                        Ok(result) => Ok(result),
+                        Err(retry_fault) => Err(retry_fault.mark_retried()),
+                    }
+                } else {
+                    Err(fault)
+                }
+            }
+        }
+    }
+
+    /// Executes tools that run in the host process itself (project binding,
+    /// bundled skills, health, telemetry) rather than in the worker.
+    fn handle_host_tool(&mut self, name: &str, arguments: Value) -> Result<Value, FaultRecord> {
+        match name {
+            "project.bind" => {
+                let args = deserialize::<ProjectBindArgs>(arguments, "tools/call:project.bind")?;
+                let resolved = resolve_project_binding(PathBuf::from(args.path))
+                    .map_err(host_store_fault("tools/call:project.bind"))?;
+                // Point the worker at the new root before publishing the binding.
+                self.worker.rebind(resolved.binding.project_root.clone());
+                self.binding = Some(resolved.binding);
+                tool_success(&resolved.status)
+            }
+            "skill.list" => tool_success(&json!({
+                "skills": crate::bundled_skill::bundled_skill_summaries(),
+            })),
+            "skill.show" => {
+                let args = deserialize::<SkillShowArgs>(arguments, "tools/call:skill.show")?;
+                // Omitting `name` selects the default bundled skill.
+                let skill = args.name.as_deref().map_or_else(
+                    || Ok(crate::bundled_skill::default_bundled_skill()),
+                    |name| {
+                        crate::bundled_skill::bundled_skill(name).ok_or_else(|| {
+                            FaultRecord::new(
+                                FaultKind::InvalidInput,
+                                FaultStage::Host,
+                                "tools/call:skill.show",
+                                format!("unknown bundled skill `{name}`"),
+                            )
+                        })
+                    },
+                )?;
+                tool_success(&json!({
+                    "name": skill.name,
+                    "description": skill.description,
+                    "resource_uri": skill.resource_uri,
+                    "body": skill.body,
+                }))
+            }
+            "system.health" => tool_success(&HealthSnapshot {
+                initialization: InitializationHealth {
+                    ready: self.session.initialized,
+                    seed_captured: self.session.initialize_params.is_some(),
+                },
+                binding: binding_health(self.binding.as_ref()),
+                worker: WorkerHealth {
+                    worker_generation: self.worker.generation(),
+                    alive: self.worker.is_alive(),
+                },
+                binary: BinaryHealth {
+                    current_executable: self.binary.path.display().to_string(),
+                    launch_path_stable: self.binary.launch_path_stable,
+                    // Health reporting must not fail; treat errors as "no".
+                    rollout_pending: self.binary.rollout_pending().unwrap_or(false),
+                },
+                last_fault: self.telemetry.last_fault.clone(),
+            }),
+            "system.telemetry" => tool_success(&self.telemetry),
+            other => Err(FaultRecord::new(
+                FaultKind::InvalidInput,
+                FaultStage::Host,
+                format!("tools/call:{other}"),
+                format!("unknown host tool `{other}`"),
+            )),
+        }
+    }
+
+    /// Serves host-owned (bundled skill) resources.
+    ///
+    /// The `unreachable!` arm relies on the catalog routing only these two
+    /// URIs to `DispatchTarget::Host` (see `dispatch_resource_read`).
+    fn handle_host_resource(uri: &str) -> Value {
+        match uri {
+            "fidget-spinner://skill/fidget-spinner" => {
+                skill_resource(uri, crate::bundled_skill::default_bundled_skill().body)
+            }
+            "fidget-spinner://skill/frontier-loop" => skill_resource(
+                uri,
+                crate::bundled_skill::frontier_loop_bundled_skill().body,
+            ),
+            _ => unreachable!("host resources are catalog-gated"),
+        }
+    }
+
+    /// Guard: fails with a `NotInitialized` fault unless the client has
+    /// completed the initialize/initialized handshake.
+    fn require_initialized(&self, operation: &str) -> Result<(), FaultRecord> {
+        if !self.session.initialized {
+            return Err(FaultRecord::new(
+                FaultKind::NotInitialized,
+                FaultStage::Host,
+                operation,
+                "client must call initialize and notifications/initialized before normal operations",
+            ));
+        }
+        Ok(())
+    }
+
+    /// Guard: returns the active project binding, or an `Unavailable` fault
+    /// telling the client to call `project.bind` first.
+    fn require_bound_project(&self, operation: &str) -> Result<&ProjectBinding, FaultRecord> {
+        match self.binding.as_ref() {
+            Some(binding) => Ok(binding),
+            None => Err(FaultRecord::new(
+                FaultKind::Unavailable,
+                FaultStage::Host,
+                operation,
+                "project is not bound; call project.bind with the target project root or a nested path inside it",
+            )),
+        }
+    }
+
+    /// Hands out the next host->worker request id (post-increment).
+    fn allocate_request_id(&mut self) -> HostRequestId {
+        let current = self.next_request_id;
+        self.next_request_id += 1;
+        HostRequestId(current)
+    }
+
+    /// Rolls into the (possibly replaced) binary when a rollout is due,
+    /// either because a test forced it or the on-disk binary changed.
+    fn maybe_roll_forward(&mut self) -> Result<(), fidget_spinner_store_sqlite::StoreError> {
+        let binary_pending = self.binary.rollout_pending()?;
+        if !self.rollout_requested && !binary_pending {
+            return Ok(());
+        }
+        // Forced rollouts were already counted in handle_message; only count
+        // binary-change rollouts here to avoid double recording.
+        if binary_pending && !self.rollout_requested {
+            self.telemetry.record_rollout();
+        }
+        self.roll_forward()
+    }
+
+    /// Replaces this process with a fresh `mcp serve` of the current binary,
+    /// handing all session state over through `HOST_STATE_ENV`.
+    ///
+    /// On unix this uses `exec` and only returns on failure; on other
+    /// platforms rollout is unsupported and reported as an I/O error.
+    fn roll_forward(&mut self) -> Result<(), fidget_spinner_store_sqlite::StoreError> {
+        let state = HostStateSeed {
+            session: self.session.clone(),
+            telemetry: self.telemetry.clone(),
+            next_request_id: self.next_request_id,
+            binding: self.binding.clone().map(ProjectBindingSeed::from),
+            worker_generation: self.worker.generation(),
+            force_rollout_consumed: self.force_rollout_consumed,
+            crash_once_consumed: self.crash_once_consumed,
+        };
+        let serialized = serde_json::to_string(&state)?;
+        let mut command = Command::new(&self.binary.path);
+        let _ = command.arg("mcp").arg("serve");
+        // Preserve the original CLI project so a fresh, seed-less start
+        // would still bind the same project.
+        if let Some(project) = self.config.initial_project.as_ref() {
+            let _ = command.arg("--project").arg(project);
+        }
+        let _ = command.env(HOST_STATE_ENV, serialized);
+        #[cfg(unix)]
+        {
+            // exec only returns if the process image could not be replaced.
+            let error = command.exec();
+            Err(fidget_spinner_store_sqlite::StoreError::Io(error))
+        }
+        #[cfg(not(unix))]
+        {
+            return Err(fidget_spinner_store_sqlite::StoreError::Io(io::Error::new(
+                io::ErrorKind::Unsupported,
+                "host rollout requires unix exec support",
+            )));
+        }
+    }
+
+    /// Test hook predicate: true when `operation` matches the forced-rollout
+    /// key and the one-shot has not yet fired.
+    fn should_force_rollout(&self, operation: &str) -> bool {
+        if self.force_rollout_consumed {
+            return false;
+        }
+        self.force_rollout_key.as_deref() == Some(operation)
+    }
+
+    /// Test hook predicate: true (exactly once) when `operation` matches the
+    /// crash-injection key; the one-shot is consumed on the first match.
+    fn should_crash_worker_once(&mut self, operation: &str) -> bool {
+        if self.crash_once_consumed {
+            return false;
+        }
+        let matched = self.crash_once_key.as_deref() == Some(operation);
+        if matched {
+            self.crash_once_consumed = true;
+        }
+        matched
+    }
+}
+
+/// Result payload reported back to the client by `project.bind`.
+#[derive(Debug, Serialize)]
+struct ProjectBindStatus {
+    requested_path: String,
+    project_root: String,
+    state_root: String,
+    display_name: fidget_spinner_core::NonEmptyText,
+    schema: fidget_spinner_core::PayloadSchemaRef,
+    /// Whether `git rev-parse --show-toplevel` succeeded under the root.
+    git_repo_detected: bool,
+}
+
+/// A resolved binding plus the client-facing status derived while resolving.
+struct ResolvedProjectBinding {
+    binding: ProjectBinding,
+    status: ProjectBindStatus,
+}
+
+/// Opens the store reachable from `requested_path` and derives both the
+/// canonical in-memory binding and the status payload for the client.
+fn resolve_project_binding(
+    requested_path: PathBuf,
+) -> Result<ResolvedProjectBinding, fidget_spinner_store_sqlite::StoreError> {
+    let store = crate::open_store(&requested_path)?;
+    Ok(ResolvedProjectBinding {
+        binding: ProjectBinding {
+            requested_path: requested_path.clone(),
+            // Bind to the store's canonical root, not the raw input path.
+            project_root: PathBuf::from(store.project_root().as_str()),
+        },
+        status: ProjectBindStatus {
+            requested_path: requested_path.display().to_string(),
+            project_root: store.project_root().to_string(),
+            state_root: store.state_root().to_string(),
+            display_name: store.config().display_name.clone(),
+            schema: store.schema().schema_ref(),
+            git_repo_detected: crate::run_git(
+                store.project_root(),
+                &["rev-parse", "--show-toplevel"],
+            )?
+            .is_some(),
+        },
+    })
+}
+
+/// Projects the optional binding into its health-report shape.
+fn binding_health(binding: Option<&ProjectBinding>) -> BindingHealth {
+    let Some(binding) = binding else {
+        return BindingHealth {
+            bound: false,
+            requested_path: None,
+            project_root: None,
+            state_root: None,
+        };
+    };
+    let state_root = binding
+        .project_root
+        .join(fidget_spinner_store_sqlite::STORE_DIR_NAME)
+        .display()
+        .to_string();
+    BindingHealth {
+        bound: true,
+        requested_path: Some(binding.requested_path.display().to_string()),
+        project_root: Some(binding.project_root.display().to_string()),
+        state_root: Some(state_root),
+    }
+}
+
+/// Builds a single-entry MCP `resources/read` payload for a markdown skill.
+fn skill_resource(uri: &str, body: &str) -> Value {
+    let entry = json!({
+        "uri": uri,
+        "mimeType": "text/markdown",
+        "text": body,
+    });
+    json!({ "contents": [entry] })
+}
+
+/// Seed -> live binding: a field-for-field move.
+impl From<ProjectBindingSeed> for ProjectBinding {
+    fn from(value: ProjectBindingSeed) -> Self {
+        let ProjectBindingSeed {
+            requested_path,
+            project_root,
+        } = value;
+        Self {
+            requested_path,
+            project_root,
+        }
+    }
+}
+
+/// Live binding -> seed, for serializing host state across a rollout exec.
+impl From<ProjectBinding> for ProjectBindingSeed {
+    fn from(value: ProjectBinding) -> Self {
+        let ProjectBinding {
+            requested_path,
+            project_root,
+        } = value;
+        Self {
+            requested_path,
+            project_root,
+        }
+    }
+}
+
+/// Recovers state handed over by the previous host incarnation, if any.
+///
+/// NOTE(review): a present-but-unparseable seed is silently discarded and the
+/// host starts fresh — this reads as deliberate best-effort, not an error path.
+fn restore_host_state() -> Option<HostStateSeed> {
+    let raw = std::env::var(HOST_STATE_ENV).ok()?;
+    serde_json::from_str::<HostStateSeed>(&raw).ok()
+}
+
+fn deserialize<T: for<'de> serde::Deserialize<'de>>(
+ value: Value,
+ operation: &str,
+) -> Result<T, FaultRecord> {
+ serde_json::from_value(value).map_err(|error| {
+ FaultRecord::new(
+ FaultKind::InvalidInput,
+ FaultStage::Protocol,
+ operation,
+ format!("invalid params: {error}"),
+ )
+ })
+}
+
+/// Derives the telemetry key for a request: `tools/call` and
+/// `resources/read` are qualified by tool name / URI when present,
+/// everything else keys on the bare method.
+fn operation_key(method: &str, params: &Value) -> String {
+    let qualifier = match method {
+        "tools/call" => params.get("name").and_then(Value::as_str),
+        "resources/read" => params.get("uri").and_then(Value::as_str),
+        _ => None,
+    };
+    match qualifier {
+        Some(detail) => format!("{method}:{detail}"),
+        None => method.to_owned(),
+    }
+}
+
+/// Wraps `value` as a successful MCP tool result: a pretty-printed text
+/// rendering plus the raw `structuredContent`. Serialization failures
+/// surface as internal host faults.
+fn tool_success(value: &impl Serialize) -> Result<Value, FaultRecord> {
+    Ok(json!({
+        "content": [{
+            "type": "text",
+            "text": crate::to_pretty_json(value).map_err(|error| {
+                FaultRecord::new(FaultKind::Internal, FaultStage::Host, "tool_success", error.to_string())
+            })?,
+        }],
+        "structuredContent": serde_json::to_value(value).map_err(|error| {
+            FaultRecord::new(FaultKind::Internal, FaultStage::Host, "tool_success", error.to_string())
+        })?,
+        "isError": false,
+    }))
+}
+
+/// Builds a closure converting a store error into an invalid-input fault at
+/// `FaultStage::Host`, tagged with the given operation key.
+fn host_store_fault(
+    operation: &'static str,
+) -> impl FnOnce(fidget_spinner_store_sqlite::StoreError) -> FaultRecord {
+    move |error| {
+        FaultRecord::new(
+            FaultKind::InvalidInput,
+            FaultStage::Host,
+            operation,
+            error.to_string(),
+        )
+    }
+}
+
+/// Wraps a successful result in a JSON-RPC 2.0 response envelope.
+fn jsonrpc_result(id: Value, result: Value) -> Value {
+    json!({
+        "jsonrpc": "2.0",
+        "id": id,
+        "result": result,
+    })
+}
+
+/// Wraps a fault in a JSON-RPC 2.0 error envelope.
+fn jsonrpc_error(id: Value, fault: FaultRecord) -> Value {
+    json!({
+        "jsonrpc": "2.0",
+        "id": id,
+        "error": fault.into_jsonrpc_error(),
+    })
+}
+
+/// Emits one newline-delimited JSON message and flushes immediately so the
+/// client never waits on a buffered reply.
+fn write_message(
+    stdout: &mut impl Write,
+    message: &Value,
+) -> Result<(), fidget_spinner_store_sqlite::StoreError> {
+    let mut payload = serde_json::to_vec(message)?;
+    payload.push(b'\n');
+    stdout.write_all(&payload)?;
+    stdout.flush()?;
+    Ok(())
+}
+
+/// Wire shape of `tools/call` params; a missing `arguments` defaults to `{}`.
+#[derive(Debug, serde::Deserialize)]
+struct ToolCallEnvelope {
+    name: String,
+    #[serde(default = "empty_json_object")]
+    arguments: Value,
+}
+
+/// serde default for `ToolCallEnvelope::arguments`.
+fn empty_json_object() -> Value {
+    json!({})
+}
+
+/// Wire shape of `resources/read` params.
+#[derive(Debug, serde::Deserialize)]
+struct ReadResourceArgs {
+    uri: String,
+}
+
+/// Arguments for the host-side `project.bind` tool.
+#[derive(Debug, serde::Deserialize)]
+struct ProjectBindArgs {
+    path: String,
+}
+
+/// Arguments for `skill.show`; an omitted `name` selects the default skill.
+#[derive(Debug, serde::Deserialize)]
+struct SkillShowArgs {
+    name: Option<String>,
+}
diff --git a/crates/fidget-spinner-cli/src/mcp/mod.rs b/crates/fidget-spinner-cli/src/mcp/mod.rs
new file mode 100644
index 0000000..adea066
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/mod.rs
@@ -0,0 +1,10 @@
+mod catalog;
+mod fault;
+mod host;
+mod protocol;
+mod service;
+mod telemetry;
+mod worker;
+
+pub(crate) use host::serve;
+pub(crate) use worker::serve as serve_worker;
diff --git a/crates/fidget-spinner-cli/src/mcp/protocol.rs b/crates/fidget-spinner-cli/src/mcp/protocol.rs
new file mode 100644
index 0000000..1f24f37
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/protocol.rs
@@ -0,0 +1,86 @@
+use std::path::PathBuf;
+
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
+
+use crate::mcp::telemetry::ServerTelemetry;
+
+/// MCP protocol revision advertised in the initialize response.
+pub(crate) const PROTOCOL_VERSION: &str = "2025-11-25";
+pub(crate) const SERVER_NAME: &str = "fidget-spinner";
+/// Env var carrying a serialized `HostStateSeed` across a rollout exec.
+pub(crate) const HOST_STATE_ENV: &str = "FIDGET_SPINNER_MCP_HOST_STATE";
+// Test-only fault-injection env vars, read once at startup.
+pub(crate) const FORCE_ROLLOUT_ENV: &str = "FIDGET_SPINNER_MCP_TEST_FORCE_ROLLOUT_KEY";
+pub(crate) const CRASH_ONCE_ENV: &str = "FIDGET_SPINNER_MCP_TEST_HOST_CRASH_ONCE_KEY";
+pub(crate) const TRANSIENT_ONCE_ENV: &str = "FIDGET_SPINNER_MCP_TEST_WORKER_TRANSIENT_ONCE_KEY";
+pub(crate) const TRANSIENT_ONCE_MARKER_ENV: &str =
+    "FIDGET_SPINNER_MCP_TEST_WORKER_TRANSIENT_ONCE_MARKER";
+
+/// Initialize-handshake state that survives a host rollout.
+#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
+pub(crate) struct SessionSeed {
+    /// Raw params from the last `initialize` request, if one arrived.
+    pub initialize_params: Option<Value>,
+    pub initialized: bool,
+}
+
+/// Full host state serialized into `HOST_STATE_ENV` across a rollout exec.
+#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
+pub(crate) struct HostStateSeed {
+    pub session: SessionSeed,
+    pub telemetry: ServerTelemetry,
+    pub next_request_id: u64,
+    pub binding: Option<ProjectBindingSeed>,
+    pub worker_generation: u64,
+    pub force_rollout_consumed: bool,
+    pub crash_once_consumed: bool,
+}
+
+/// Serializable form of a project binding.
+#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
+pub(crate) struct ProjectBindingSeed {
+    pub requested_path: PathBuf,
+    pub project_root: PathBuf,
+}
+
+/// Monotonic id for host->worker requests; serialized as a bare u64.
+#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
+#[serde(transparent)]
+pub(crate) struct HostRequestId(pub u64);
+
+/// Host -> worker message envelope.
+#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
+#[serde(tag = "kind", rename_all = "snake_case")]
+pub(crate) enum WorkerRequest {
+    Execute {
+        id: HostRequestId,
+        operation: WorkerOperation,
+    },
+}
+
+/// The two operation kinds a worker can run.
+#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
+#[serde(tag = "kind", rename_all = "snake_case")]
+pub(crate) enum WorkerOperation {
+    CallTool { name: String, arguments: Value },
+    ReadResource { uri: String },
+}
+
+/// Worker -> host reply, correlated to its request by id.
+#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
+pub(crate) struct WorkerResponse {
+    pub id: HostRequestId,
+    pub outcome: WorkerOutcome,
+}
+
+/// Outcome payload of a worker reply.
+#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
+#[serde(tag = "status", rename_all = "snake_case")]
+pub(crate) enum WorkerOutcome {
+    Success {
+        result: Value,
+    },
+    Fault {
+        fault: crate::mcp::fault::FaultRecord,
+    },
+}
+
+/// Size + mtime fingerprint used to detect an on-disk binary swap.
+#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
+pub(crate) struct BinaryFingerprint {
+    pub length_bytes: u64,
+    pub modified_unix_nanos: u128,
+}
+
+/// Everything the supervisor needs to spawn a worker process.
+#[derive(Clone, Debug)]
+pub(crate) struct WorkerSpawnConfig {
+    pub executable: PathBuf,
+}
diff --git a/crates/fidget-spinner-cli/src/mcp/service.rs b/crates/fidget-spinner-cli/src/mcp/service.rs
new file mode 100644
index 0000000..a7cae10
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/service.rs
@@ -0,0 +1,813 @@
+use std::collections::BTreeMap;
+use std::fs;
+
+use camino::{Utf8Path, Utf8PathBuf};
+use fidget_spinner_core::{
+ AnnotationVisibility, CodeSnapshotRef, CommandRecipe, ExecutionBackend, FrontierContract,
+ FrontierNote, FrontierVerdict, MetricObservation, MetricSpec, MetricUnit, NodeAnnotation,
+ NodeClass, NodePayload, NonEmptyText,
+};
+use fidget_spinner_store_sqlite::{
+ CloseExperimentRequest, CreateFrontierRequest, CreateNodeRequest, EdgeAttachment,
+ EdgeAttachmentDirection, ListNodesQuery, ProjectStore, StoreError,
+};
+use serde::Deserialize;
+use serde_json::{Map, Value, json};
+
+use crate::mcp::fault::{FaultKind, FaultRecord, FaultStage};
+use crate::mcp::protocol::{TRANSIENT_ONCE_ENV, TRANSIENT_ONCE_MARKER_ENV, WorkerOperation};
+
+/// Worker-side executor for tools and resources, bound to one project store.
+pub(crate) struct WorkerService {
+    store: ProjectStore,
+}
+
+impl WorkerService {
+    /// Opens the project store rooted at `project`; fails if it cannot open.
+    pub fn new(project: &Utf8Path) -> Result<Self, StoreError> {
+        Ok(Self {
+            store: crate::open_store(project.as_std_path())?,
+        })
+    }
+
+    /// Runs one host-issued operation against the store.
+    ///
+    /// First gives the test-only transient-fault injector a chance to fire
+    /// for this operation key, then dispatches by operation kind.
+    pub fn execute(&mut self, operation: WorkerOperation) -> Result<Value, FaultRecord> {
+        // Same key format the host uses for telemetry and fault injection.
+        let operation_key = match &operation {
+            WorkerOperation::CallTool { name, .. } => format!("tools/call:{name}"),
+            WorkerOperation::ReadResource { uri } => format!("resources/read:{uri}"),
+        };
+        Self::maybe_inject_transient(&operation_key)?;
+
+        match operation {
+            WorkerOperation::CallTool { name, arguments } => self.call_tool(&name, arguments),
+            WorkerOperation::ReadResource { uri } => self.read_resource(&uri),
+        }
+    }
+
+ fn call_tool(&mut self, name: &str, arguments: Value) -> Result<Value, FaultRecord> {
+ match name {
+ "project.status" => tool_success(&json!({
+ "project_root": self.store.project_root(),
+ "state_root": self.store.state_root(),
+ "display_name": self.store.config().display_name,
+ "schema": self.store.schema().schema_ref(),
+ "git_repo_detected": crate::run_git(self.store.project_root(), &["rev-parse", "--show-toplevel"])
+ .map_err(store_fault("tools/call:project.status"))?
+ .is_some(),
+ })),
+ "project.schema" => tool_success(self.store.schema()),
+ "frontier.list" => tool_success(
+ &self
+ .store
+ .list_frontiers()
+ .map_err(store_fault("tools/call:frontier.list"))?,
+ ),
+ "frontier.status" => {
+ let args = deserialize::<FrontierStatusToolArgs>(arguments)?;
+ tool_success(
+ &self
+ .store
+ .frontier_projection(
+ crate::parse_frontier_id(&args.frontier_id)
+ .map_err(store_fault("tools/call:frontier.status"))?,
+ )
+ .map_err(store_fault("tools/call:frontier.status"))?,
+ )
+ }
+ "frontier.init" => {
+ let args = deserialize::<FrontierInitToolArgs>(arguments)?;
+ let initial_checkpoint = self
+ .store
+ .auto_capture_checkpoint(
+ NonEmptyText::new(
+ args.seed_summary
+ .unwrap_or_else(|| "initial champion checkpoint".to_owned()),
+ )
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ )
+ .map_err(store_fault("tools/call:frontier.init"))?;
+ let projection = self
+ .store
+ .create_frontier(CreateFrontierRequest {
+ label: NonEmptyText::new(args.label)
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ contract_title: NonEmptyText::new(args.contract_title)
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ contract_summary: args
+ .contract_summary
+ .map(NonEmptyText::new)
+ .transpose()
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ contract: FrontierContract {
+ objective: NonEmptyText::new(args.objective)
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ evaluation: fidget_spinner_core::EvaluationProtocol {
+ benchmark_suites: crate::to_text_set(args.benchmark_suites)
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ primary_metric: MetricSpec {
+ metric_key: NonEmptyText::new(args.primary_metric.key)
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ unit: parse_metric_unit_name(&args.primary_metric.unit)
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ objective: crate::parse_optimization_objective(
+ &args.primary_metric.objective,
+ )
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ },
+ supporting_metrics: args
+ .supporting_metrics
+ .into_iter()
+ .map(metric_spec_from_wire)
+ .collect::<Result<_, _>>()
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ },
+ promotion_criteria: crate::to_text_vec(args.promotion_criteria)
+ .map_err(store_fault("tools/call:frontier.init"))?,
+ },
+ initial_checkpoint,
+ })
+ .map_err(store_fault("tools/call:frontier.init"))?;
+ tool_success(&projection)
+ }
+ "node.create" => {
+ let args = deserialize::<NodeCreateToolArgs>(arguments)?;
+ let node = self
+ .store
+ .add_node(CreateNodeRequest {
+ class: parse_node_class_name(&args.class)
+ .map_err(store_fault("tools/call:node.create"))?,
+ frontier_id: args
+ .frontier_id
+ .as_deref()
+ .map(crate::parse_frontier_id)
+ .transpose()
+ .map_err(store_fault("tools/call:node.create"))?,
+ title: NonEmptyText::new(args.title)
+ .map_err(store_fault("tools/call:node.create"))?,
+ summary: args
+ .summary
+ .map(NonEmptyText::new)
+ .transpose()
+ .map_err(store_fault("tools/call:node.create"))?,
+ payload: NodePayload::with_schema(
+ self.store.schema().schema_ref(),
+ args.payload.unwrap_or_default(),
+ ),
+ annotations: tool_annotations(args.annotations)
+ .map_err(store_fault("tools/call:node.create"))?,
+ attachments: lineage_attachments(args.parents)
+ .map_err(store_fault("tools/call:node.create"))?,
+ })
+ .map_err(store_fault("tools/call:node.create"))?;
+ tool_success(&node)
+ }
+ "change.record" => {
+ let args = deserialize::<ChangeRecordToolArgs>(arguments)?;
+ let mut fields = Map::new();
+ let _ = fields.insert("body".to_owned(), Value::String(args.body));
+ if let Some(hypothesis) = args.hypothesis {
+ let _ = fields.insert("hypothesis".to_owned(), Value::String(hypothesis));
+ }
+ if let Some(base_checkpoint_id) = args.base_checkpoint_id {
+ let _ = fields.insert(
+ "base_checkpoint_id".to_owned(),
+ Value::String(base_checkpoint_id),
+ );
+ }
+ if let Some(benchmark_suite) = args.benchmark_suite {
+ let _ =
+ fields.insert("benchmark_suite".to_owned(), Value::String(benchmark_suite));
+ }
+ let node = self
+ .store
+ .add_node(CreateNodeRequest {
+ class: NodeClass::Change,
+ frontier_id: Some(
+ crate::parse_frontier_id(&args.frontier_id)
+ .map_err(store_fault("tools/call:change.record"))?,
+ ),
+ title: NonEmptyText::new(args.title)
+ .map_err(store_fault("tools/call:change.record"))?,
+ summary: args
+ .summary
+ .map(NonEmptyText::new)
+ .transpose()
+ .map_err(store_fault("tools/call:change.record"))?,
+ payload: NodePayload::with_schema(self.store.schema().schema_ref(), fields),
+ annotations: tool_annotations(args.annotations)
+ .map_err(store_fault("tools/call:change.record"))?,
+ attachments: lineage_attachments(args.parents)
+ .map_err(store_fault("tools/call:change.record"))?,
+ })
+ .map_err(store_fault("tools/call:change.record"))?;
+ tool_success(&node)
+ }
+ "node.list" => {
+ let args = deserialize::<NodeListToolArgs>(arguments)?;
+ let nodes = self
+ .store
+ .list_nodes(ListNodesQuery {
+ frontier_id: args
+ .frontier_id
+ .as_deref()
+ .map(crate::parse_frontier_id)
+ .transpose()
+ .map_err(store_fault("tools/call:node.list"))?,
+ class: args
+ .class
+ .as_deref()
+ .map(parse_node_class_name)
+ .transpose()
+ .map_err(store_fault("tools/call:node.list"))?,
+ include_archived: args.include_archived,
+ limit: args.limit.unwrap_or(20),
+ })
+ .map_err(store_fault("tools/call:node.list"))?;
+ tool_success(&nodes)
+ }
+ "node.read" => {
+ let args = deserialize::<NodeReadToolArgs>(arguments)?;
+ let node_id = crate::parse_node_id(&args.node_id)
+ .map_err(store_fault("tools/call:node.read"))?;
+ let node = self
+ .store
+ .get_node(node_id)
+ .map_err(store_fault("tools/call:node.read"))?
+ .ok_or_else(|| {
+ FaultRecord::new(
+ FaultKind::InvalidInput,
+ FaultStage::Store,
+ "tools/call:node.read",
+ format!("node {node_id} was not found"),
+ )
+ })?;
+ tool_success(&node)
+ }
+ "node.annotate" => {
+ let args = deserialize::<NodeAnnotateToolArgs>(arguments)?;
+ let annotation = NodeAnnotation {
+ id: fidget_spinner_core::AnnotationId::fresh(),
+ visibility: if args.visible {
+ AnnotationVisibility::Visible
+ } else {
+ AnnotationVisibility::HiddenByDefault
+ },
+ label: args
+ .label
+ .map(NonEmptyText::new)
+ .transpose()
+ .map_err(store_fault("tools/call:node.annotate"))?,
+ body: NonEmptyText::new(args.body)
+ .map_err(store_fault("tools/call:node.annotate"))?,
+ created_at: time::OffsetDateTime::now_utc(),
+ };
+ self.store
+ .annotate_node(
+ crate::parse_node_id(&args.node_id)
+ .map_err(store_fault("tools/call:node.annotate"))?,
+ annotation,
+ )
+ .map_err(store_fault("tools/call:node.annotate"))?;
+ tool_success(&json!({"annotated": args.node_id}))
+ }
+ "node.archive" => {
+ let args = deserialize::<NodeArchiveToolArgs>(arguments)?;
+ self.store
+ .archive_node(
+ crate::parse_node_id(&args.node_id)
+ .map_err(store_fault("tools/call:node.archive"))?,
+ )
+ .map_err(store_fault("tools/call:node.archive"))?;
+ tool_success(&json!({"archived": args.node_id}))
+ }
+ "note.quick" => {
+ let args = deserialize::<QuickNoteToolArgs>(arguments)?;
+ let node = self
+ .store
+ .add_node(CreateNodeRequest {
+ class: NodeClass::Note,
+ frontier_id: args
+ .frontier_id
+ .as_deref()
+ .map(crate::parse_frontier_id)
+ .transpose()
+ .map_err(store_fault("tools/call:note.quick"))?,
+ title: NonEmptyText::new(args.title)
+ .map_err(store_fault("tools/call:note.quick"))?,
+ summary: None,
+ payload: NodePayload::with_schema(
+ self.store.schema().schema_ref(),
+ crate::json_object(json!({ "body": args.body }))
+ .map_err(store_fault("tools/call:note.quick"))?,
+ ),
+ annotations: tool_annotations(args.annotations)
+ .map_err(store_fault("tools/call:note.quick"))?,
+ attachments: lineage_attachments(args.parents)
+ .map_err(store_fault("tools/call:note.quick"))?,
+ })
+ .map_err(store_fault("tools/call:note.quick"))?;
+ tool_success(&node)
+ }
+ "research.record" => {
+ let args = deserialize::<ResearchRecordToolArgs>(arguments)?;
+ let node = self
+ .store
+ .add_node(CreateNodeRequest {
+ class: NodeClass::Research,
+ frontier_id: args
+ .frontier_id
+ .as_deref()
+ .map(crate::parse_frontier_id)
+ .transpose()
+ .map_err(store_fault("tools/call:research.record"))?,
+ title: NonEmptyText::new(args.title)
+ .map_err(store_fault("tools/call:research.record"))?,
+ summary: args
+ .summary
+ .map(NonEmptyText::new)
+ .transpose()
+ .map_err(store_fault("tools/call:research.record"))?,
+ payload: NodePayload::with_schema(
+ self.store.schema().schema_ref(),
+ crate::json_object(json!({ "body": args.body }))
+ .map_err(store_fault("tools/call:research.record"))?,
+ ),
+ annotations: tool_annotations(args.annotations)
+ .map_err(store_fault("tools/call:research.record"))?,
+ attachments: lineage_attachments(args.parents)
+ .map_err(store_fault("tools/call:research.record"))?,
+ })
+ .map_err(store_fault("tools/call:research.record"))?;
+ tool_success(&node)
+ }
+ "experiment.close" => {
+ let args = deserialize::<ExperimentCloseToolArgs>(arguments)?;
+ let frontier_id = crate::parse_frontier_id(&args.frontier_id)
+ .map_err(store_fault("tools/call:experiment.close"))?;
+ let snapshot = self
+ .store
+ .auto_capture_checkpoint(
+ NonEmptyText::new(args.candidate_summary.clone())
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ )
+ .map_err(store_fault("tools/call:experiment.close"))?
+ .map(|seed| seed.snapshot)
+ .ok_or_else(|| {
+ FaultRecord::new(
+ FaultKind::Internal,
+ FaultStage::Store,
+ "tools/call:experiment.close",
+ format!(
+ "git repository inspection failed for {}",
+ self.store.project_root()
+ ),
+ )
+ })?;
+ let receipt = self
+ .store
+ .close_experiment(CloseExperimentRequest {
+ frontier_id,
+ base_checkpoint_id: crate::parse_checkpoint_id(&args.base_checkpoint_id)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ change_node_id: crate::parse_node_id(&args.change_node_id)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ candidate_summary: NonEmptyText::new(args.candidate_summary)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ candidate_snapshot: snapshot,
+ run_title: NonEmptyText::new(args.run.title)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ run_summary: args
+ .run
+ .summary
+ .map(NonEmptyText::new)
+ .transpose()
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ backend: parse_backend_name(&args.run.backend)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ benchmark_suite: NonEmptyText::new(args.run.benchmark_suite)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ command: command_recipe_from_wire(
+ args.run.command,
+ self.store.project_root(),
+ )
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ code_snapshot: Some(
+ capture_code_snapshot(self.store.project_root())
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ ),
+ primary_metric: metric_observation_from_wire(args.primary_metric)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ supporting_metrics: args
+ .supporting_metrics
+ .into_iter()
+ .map(metric_observation_from_wire)
+ .collect::<Result<Vec<_>, _>>()
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ note: FrontierNote {
+ summary: NonEmptyText::new(args.note.summary)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ next_hypotheses: crate::to_text_vec(args.note.next_hypotheses)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ },
+ verdict: parse_verdict_name(&args.verdict)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ decision_title: NonEmptyText::new(args.decision_title)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ decision_rationale: NonEmptyText::new(args.decision_rationale)
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ analysis_node_id: args
+ .analysis_node_id
+ .as_deref()
+ .map(crate::parse_node_id)
+ .transpose()
+ .map_err(store_fault("tools/call:experiment.close"))?,
+ })
+ .map_err(store_fault("tools/call:experiment.close"))?;
+ tool_success(&receipt)
+ }
+ other => Err(FaultRecord::new(
+ FaultKind::InvalidInput,
+ FaultStage::Worker,
+ format!("tools/call:{other}"),
+ format!("unknown tool `{other}`"),
+ )),
+ }
+ }
+
+ fn read_resource(&mut self, uri: &str) -> Result<Value, FaultRecord> {
+ match uri {
+ "fidget-spinner://project/config" => Ok(json!({
+ "contents": [{
+ "uri": uri,
+ "mimeType": "application/json",
+ "text": crate::to_pretty_json(self.store.config())
+ .map_err(store_fault("resources/read:fidget-spinner://project/config"))?,
+ }]
+ })),
+ "fidget-spinner://project/schema" => Ok(json!({
+ "contents": [{
+ "uri": uri,
+ "mimeType": "application/json",
+ "text": crate::to_pretty_json(self.store.schema())
+ .map_err(store_fault("resources/read:fidget-spinner://project/schema"))?,
+ }]
+ })),
+ _ => Err(FaultRecord::new(
+ FaultKind::InvalidInput,
+ FaultStage::Worker,
+ format!("resources/read:{uri}"),
+ format!("unknown resource `{uri}`"),
+ )),
+ }
+ }
+
+ fn maybe_inject_transient(operation: &str) -> Result<(), FaultRecord> {
+ let Some(target_operation) = std::env::var_os(TRANSIENT_ONCE_ENV) else {
+ return Ok(());
+ };
+ let target_operation = target_operation.to_string_lossy();
+ if target_operation != operation {
+ return Ok(());
+ }
+ let Some(marker_path) = std::env::var_os(TRANSIENT_ONCE_MARKER_ENV) else {
+ return Ok(());
+ };
+ if Utf8PathBuf::from(marker_path.to_string_lossy().into_owned()).exists() {
+ return Ok(());
+ }
+ fs::write(&marker_path, b"triggered").map_err(|error| {
+ FaultRecord::new(
+ FaultKind::Internal,
+ FaultStage::Worker,
+ operation,
+ format!("failed to write transient marker: {error}"),
+ )
+ })?;
+ Err(FaultRecord::new(
+ FaultKind::Transient,
+ FaultStage::Worker,
+ operation,
+ format!("injected transient fault for {operation}"),
+ )
+ .retryable(None))
+ }
+}
+
+fn deserialize<T: for<'de> Deserialize<'de>>(value: Value) -> Result<T, FaultRecord> {
+ serde_json::from_value(value).map_err(|error| {
+ FaultRecord::new(
+ FaultKind::InvalidInput,
+ FaultStage::Protocol,
+ "worker.deserialize",
+ format!("invalid params: {error}"),
+ )
+ })
+}
+
+fn tool_success(value: &impl serde::Serialize) -> Result<Value, FaultRecord> {
+ Ok(json!({
+ "content": [{
+ "type": "text",
+ "text": crate::to_pretty_json(value).map_err(store_fault("worker.tool_success"))?,
+ }],
+ "structuredContent": serde_json::to_value(value)
+ .map_err(store_fault("worker.tool_success"))?,
+ "isError": false,
+ }))
+}
+
+fn store_fault<E>(operation: &'static str) -> impl FnOnce(E) -> FaultRecord
+where
+ E: std::fmt::Display,
+{
+ move |error| {
+ FaultRecord::new(
+ classify_fault_kind(&error.to_string()),
+ FaultStage::Store,
+ operation,
+ error.to_string(),
+ )
+ }
+}
+
+fn classify_fault_kind(message: &str) -> FaultKind {
+ if message.contains("was not found")
+ || message.contains("invalid")
+ || message.contains("unknown")
+ || message.contains("empty")
+ {
+ FaultKind::InvalidInput
+ } else {
+ FaultKind::Internal
+ }
+}
+
+fn tool_annotations(raw: Vec<WireAnnotation>) -> Result<Vec<NodeAnnotation>, StoreError> {
+ raw.into_iter()
+ .map(|annotation| {
+ Ok(NodeAnnotation {
+ id: fidget_spinner_core::AnnotationId::fresh(),
+ visibility: if annotation.visible {
+ AnnotationVisibility::Visible
+ } else {
+ AnnotationVisibility::HiddenByDefault
+ },
+ label: annotation.label.map(NonEmptyText::new).transpose()?,
+ body: NonEmptyText::new(annotation.body)?,
+ created_at: time::OffsetDateTime::now_utc(),
+ })
+ })
+ .collect()
+}
+
+fn lineage_attachments(parents: Vec<String>) -> Result<Vec<EdgeAttachment>, StoreError> {
+ parents
+ .into_iter()
+ .map(|parent| {
+ Ok(EdgeAttachment {
+ node_id: crate::parse_node_id(&parent)?,
+ kind: fidget_spinner_core::EdgeKind::Lineage,
+ direction: EdgeAttachmentDirection::ExistingToNew,
+ })
+ })
+ .collect()
+}
+
/// Convert a wire metric spec (key/unit/objective strings) into the domain
/// `MetricSpec`; blank keys and unknown unit/objective names are rejected.
fn metric_spec_from_wire(raw: WireMetricSpec) -> Result<MetricSpec, StoreError> {
    Ok(MetricSpec {
        metric_key: NonEmptyText::new(raw.key)?,
        unit: parse_metric_unit_name(&raw.unit)?,
        objective: crate::parse_optimization_objective(&raw.objective)?,
    })
}

/// Like [`metric_spec_from_wire`] but also carries the observed value.
fn metric_observation_from_wire(
    raw: WireMetricObservation,
) -> Result<MetricObservation, StoreError> {
    Ok(MetricObservation {
        metric_key: NonEmptyText::new(raw.key)?,
        unit: parse_metric_unit_name(&raw.unit)?,
        objective: crate::parse_optimization_objective(&raw.objective)?,
        value: raw.value,
    })
}
+
+fn command_recipe_from_wire(
+ raw: WireRunCommand,
+ project_root: &Utf8Path,
+) -> Result<CommandRecipe, StoreError> {
+ let working_directory = raw
+ .working_directory
+ .map(Utf8PathBuf::from)
+ .unwrap_or_else(|| project_root.to_path_buf());
+ CommandRecipe::new(
+ working_directory,
+ crate::to_text_vec(raw.argv)?,
+ raw.env.into_iter().collect::<BTreeMap<_, _>>(),
+ )
+ .map_err(StoreError::from)
+}
+
/// Thin local alias over the crate-level snapshot helper so worker code can
/// stay within this module's naming scheme.
fn capture_code_snapshot(project_root: &Utf8Path) -> Result<CodeSnapshotRef, StoreError> {
    crate::capture_code_snapshot(project_root)
}
+
+fn parse_node_class_name(raw: &str) -> Result<NodeClass, StoreError> {
+ match raw {
+ "contract" => Ok(NodeClass::Contract),
+ "change" => Ok(NodeClass::Change),
+ "run" => Ok(NodeClass::Run),
+ "analysis" => Ok(NodeClass::Analysis),
+ "decision" => Ok(NodeClass::Decision),
+ "research" => Ok(NodeClass::Research),
+ "enabling" => Ok(NodeClass::Enabling),
+ "note" => Ok(NodeClass::Note),
+ other => Err(crate::invalid_input(format!(
+ "unknown node class `{other}`"
+ ))),
+ }
+}
+
/// Thin local alias over the crate-level metric-unit parser.
fn parse_metric_unit_name(raw: &str) -> Result<MetricUnit, StoreError> {
    crate::parse_metric_unit(raw)
}
+
+fn parse_backend_name(raw: &str) -> Result<ExecutionBackend, StoreError> {
+ match raw {
+ "local_process" => Ok(ExecutionBackend::LocalProcess),
+ "worktree_process" => Ok(ExecutionBackend::WorktreeProcess),
+ "ssh_process" => Ok(ExecutionBackend::SshProcess),
+ other => Err(crate::invalid_input(format!("unknown backend `{other}`"))),
+ }
+}
+
+fn parse_verdict_name(raw: &str) -> Result<FrontierVerdict, StoreError> {
+ match raw {
+ "promote_to_champion" => Ok(FrontierVerdict::PromoteToChampion),
+ "keep_on_frontier" => Ok(FrontierVerdict::KeepOnFrontier),
+ "revert_to_champion" => Ok(FrontierVerdict::RevertToChampion),
+ "archive_dead_end" => Ok(FrontierVerdict::ArchiveDeadEnd),
+ "needs_more_evidence" => Ok(FrontierVerdict::NeedsMoreEvidence),
+ other => Err(crate::invalid_input(format!("unknown verdict `{other}`"))),
+ }
+}
+
/// Arguments for the `frontier.status` tool.
#[derive(Debug, Deserialize)]
struct FrontierStatusToolArgs {
    frontier_id: String,
}

/// Arguments for the `frontier.init` tool.
#[derive(Debug, Deserialize)]
struct FrontierInitToolArgs {
    label: String,
    objective: String,
    contract_title: String,
    contract_summary: Option<String>,
    benchmark_suites: Vec<String>,
    promotion_criteria: Vec<String>,
    primary_metric: WireMetricSpec,
    #[serde(default)]
    supporting_metrics: Vec<WireMetricSpec>,
    seed_summary: Option<String>,
}

/// Arguments for the generic `node.create` tool.
#[derive(Debug, Deserialize)]
struct NodeCreateToolArgs {
    class: String,
    frontier_id: Option<String>,
    title: String,
    summary: Option<String>,
    #[serde(default)]
    payload: Option<Map<String, Value>>,
    #[serde(default)]
    annotations: Vec<WireAnnotation>,
    #[serde(default)]
    parents: Vec<String>,
}

/// Arguments for the `change.record` tool.
#[derive(Debug, Deserialize)]
struct ChangeRecordToolArgs {
    frontier_id: String,
    title: String,
    summary: Option<String>,
    body: String,
    hypothesis: Option<String>,
    base_checkpoint_id: Option<String>,
    benchmark_suite: Option<String>,
    #[serde(default)]
    annotations: Vec<WireAnnotation>,
    #[serde(default)]
    parents: Vec<String>,
}

/// Arguments for the `node.list` tool; `limit` defaults elsewhere when absent.
#[derive(Debug, Deserialize)]
struct NodeListToolArgs {
    frontier_id: Option<String>,
    class: Option<String>,
    #[serde(default)]
    include_archived: bool,
    limit: Option<u32>,
}

/// Arguments for the `node.read` tool.
#[derive(Debug, Deserialize)]
struct NodeReadToolArgs {
    node_id: String,
}

/// Arguments for the `node.annotate` tool; `visible` defaults to false
/// (hidden-by-default annotation).
#[derive(Debug, Deserialize)]
struct NodeAnnotateToolArgs {
    node_id: String,
    body: String,
    label: Option<String>,
    #[serde(default)]
    visible: bool,
}

/// Arguments for the `node.archive` tool.
#[derive(Debug, Deserialize)]
struct NodeArchiveToolArgs {
    node_id: String,
}

/// Arguments for the `note.quick` tool.
#[derive(Debug, Deserialize)]
struct QuickNoteToolArgs {
    frontier_id: Option<String>,
    title: String,
    body: String,
    #[serde(default)]
    annotations: Vec<WireAnnotation>,
    #[serde(default)]
    parents: Vec<String>,
}

/// Arguments for the `research.record` tool.
#[derive(Debug, Deserialize)]
struct ResearchRecordToolArgs {
    frontier_id: Option<String>,
    title: String,
    summary: Option<String>,
    body: String,
    #[serde(default)]
    annotations: Vec<WireAnnotation>,
    #[serde(default)]
    parents: Vec<String>,
}

/// Arguments for the `experiment.close` tool, bundling the run, metrics,
/// frontier note, and decision into a single request.
#[derive(Debug, Deserialize)]
struct ExperimentCloseToolArgs {
    frontier_id: String,
    base_checkpoint_id: String,
    change_node_id: String,
    candidate_summary: String,
    run: WireRun,
    primary_metric: WireMetricObservation,
    #[serde(default)]
    supporting_metrics: Vec<WireMetricObservation>,
    note: WireFrontierNote,
    verdict: String,
    decision_title: String,
    decision_rationale: String,
    analysis_node_id: Option<String>,
}
+
/// Wire form of a node annotation; `visible` defaults to false.
#[derive(Debug, Deserialize)]
struct WireAnnotation {
    body: String,
    label: Option<String>,
    #[serde(default)]
    visible: bool,
}

/// Wire form of a metric specification (no observed value).
#[derive(Debug, Deserialize)]
struct WireMetricSpec {
    key: String,
    unit: String,
    objective: String,
}

/// Wire form of a metric observation: a spec plus the measured value.
#[derive(Debug, Deserialize)]
struct WireMetricObservation {
    key: String,
    unit: String,
    objective: String,
    value: f64,
}

/// Wire form of a benchmark run description.
#[derive(Debug, Deserialize)]
struct WireRun {
    title: String,
    summary: Option<String>,
    backend: String,
    benchmark_suite: String,
    command: WireRunCommand,
}

/// Wire form of the command to execute; `working_directory` falls back to
/// the project root downstream, `env` defaults to empty.
#[derive(Debug, Deserialize)]
struct WireRunCommand {
    working_directory: Option<String>,
    argv: Vec<String>,
    #[serde(default)]
    env: BTreeMap<String, String>,
}

/// Wire form of the frontier note recorded when closing an experiment.
#[derive(Debug, Deserialize)]
struct WireFrontierNote {
    summary: String,
    #[serde(default)]
    next_hypotheses: Vec<String>,
}
diff --git a/crates/fidget-spinner-cli/src/mcp/telemetry.rs b/crates/fidget-spinner-cli/src/mcp/telemetry.rs
new file mode 100644
index 0000000..7206f76
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/telemetry.rs
@@ -0,0 +1,103 @@
+use std::collections::BTreeMap;
+
+use serde::{Deserialize, Serialize};
+
+use crate::mcp::fault::FaultRecord;
+
/// Per-operation request counters; `last_latency_ms` is updated on both
/// success and error outcomes.
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct OperationTelemetry {
    pub requests: u64,
    pub successes: u64,
    pub errors: u64,
    pub retries: u64,
    pub last_latency_ms: Option<u128>,
}
+
/// Server-wide counters plus a per-operation breakdown keyed by operation
/// name; `last_fault` remembers the most recent error's fault record.
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct ServerTelemetry {
    pub requests: u64,
    pub successes: u64,
    pub errors: u64,
    pub retries: u64,
    pub worker_restarts: u64,
    pub host_rollouts: u64,
    pub last_fault: Option<FaultRecord>,
    pub operations: BTreeMap<String, OperationTelemetry>,
}
+
+impl ServerTelemetry {
+ pub fn record_request(&mut self, operation: &str) {
+ self.requests += 1;
+ self.operations
+ .entry(operation.to_owned())
+ .or_default()
+ .requests += 1;
+ }
+
+ pub fn record_success(&mut self, operation: &str, latency_ms: u128) {
+ self.successes += 1;
+ let entry = self.operations.entry(operation.to_owned()).or_default();
+ entry.successes += 1;
+ entry.last_latency_ms = Some(latency_ms);
+ }
+
+ pub fn record_retry(&mut self, operation: &str) {
+ self.retries += 1;
+ self.operations
+ .entry(operation.to_owned())
+ .or_default()
+ .retries += 1;
+ }
+
+ pub fn record_error(&mut self, operation: &str, fault: FaultRecord, latency_ms: u128) {
+ self.errors += 1;
+ self.last_fault = Some(fault.clone());
+ let entry = self.operations.entry(operation.to_owned()).or_default();
+ entry.errors += 1;
+ entry.last_latency_ms = Some(latency_ms);
+ }
+
+ pub fn record_worker_restart(&mut self) {
+ self.worker_restarts += 1;
+ }
+
+ pub fn record_rollout(&mut self) {
+ self.host_rollouts += 1;
+ }
+}
+
/// Whether the server finished startup and captured its seed checkpoint.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct InitializationHealth {
    pub ready: bool,
    pub seed_captured: bool,
}

/// Liveness of the current worker process and which generation it is.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct WorkerHealth {
    pub worker_generation: u64,
    pub alive: bool,
}

/// State of the host executable on disk, used to detect pending rollouts.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct BinaryHealth {
    pub current_executable: String,
    pub launch_path_stable: bool,
    pub rollout_pending: bool,
}

/// Project binding state: requested path plus the resolved roots once bound.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct BindingHealth {
    pub bound: bool,
    pub requested_path: Option<String>,
    pub project_root: Option<String>,
    pub state_root: Option<String>,
}

/// Aggregate health view returned by the `system.health` tool.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(crate) struct HealthSnapshot {
    pub initialization: InitializationHealth,
    pub binding: BindingHealth,
    pub worker: WorkerHealth,
    pub binary: BinaryHealth,
    pub last_fault: Option<FaultRecord>,
}
diff --git a/crates/fidget-spinner-cli/src/mcp/worker.rs b/crates/fidget-spinner-cli/src/mcp/worker.rs
new file mode 100644
index 0000000..91c6db9
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/worker.rs
@@ -0,0 +1,66 @@
+use std::io::{self, BufRead, Write};
+use std::path::PathBuf;
+
+use camino::Utf8PathBuf;
+
+use crate::mcp::fault::{FaultKind, FaultRecord, FaultStage};
+use crate::mcp::protocol::{WorkerOutcome, WorkerRequest, WorkerResponse};
+use crate::mcp::service::WorkerService;
+
/// Worker main loop: read newline-delimited `WorkerRequest` JSON from stdin,
/// execute each against a `WorkerService` bound to `project`, and write one
/// `WorkerResponse` JSON line per request to stdout. Returns when stdin
/// reaches EOF.
pub(crate) fn serve(project: PathBuf) -> Result<(), fidget_spinner_store_sqlite::StoreError> {
    // The store API works with UTF-8 paths; lossy conversion mirrors how the
    // rest of the CLI handles OS paths.
    let project = Utf8PathBuf::from(project.to_string_lossy().into_owned());
    let mut service = WorkerService::new(&project)?;
    let stdin = io::stdin();
    let mut stdout = io::stdout().lock();

    for line in stdin.lock().lines() {
        let line = match line {
            Ok(line) => line,
            Err(error) => {
                // NOTE(review): a persistent read error would make this loop
                // spin; presumably the host closes the pipe on failure — confirm.
                eprintln!("worker stdin failure: {error}");
                continue;
            }
        };
        // Blank lines are tolerated as keep-alive noise between frames.
        if line.trim().is_empty() {
            continue;
        }

        let request = match serde_json::from_str::<WorkerRequest>(&line) {
            Ok(request) => request,
            Err(error) => {
                // Unparseable frames cannot be correlated with a request id,
                // so answer with the sentinel id 0 and a protocol fault.
                let response = WorkerResponse {
                    id: crate::mcp::protocol::HostRequestId(0),
                    outcome: WorkerOutcome::Fault {
                        fault: FaultRecord::new(
                            FaultKind::InvalidInput,
                            FaultStage::Protocol,
                            "worker.parse",
                            format!("invalid worker request: {error}"),
                        ),
                    },
                };
                write_message(&mut stdout, &response)?;
                continue;
            }
        };

        // Execute and echo the request id back so the host can match replies.
        let WorkerRequest::Execute { id, operation } = request;
        let outcome = match service.execute(operation) {
            Ok(result) => WorkerOutcome::Success { result },
            Err(fault) => WorkerOutcome::Fault { fault },
        };
        write_message(&mut stdout, &WorkerResponse { id, outcome })?;
    }

    Ok(())
}
+
+fn write_message(
+ stdout: &mut impl Write,
+ response: &WorkerResponse,
+) -> Result<(), fidget_spinner_store_sqlite::StoreError> {
+ serde_json::to_writer(&mut *stdout, response)?;
+ stdout.write_all(b"\n")?;
+ stdout.flush()?;
+ Ok(())
+}
diff --git a/crates/fidget-spinner-cli/tests/mcp_hardening.rs b/crates/fidget-spinner-cli/tests/mcp_hardening.rs
new file mode 100644
index 0000000..8d3cd9d
--- /dev/null
+++ b/crates/fidget-spinner-cli/tests/mcp_hardening.rs
@@ -0,0 +1,424 @@
+use std::fs;
+use std::io::{self, BufRead, BufReader, Write};
+use std::path::PathBuf;
+use std::process::{Child, ChildStdin, ChildStdout, Command, Stdio};
+
+use camino::Utf8PathBuf;
+use clap as _;
+use dirs as _;
+use fidget_spinner_core::NonEmptyText;
+use fidget_spinner_store_sqlite::{ListNodesQuery, ProjectStore};
+use serde as _;
+use serde_json::{Value, json};
+use time as _;
+use uuid as _;
+
/// Shared result alias so test helpers can bubble any error type.
type TestResult<T = ()> = Result<T, Box<dyn std::error::Error>>;

/// Convert any displayable error into a boxed test error prefixed with
/// `context` so failures say which step broke.
fn must<T, E: std::fmt::Display>(result: Result<T, E>, context: &str) -> TestResult<T> {
    match result {
        Ok(value) => Ok(value),
        Err(error) => Err(io::Error::other(format!("{context}: {error}")).into()),
    }
}

/// Unwrap an `Option`, failing with `context` as the error message when it
/// is `None`.
fn must_some<T>(value: Option<T>, context: &str) -> TestResult<T> {
    match value {
        Some(value) => Ok(value),
        None => Err(io::Error::other(context).into()),
    }
}
+
+fn temp_project_root(name: &str) -> TestResult<Utf8PathBuf> {
+ let root = std::env::temp_dir().join(format!(
+ "fidget_spinner_mcp_{name}_{}_{}",
+ std::process::id(),
+ must(
+ std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH),
+ "current time after unix epoch",
+ )?
+ .as_nanos()
+ ));
+ must(fs::create_dir_all(&root), "create temp project root")?;
+ Ok(Utf8PathBuf::from(root.to_string_lossy().into_owned()))
+}
+
/// Initialize a fresh `ProjectStore` at `root` with fixed test metadata;
/// the store handle is dropped immediately — only the on-disk state matters.
fn init_project(root: &Utf8PathBuf) -> TestResult {
    let _store = must(
        ProjectStore::init(
            root,
            must(NonEmptyText::new("mcp test project"), "display name")?,
            must(NonEmptyText::new("local.mcp.test"), "namespace")?,
        ),
        "init project store",
    )?;
    Ok(())
}
+
/// Path to the CLI binary under test, resolved by Cargo at compile time.
fn binary_path() -> PathBuf {
    PathBuf::from(env!("CARGO_BIN_EXE_fidget-spinner-cli"))
}
+
/// Test harness holding a spawned `mcp serve` child process plus handles to
/// its piped stdin/stdout for line-oriented JSON-RPC exchange.
struct McpHarness {
    child: Child,
    stdin: ChildStdin,
    stdout: BufReader<ChildStdout>,
}
+
impl McpHarness {
    /// Spawn the CLI in `mcp serve` mode with piped stdio, optionally passing
    /// `--project` and extra environment variables for fault injection.
    fn spawn(project_root: Option<&Utf8PathBuf>, envs: &[(&str, String)]) -> TestResult<Self> {
        let mut command = Command::new(binary_path());
        let _ = command
            .arg("mcp")
            .arg("serve")
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::inherit());
        if let Some(project_root) = project_root {
            let _ = command.arg("--project").arg(project_root.as_str());
        }
        for (key, value) in envs {
            let _ = command.env(key, value);
        }
        let mut child = must(command.spawn(), "spawn mcp host")?;
        let stdin = must_some(child.stdin.take(), "host stdin")?;
        let stdout = BufReader::new(must_some(child.stdout.take(), "host stdout")?);
        Ok(Self {
            child,
            stdin,
            stdout,
        })
    }

    /// Send the MCP `initialize` handshake (fixed request id 1).
    fn initialize(&mut self) -> TestResult<Value> {
        self.request(json!({
            "jsonrpc": "2.0",
            "id": 1,
            "method": "initialize",
            "params": {
                "protocolVersion": "2025-11-25",
                "capabilities": {},
                "clientInfo": { "name": "mcp-hardening-test", "version": "0" }
            }
        }))
    }

    /// Send the post-handshake `notifications/initialized` notification.
    fn notify_initialized(&mut self) -> TestResult {
        self.notify(json!({
            "jsonrpc": "2.0",
            "method": "notifications/initialized",
        }))
    }

    /// Request the tool catalog (fixed request id 2).
    fn tools_list(&mut self) -> TestResult<Value> {
        self.request(json!({
            "jsonrpc": "2.0",
            "id": 2,
            "method": "tools/list",
            "params": {},
        }))
    }

    /// Convenience wrapper for the `project.bind` tool.
    fn bind_project(&mut self, id: u64, path: &Utf8PathBuf) -> TestResult<Value> {
        self.call_tool(id, "project.bind", json!({ "path": path.as_str() }))
    }

    /// Issue a `tools/call` for `name` with the given arguments.
    fn call_tool(&mut self, id: u64, name: &str, arguments: Value) -> TestResult<Value> {
        self.request(json!({
            "jsonrpc": "2.0",
            "id": id,
            "method": "tools/call",
            "params": {
                "name": name,
                "arguments": arguments,
            }
        }))
    }

    /// Write one JSON line and block on exactly one JSON line in response;
    /// EOF before a response is an error (host died).
    fn request(&mut self, message: Value) -> TestResult<Value> {
        let encoded = must(serde_json::to_string(&message), "request json")?;
        must(writeln!(self.stdin, "{encoded}"), "write request")?;
        must(self.stdin.flush(), "flush request")?;
        let mut line = String::new();
        let byte_count = must(self.stdout.read_line(&mut line), "read response")?;
        if byte_count == 0 {
            return Err(io::Error::other("unexpected EOF reading response").into());
        }
        must(serde_json::from_str(&line), "response json")
    }

    /// Write one JSON line without expecting any response (notification).
    fn notify(&mut self, message: Value) -> TestResult {
        let encoded = must(serde_json::to_string(&message), "notify json")?;
        must(writeln!(self.stdin, "{encoded}"), "write notify")?;
        must(self.stdin.flush(), "flush notify")?;
        Ok(())
    }
}
+
impl Drop for McpHarness {
    /// Best-effort teardown: kill and reap the child so failed tests do not
    /// leak server processes; errors are deliberately ignored.
    fn drop(&mut self) {
        let _ = self.child.kill();
        let _ = self.child.wait();
    }
}
+
/// Borrow the `structuredContent` payload out of a tool-call response;
/// yields `Value::Null` when the path is absent.
fn tool_content(response: &Value) -> &Value {
    &response["result"]["structuredContent"]
}
+
/// Without binding a project, the server must still complete the handshake,
/// expose its tool catalog, report healthy-but-unbound status, count requests
/// in telemetry, and serve the bundled skills.
#[test]
fn cold_start_exposes_health_and_telemetry() -> TestResult {
    let project_root = temp_project_root("cold_start")?;
    init_project(&project_root)?;

    let mut harness = McpHarness::spawn(None, &[])?;
    let initialize = harness.initialize()?;
    assert_eq!(
        initialize["result"]["protocolVersion"].as_str(),
        Some("2025-11-25")
    );
    harness.notify_initialized()?;

    let tools = harness.tools_list()?;
    let tool_count = must_some(tools["result"]["tools"].as_array(), "tools array")?.len();
    assert!(tool_count >= 18);

    let health = harness.call_tool(3, "system.health", json!({}))?;
    assert_eq!(
        tool_content(&health)["initialization"]["ready"].as_bool(),
        Some(true)
    );
    assert_eq!(
        tool_content(&health)["initialization"]["seed_captured"].as_bool(),
        Some(true)
    );
    // No project.bind was issued, so the binding must report unbound.
    assert_eq!(
        tool_content(&health)["binding"]["bound"].as_bool(),
        Some(false)
    );

    let telemetry = harness.call_tool(4, "system.telemetry", json!({}))?;
    assert!(tool_content(&telemetry)["requests"].as_u64().unwrap_or(0) >= 3);

    let skills = harness.call_tool(15, "skill.list", json!({}))?;
    let skill_names = must_some(
        tool_content(&skills)["skills"].as_array(),
        "bundled skills array",
    )?
    .iter()
    .filter_map(|skill| skill["name"].as_str())
    .collect::<Vec<_>>();
    assert!(skill_names.contains(&"fidget-spinner"));
    assert!(skill_names.contains(&"frontier-loop"));

    let base_skill = harness.call_tool(16, "skill.show", json!({"name": "fidget-spinner"}))?;
    assert_eq!(
        tool_content(&base_skill)["name"].as_str(),
        Some("fidget-spinner")
    );
    Ok(())
}
+
/// A read-only (safe) request should be transparently retried when the test
/// env var makes the host crash the worker once, with exactly one retry and
/// one worker restart visible in telemetry.
#[test]
fn safe_request_retries_after_worker_crash() -> TestResult {
    let project_root = temp_project_root("crash_retry")?;
    init_project(&project_root)?;

    let mut harness = McpHarness::spawn(
        None,
        &[(
            "FIDGET_SPINNER_MCP_TEST_HOST_CRASH_ONCE_KEY",
            "tools/call:project.status".to_owned(),
        )],
    )?;
    let _ = harness.initialize()?;
    harness.notify_initialized()?;
    let bind = harness.bind_project(3, &project_root)?;
    assert_eq!(bind["result"]["isError"].as_bool(), Some(false));

    let response = harness.call_tool(5, "project.status", json!({}))?;
    assert_eq!(response["result"]["isError"].as_bool(), Some(false));

    let telemetry = harness.call_tool(6, "system.telemetry", json!({}))?;
    assert_eq!(tool_content(&telemetry)["retries"].as_u64(), Some(1));
    assert_eq!(
        tool_content(&telemetry)["worker_restarts"].as_u64(),
        Some(1)
    );
    Ok(())
}

/// Same as above, but the fault is a worker-side one-shot transient error
/// (tracked via a marker file) rather than a crash.
#[test]
fn safe_request_retries_after_worker_transient_fault() -> TestResult {
    let project_root = temp_project_root("transient_retry")?;
    init_project(&project_root)?;
    let marker = project_root.join("transient_once.marker");

    let mut harness = McpHarness::spawn(
        None,
        &[
            (
                "FIDGET_SPINNER_MCP_TEST_WORKER_TRANSIENT_ONCE_KEY",
                "tools/call:project.status".to_owned(),
            ),
            (
                "FIDGET_SPINNER_MCP_TEST_WORKER_TRANSIENT_ONCE_MARKER",
                marker.to_string(),
            ),
        ],
    )?;
    let _ = harness.initialize()?;
    harness.notify_initialized()?;
    let bind = harness.bind_project(12, &project_root)?;
    assert_eq!(bind["result"]["isError"].as_bool(), Some(false));

    let response = harness.call_tool(13, "project.status", json!({}))?;
    assert_eq!(response["result"]["isError"].as_bool(), Some(false));

    let telemetry = harness.call_tool(14, "system.telemetry", json!({}))?;
    assert_eq!(tool_content(&telemetry)["retries"].as_u64(), Some(1));
    assert_eq!(
        tool_content(&telemetry)["worker_restarts"].as_u64(),
        Some(1)
    );
    Ok(())
}
+
/// Side-effecting (write) requests must NOT be replayed after a crash: the
/// call fails, the store stays empty, and no retry is counted.
#[test]
fn side_effecting_request_is_not_replayed_after_worker_crash() -> TestResult {
    let project_root = temp_project_root("no_replay")?;
    init_project(&project_root)?;

    let mut harness = McpHarness::spawn(
        None,
        &[(
            "FIDGET_SPINNER_MCP_TEST_HOST_CRASH_ONCE_KEY",
            "tools/call:research.record".to_owned(),
        )],
    )?;
    let _ = harness.initialize()?;
    harness.notify_initialized()?;
    let bind = harness.bind_project(6, &project_root)?;
    assert_eq!(bind["result"]["isError"].as_bool(), Some(false));

    let response = harness.call_tool(
        7,
        "research.record",
        json!({
            "title": "should not duplicate",
            "body": "host crash before worker execution",
        }),
    )?;
    assert_eq!(response["result"]["isError"].as_bool(), Some(true));

    // The write must not have landed even once.
    let nodes = harness.call_tool(8, "node.list", json!({}))?;
    assert_eq!(
        must_some(tool_content(&nodes).as_array(), "node list")?.len(),
        0
    );

    let telemetry = harness.call_tool(9, "system.telemetry", json!({}))?;
    assert_eq!(tool_content(&telemetry)["retries"].as_u64(), Some(0));
    Ok(())
}

/// A forced host rollout between two calls must be invisible to the client:
/// both calls succeed and telemetry records exactly one rollout.
#[test]
fn forced_rollout_preserves_initialized_state() -> TestResult {
    let project_root = temp_project_root("rollout")?;
    init_project(&project_root)?;

    let mut harness = McpHarness::spawn(
        None,
        &[(
            "FIDGET_SPINNER_MCP_TEST_FORCE_ROLLOUT_KEY",
            "tools/call:project.status".to_owned(),
        )],
    )?;
    let _ = harness.initialize()?;
    harness.notify_initialized()?;
    let bind = harness.bind_project(9, &project_root)?;
    assert_eq!(bind["result"]["isError"].as_bool(), Some(false));

    let first = harness.call_tool(10, "project.status", json!({}))?;
    assert_eq!(first["result"]["isError"].as_bool(), Some(false));

    let second = harness.call_tool(11, "project.status", json!({}))?;
    assert_eq!(second["result"]["isError"].as_bool(), Some(false));

    let telemetry = harness.call_tool(12, "system.telemetry", json!({}))?;
    assert_eq!(tool_content(&telemetry)["host_rollouts"].as_u64(), Some(1));
    Ok(())
}

/// Project-scoped tools used before binding must fail with an error message
/// that points the caller at `project.bind`.
#[test]
fn unbound_project_tools_fail_with_bind_hint() -> TestResult {
    let mut harness = McpHarness::spawn(None, &[])?;
    let _ = harness.initialize()?;
    harness.notify_initialized()?;

    let response = harness.call_tool(20, "project.status", json!({}))?;
    assert_eq!(response["result"]["isError"].as_bool(), Some(true));
    let message = response["result"]["structuredContent"]["message"].as_str();
    assert!(message.is_some_and(|message| message.contains("project.bind")));
    Ok(())
}
+
/// Rebinding to a nested directory of a sibling project must resolve up to
/// that project's root and redirect all subsequent writes there, leaving the
/// originally-bound project untouched.
#[test]
fn bind_retargets_writes_to_sibling_project_root() -> TestResult {
    let spinner_root = temp_project_root("spinner_root")?;
    let libgrid_root = temp_project_root("libgrid_root")?;
    init_project(&spinner_root)?;
    init_project(&libgrid_root)?;
    // Bind via a subdirectory to exercise upward project-root discovery.
    let notes_dir = libgrid_root.join("notes");
    must(
        fs::create_dir_all(notes_dir.as_std_path()),
        "create nested notes dir",
    )?;

    let mut harness = McpHarness::spawn(Some(&spinner_root), &[])?;
    let _ = harness.initialize()?;
    harness.notify_initialized()?;

    let initial_status = harness.call_tool(30, "project.status", json!({}))?;
    assert_eq!(
        tool_content(&initial_status)["project_root"].as_str(),
        Some(spinner_root.as_str())
    );

    let rebind = harness.bind_project(31, &notes_dir)?;
    assert_eq!(rebind["result"]["isError"].as_bool(), Some(false));
    assert_eq!(
        tool_content(&rebind)["project_root"].as_str(),
        Some(libgrid_root.as_str())
    );

    let status = harness.call_tool(32, "project.status", json!({}))?;
    assert_eq!(
        tool_content(&status)["project_root"].as_str(),
        Some(libgrid_root.as_str())
    );

    let note = harness.call_tool(
        33,
        "note.quick",
        json!({
            "title": "libgrid dogfood note",
            "body": "rebind should redirect writes",
        }),
    )?;
    assert_eq!(note["result"]["isError"].as_bool(), Some(false));

    // Verify on disk: zero nodes in the old project, one in the new one.
    let spinner_store = must(ProjectStore::open(&spinner_root), "open spinner store")?;
    let libgrid_store = must(ProjectStore::open(&libgrid_root), "open libgrid store")?;
    assert_eq!(
        must(
            spinner_store.list_nodes(ListNodesQuery::default()),
            "list spinner nodes after rebind"
        )?
        .len(),
        0
    );
    assert_eq!(
        must(
            libgrid_store.list_nodes(ListNodesQuery::default()),
            "list libgrid nodes after rebind"
        )?
        .len(),
        1
    );
    Ok(())
}
diff --git a/crates/fidget-spinner-core/Cargo.toml b/crates/fidget-spinner-core/Cargo.toml
new file mode 100644
index 0000000..b472b91
--- /dev/null
+++ b/crates/fidget-spinner-core/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "fidget-spinner-core"
+description = "Core domain model for a local-first experimental DAG"
+edition.workspace = true
+license.workspace = true
+publish = false
+rust-version.workspace = true
+version.workspace = true
+
+[dependencies]
+camino.workspace = true
+serde.workspace = true
+serde_json.workspace = true
+thiserror.workspace = true
+time.workspace = true
+uuid.workspace = true
+
+[lints]
+workspace = true
diff --git a/crates/fidget-spinner-core/src/error.rs b/crates/fidget-spinner-core/src/error.rs
new file mode 100644
index 0000000..8e976c7
--- /dev/null
+++ b/crates/fidget-spinner-core/src/error.rs
@@ -0,0 +1,9 @@
use thiserror::Error;

/// Validation failures raised by the core domain constructors
/// ([`NonEmptyText::new`], [`CommandRecipe::new`]).
#[derive(Clone, Debug, Eq, Error, PartialEq)]
pub enum CoreError {
    /// Input was empty or whitespace-only.
    #[error("text values must not be blank")]
    EmptyText,
    /// A command was constructed with an empty argv vector.
    #[error("command recipes must contain at least one argv element")]
    EmptyCommand,
}
diff --git a/crates/fidget-spinner-core/src/id.rs b/crates/fidget-spinner-core/src/id.rs
new file mode 100644
index 0000000..ea2cd5a
--- /dev/null
+++ b/crates/fidget-spinner-core/src/id.rs
@@ -0,0 +1,46 @@
+use std::fmt::{self, Display, Formatter};
+
+use serde::{Deserialize, Serialize};
+use uuid::Uuid;
+
/// Generates a strongly-typed id newtype over [`Uuid`]:
/// - `fresh()` mints a time-ordered UUIDv7, so freshly created ids sort
///   roughly by creation time;
/// - `from_uuid` / `as_uuid` convert to and from the raw UUID;
/// - serde is `transparent` (serializes as the bare UUID) and `Display`
///   forwards to the inner UUID's formatting.
macro_rules! define_id {
    ($name:ident) => {
        #[derive(
            Clone, Copy, Debug, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize,
        )]
        #[serde(transparent)]
        pub struct $name(Uuid);

        impl $name {
            #[must_use]
            pub fn fresh() -> Self {
                Self(Uuid::now_v7())
            }

            #[must_use]
            pub fn from_uuid(uuid: Uuid) -> Self {
                Self(uuid)
            }

            #[must_use]
            pub fn as_uuid(self) -> Uuid {
                self.0
            }
        }

        impl Display for $name {
            fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result {
                Display::fmt(&self.0, formatter)
            }
        }
    };
}

// One distinct id type per entity kind; the newtypes make them
// non-interchangeable at compile time even though all wrap a Uuid.
define_id!(AgentSessionId);
define_id!(AnnotationId);
define_id!(ArtifactId);
define_id!(CheckpointId);
define_id!(ExperimentId);
define_id!(FrontierId);
define_id!(NodeId);
define_id!(RunId);
diff --git a/crates/fidget-spinner-core/src/lib.rs b/crates/fidget-spinner-core/src/lib.rs
new file mode 100644
index 0000000..f368268
--- /dev/null
+++ b/crates/fidget-spinner-core/src/lib.rs
@@ -0,0 +1,26 @@
//! Core domain types for the Fidget Spinner frontier machine.
//!
//! The product direction is intentionally local-first and agent-first: the DAG
//! is the canonical truth, while frontier state is a derived operational
//! projection over that graph. The global spine is intentionally narrow so
//! projects can carry richer payloads and annotations without fossilizing the
//! whole system into one universal schema.

// Modules are private; the public surface is the flat re-export list below.
mod error;
mod id;
mod model;

pub use crate::error::CoreError;
// Strongly-typed UUID wrappers, one per entity kind (see `id.rs`).
pub use crate::id::{
    AgentSessionId, AnnotationId, ArtifactId, CheckpointId, ExperimentId, FrontierId, NodeId, RunId,
};
// The full domain-model surface (see `model.rs`).
pub use crate::model::{
    AdmissionState, AnnotationVisibility, ArtifactKind, ArtifactRef, CheckpointDisposition,
    CheckpointRecord, CheckpointSnapshotRef, CodeSnapshotRef, CommandRecipe, CompletedExperiment,
    DagEdge, DagNode, DiagnosticSeverity, EdgeKind, EvaluationProtocol, ExecutionBackend,
    ExperimentResult, FieldPresence, FieldRole, FrontierContract, FrontierNote, FrontierProjection,
    FrontierRecord, FrontierStatus, FrontierVerdict, GitCommitHash, InferencePolicy, JsonObject,
    MetricObservation, MetricSpec, MetricUnit, NodeAnnotation, NodeClass, NodeDiagnostics,
    NodePayload, NodeTrack, NonEmptyText, OptimizationObjective, PayloadSchemaRef,
    ProjectFieldSpec, ProjectSchema, RunRecord, RunStatus, ValidationDiagnostic,
};
diff --git a/crates/fidget-spinner-core/src/model.rs b/crates/fidget-spinner-core/src/model.rs
new file mode 100644
index 0000000..f0d1818
--- /dev/null
+++ b/crates/fidget-spinner-core/src/model.rs
@@ -0,0 +1,693 @@
+use std::collections::{BTreeMap, BTreeSet};
+use std::fmt::{self, Display, Formatter};
+
+use camino::Utf8PathBuf;
+use serde::{Deserialize, Serialize};
+use serde_json::{Map, Value};
+use time::OffsetDateTime;
+
+use crate::{
+ AgentSessionId, AnnotationId, ArtifactId, CheckpointId, CoreError, ExperimentId, FrontierId,
+ NodeId, RunId,
+};
+
+#[derive(Clone, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
+#[serde(transparent)]
+pub struct NonEmptyText(String);
+
+impl NonEmptyText {
+ pub fn new(value: impl Into<String>) -> Result<Self, CoreError> {
+ let value = value.into();
+ if value.trim().is_empty() {
+ return Err(CoreError::EmptyText);
+ }
+ Ok(Self(value))
+ }
+
+ #[must_use]
+ pub fn as_str(&self) -> &str {
+ &self.0
+ }
+}
+
+impl Display for NonEmptyText {
+ fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result {
+ formatter.write_str(&self.0)
+ }
+}
+
+#[derive(Clone, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
+#[serde(transparent)]
+pub struct GitCommitHash(NonEmptyText);
+
+impl GitCommitHash {
+ pub fn new(value: impl Into<String>) -> Result<Self, CoreError> {
+ NonEmptyText::new(value).map(Self)
+ }
+
+ #[must_use]
+ pub fn as_str(&self) -> &str {
+ self.0.as_str()
+ }
+}
+
+impl Display for GitCommitHash {
+ fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result {
+ Display::fmt(&self.0, formatter)
+ }
+}
+
/// Free-form JSON map used for node payload fields.
pub type JsonObject = Map<String, Value>;

/// The kind of a DAG node; each class has a canonical lowercase name
/// (see `as_str`) and a default track (see `default_track`).
#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
pub enum NodeClass {
    Contract,
    Change,
    Run,
    Analysis,
    Decision,
    Research,
    Enabling,
    Note,
}
+
+impl NodeClass {
+ #[must_use]
+ pub const fn as_str(self) -> &'static str {
+ match self {
+ Self::Contract => "contract",
+ Self::Change => "change",
+ Self::Run => "run",
+ Self::Analysis => "analysis",
+ Self::Decision => "decision",
+ Self::Research => "research",
+ Self::Enabling => "enabling",
+ Self::Note => "note",
+ }
+ }
+
+ #[must_use]
+ pub const fn default_track(self) -> NodeTrack {
+ match self {
+ Self::Contract | Self::Change | Self::Run | Self::Analysis | Self::Decision => {
+ NodeTrack::CorePath
+ }
+ Self::Research | Self::Enabling | Self::Note => NodeTrack::OffPath,
+ }
+ }
+}
+
+impl Display for NodeClass {
+ fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result {
+ formatter.write_str(self.as_str())
+ }
+}
+
/// Whether a node sits on the core path or off-path (see
/// [`NodeClass::default_track`]).
#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
pub enum NodeTrack {
    CorePath,
    OffPath,
}

/// Whether an annotation is shown by default.
#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
pub enum AnnotationVisibility {
    HiddenByDefault,
    Visible,
}

/// Severity of a validation diagnostic.
#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
pub enum DiagnosticSeverity {
    Error,
    Warning,
    Info,
}

/// How strongly a project field is expected on a node payload; `Optional`
/// fields never produce diagnostics (see `ProjectSchema::validate_node`).
#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
pub enum FieldPresence {
    Required,
    Recommended,
    Optional,
}

/// Declared role of a project field.
#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
pub enum FieldRole {
    Index,
    ProjectionGate,
    RenderOnly,
    Opaque,
}

/// Whether a model is allowed to infer a field's value.
#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
pub enum InferencePolicy {
    ManualOnly,
    ModelMayInfer,
}

/// Lifecycle state of a frontier; new frontiers start as `Exploring`
/// (see `FrontierRecord::with_id`).
#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub enum FrontierStatus {
    Exploring,
    Paused,
    Saturated,
    Archived,
}

/// Role a checkpoint currently plays within its frontier.
#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub enum CheckpointDisposition {
    Champion,
    FrontierCandidate,
    Baseline,
    DeadEnd,
    Archived,
}

/// Unit attached to a metric value.
#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
pub enum MetricUnit {
    Seconds,
    Bytes,
    Count,
    Ratio,
    Custom,
}

/// Direction in which a metric is optimized.
#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
pub enum OptimizationObjective {
    Minimize,
    Maximize,
    Target,
}

/// Lifecycle state of a run.
#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub enum RunStatus {
    Queued,
    Running,
    Succeeded,
    Failed,
    Cancelled,
}

/// Where a run's command executes.
#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub enum ExecutionBackend {
    LocalProcess,
    WorktreeProcess,
    SshProcess,
}

/// Outcome of an experiment relative to the frontier's champion.
#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub enum FrontierVerdict {
    PromoteToChampion,
    KeepOnFrontier,
    RevertToChampion,
    ArchiveDeadEnd,
    NeedsMoreEvidence,
}

/// Whether a node was admitted into the store. NOTE(review): current
/// validation always produces `Admitted` (see `ProjectSchema::validate_node`);
/// `Rejected` is reserved for future use — confirm.
#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub enum AdmissionState {
    Admitted,
    Rejected,
}
+
/// Identifies a project payload schema by namespace plus version.
#[derive(Clone, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
pub struct PayloadSchemaRef {
    pub namespace: NonEmptyText,
    pub version: u32,
}

/// Project-specific payload attached to a node: a free-form JSON field map,
/// optionally tagged with the schema it was validated against.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct NodePayload {
    pub schema: Option<PayloadSchemaRef>,
    pub fields: JsonObject,
}
+
+impl NodePayload {
+ #[must_use]
+ pub fn empty() -> Self {
+ Self {
+ schema: None,
+ fields: JsonObject::new(),
+ }
+ }
+
+ #[must_use]
+ pub fn with_schema(schema: PayloadSchemaRef, fields: JsonObject) -> Self {
+ Self {
+ schema: Some(schema),
+ fields,
+ }
+ }
+
+ #[must_use]
+ pub fn field(&self, name: &str) -> Option<&Value> {
+ self.fields.get(name)
+ }
+}
+
/// Free-text annotation attached to a node, with its own id, optional
/// label, and a visibility flag.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct NodeAnnotation {
    pub id: AnnotationId,
    pub visibility: AnnotationVisibility,
    pub label: Option<NonEmptyText>,
    pub body: NonEmptyText,
    pub created_at: OffsetDateTime,
}
+
+impl NodeAnnotation {
+ #[must_use]
+ pub fn hidden(body: NonEmptyText) -> Self {
+ Self {
+ id: AnnotationId::fresh(),
+ visibility: AnnotationVisibility::HiddenByDefault,
+ label: None,
+ body,
+ created_at: OffsetDateTime::now_utc(),
+ }
+ }
+}
+
/// Single validation finding produced while checking a node payload
/// against the project schema.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct ValidationDiagnostic {
    pub severity: DiagnosticSeverity,
    // Machine-readable code, e.g. "missing.<field>" (see validate_node).
    pub code: String,
    pub message: NonEmptyText,
    pub field_name: Option<String>,
}

/// Validation outcome for a node: an admission verdict plus the list of
/// individual diagnostics.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct NodeDiagnostics {
    pub admission: AdmissionState,
    pub items: Vec<ValidationDiagnostic>,
}

impl NodeDiagnostics {
    /// Clean result: admitted with no diagnostics.
    #[must_use]
    pub const fn admitted() -> Self {
        Self {
            admission: AdmissionState::Admitted,
            items: Vec::new(),
        }
    }
}
+
/// Declaration of one project payload field: which node classes it applies
/// to, how strongly it is expected, and how missing values are reported.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct ProjectFieldSpec {
    pub name: NonEmptyText,
    // Empty set means "applies to every class" (see `applies_to`).
    pub node_classes: BTreeSet<NodeClass>,
    pub presence: FieldPresence,
    pub severity: DiagnosticSeverity,
    pub role: FieldRole,
    pub inference_policy: InferencePolicy,
}
+
+impl ProjectFieldSpec {
+ #[must_use]
+ pub fn applies_to(&self, class: NodeClass) -> bool {
+ self.node_classes.is_empty() || self.node_classes.contains(&class)
+ }
+}
+
/// Per-project payload schema: a namespace, a version, and the list of
/// field specifications used to validate node payloads.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct ProjectSchema {
    pub namespace: NonEmptyText,
    pub version: u32,
    pub fields: Vec<ProjectFieldSpec>,
}
+
+impl ProjectSchema {
+ #[must_use]
+ pub fn default_with_namespace(namespace: NonEmptyText) -> Self {
+ Self {
+ namespace,
+ version: 1,
+ fields: Vec::new(),
+ }
+ }
+
+ #[must_use]
+ pub fn schema_ref(&self) -> PayloadSchemaRef {
+ PayloadSchemaRef {
+ namespace: self.namespace.clone(),
+ version: self.version,
+ }
+ }
+
+ #[must_use]
+ pub fn validate_node(&self, class: NodeClass, payload: &NodePayload) -> NodeDiagnostics {
+ let items = self
+ .fields
+ .iter()
+ .filter(|field| field.applies_to(class))
+ .filter_map(|field| {
+ let is_missing = payload.field(field.name.as_str()).is_none();
+ if !is_missing || field.presence == FieldPresence::Optional {
+ return None;
+ }
+ Some(ValidationDiagnostic {
+ severity: field.severity,
+ code: format!("missing.{}", field.name.as_str()),
+ message: validation_message(format!(
+ "missing project payload field `{}`",
+ field.name.as_str()
+ )),
+ field_name: Some(field.name.as_str().to_owned()),
+ })
+ })
+ .collect();
+ NodeDiagnostics {
+ admission: AdmissionState::Admitted,
+ items,
+ }
+ }
+}
+
+fn validation_message(value: String) -> NonEmptyText {
+ match NonEmptyText::new(value) {
+ Ok(message) => message,
+ Err(_) => unreachable!("validation diagnostics are never empty"),
+ }
+}
+
/// A node in the project DAG: the narrow global spine (class, track,
/// title, timestamps) plus a project-defined payload, annotations, and the
/// validation diagnostics recorded at creation time.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct DagNode {
    pub id: NodeId,
    pub class: NodeClass,
    pub track: NodeTrack,
    pub frontier_id: Option<FrontierId>,
    // Soft-delete flag; archived nodes are filtered out by default queries.
    pub archived: bool,
    pub title: NonEmptyText,
    pub summary: Option<NonEmptyText>,
    pub payload: NodePayload,
    pub annotations: Vec<NodeAnnotation>,
    pub diagnostics: NodeDiagnostics,
    pub agent_session_id: Option<AgentSessionId>,
    pub created_at: OffsetDateTime,
    pub updated_at: OffsetDateTime,
}
+
+impl DagNode {
+ #[must_use]
+ pub fn new(
+ class: NodeClass,
+ frontier_id: Option<FrontierId>,
+ title: NonEmptyText,
+ summary: Option<NonEmptyText>,
+ payload: NodePayload,
+ diagnostics: NodeDiagnostics,
+ ) -> Self {
+ let now = OffsetDateTime::now_utc();
+ Self {
+ id: NodeId::fresh(),
+ class,
+ track: class.default_track(),
+ frontier_id,
+ archived: false,
+ title,
+ summary,
+ payload,
+ annotations: Vec::new(),
+ diagnostics,
+ agent_session_id: None,
+ created_at: now,
+ updated_at: now,
+ }
+ }
+
+ #[must_use]
+ pub fn is_core_path(&self) -> bool {
+ self.track == NodeTrack::CorePath
+ }
+}
+
/// Semantic relationship carried by a DAG edge.
#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
pub enum EdgeKind {
    Lineage,
    Evidence,
    Comparison,
    Supersedes,
    Annotation,
}

/// Directed edge between two DAG nodes.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct DagEdge {
    pub source_id: NodeId,
    pub target_id: NodeId,
    pub kind: EdgeKind,
}

/// Category of a stored artifact.
#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub enum ArtifactKind {
    Note,
    Patch,
    BenchmarkBundle,
    MetricSeries,
    Table,
    Plot,
    Log,
    Binary,
    Checkpoint,
}
+
/// Reference to an artifact on disk, with provenance back to the run that
/// produced it (when known).
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct ArtifactRef {
    pub id: ArtifactId,
    pub kind: ArtifactKind,
    pub label: NonEmptyText,
    pub path: Utf8PathBuf,
    pub media_type: Option<NonEmptyText>,
    pub produced_by_run: Option<RunId>,
}

/// Snapshot of the code state a run executed against: repo/worktree
/// locations, optional HEAD commit, and the set of dirty paths.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct CodeSnapshotRef {
    pub repo_root: Utf8PathBuf,
    pub worktree_root: Utf8PathBuf,
    pub worktree_name: Option<NonEmptyText>,
    pub head_commit: Option<GitCommitHash>,
    pub dirty_paths: BTreeSet<Utf8PathBuf>,
}

/// Like [`CodeSnapshotRef`] but pinned: a checkpoint always has a concrete
/// commit hash and no dirty state.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct CheckpointSnapshotRef {
    pub repo_root: Utf8PathBuf,
    pub worktree_root: Utf8PathBuf,
    pub worktree_name: Option<NonEmptyText>,
    pub commit_hash: GitCommitHash,
}

/// Reproducible command invocation: working directory, argv, and
/// environment overrides. Construct via [`CommandRecipe::new`], which
/// rejects an empty argv.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct CommandRecipe {
    pub working_directory: Utf8PathBuf,
    pub argv: Vec<NonEmptyText>,
    pub env: BTreeMap<String, String>,
}
+
+impl CommandRecipe {
+ pub fn new(
+ working_directory: Utf8PathBuf,
+ argv: Vec<NonEmptyText>,
+ env: BTreeMap<String, String>,
+ ) -> Result<Self, CoreError> {
+ if argv.is_empty() {
+ return Err(CoreError::EmptyCommand);
+ }
+ Ok(Self {
+ working_directory,
+ argv,
+ env,
+ })
+ }
+}
+
/// Declaration of a metric: its key, unit, and optimization direction.
#[derive(Clone, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
pub struct MetricSpec {
    pub metric_key: NonEmptyText,
    pub unit: MetricUnit,
    pub objective: OptimizationObjective,
}

/// How candidates on a frontier are evaluated: which benchmark suites run,
/// which metric decides, and which metrics are tracked alongside.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct EvaluationProtocol {
    pub benchmark_suites: BTreeSet<NonEmptyText>,
    pub primary_metric: MetricSpec,
    pub supporting_metrics: BTreeSet<MetricSpec>,
}

/// The contract a frontier operates under: objective, evaluation protocol,
/// and the criteria for promoting a candidate.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct FrontierContract {
    pub objective: NonEmptyText,
    pub evaluation: EvaluationProtocol,
    pub promotion_criteria: Vec<NonEmptyText>,
}

/// A measured metric value (spec fields plus the observed `f64`).
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct MetricObservation {
    pub metric_key: NonEmptyText,
    pub unit: MetricUnit,
    pub objective: OptimizationObjective,
    pub value: f64,
}
+
/// Persistent record of a frontier: label, the contract node it is rooted
/// at, and its lifecycle status.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct FrontierRecord {
    pub id: FrontierId,
    pub label: NonEmptyText,
    pub root_contract_node_id: NodeId,
    pub status: FrontierStatus,
    pub created_at: OffsetDateTime,
    pub updated_at: OffsetDateTime,
}
+
+impl FrontierRecord {
+ #[must_use]
+ pub fn new(label: NonEmptyText, root_contract_node_id: NodeId) -> Self {
+ Self::with_id(FrontierId::fresh(), label, root_contract_node_id)
+ }
+
+ #[must_use]
+ pub fn with_id(id: FrontierId, label: NonEmptyText, root_contract_node_id: NodeId) -> Self {
+ let now = OffsetDateTime::now_utc();
+ Self {
+ id,
+ label,
+ root_contract_node_id,
+ status: FrontierStatus::Exploring,
+ created_at: now,
+ updated_at: now,
+ }
+ }
+}
+
/// Pinned code state registered on a frontier, tied to the node it
/// documents and carrying its current disposition.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct CheckpointRecord {
    pub id: CheckpointId,
    pub frontier_id: FrontierId,
    pub node_id: NodeId,
    pub snapshot: CheckpointSnapshotRef,
    pub disposition: CheckpointDisposition,
    pub summary: NonEmptyText,
    pub created_at: OffsetDateTime,
}

/// Execution record for a run node: command, backend, optional code
/// snapshot, and start/finish stamps.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct RunRecord {
    pub node_id: NodeId,
    pub run_id: RunId,
    pub frontier_id: Option<FrontierId>,
    pub status: RunStatus,
    pub backend: ExecutionBackend,
    pub code_snapshot: Option<CodeSnapshotRef>,
    pub benchmark_suite: Option<NonEmptyText>,
    pub command: CommandRecipe,
    pub started_at: Option<OffsetDateTime>,
    pub finished_at: Option<OffsetDateTime>,
}

/// Measured outcome of one experiment on one benchmark suite.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct ExperimentResult {
    pub benchmark_suite: NonEmptyText,
    pub primary_metric: MetricObservation,
    pub supporting_metrics: Vec<MetricObservation>,
    pub benchmark_bundle: Option<ArtifactId>,
}

/// Human-readable wrap-up of an experiment: summary plus follow-on ideas.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct FrontierNote {
    pub summary: NonEmptyText,
    pub next_hypotheses: Vec<NonEmptyText>,
}

/// A fully closed experiment: the checkpoints compared, the DAG nodes
/// involved, the measured result, the note, and the verdict.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct CompletedExperiment {
    pub id: ExperimentId,
    pub frontier_id: FrontierId,
    pub base_checkpoint_id: CheckpointId,
    pub candidate_checkpoint_id: CheckpointId,
    pub change_node_id: NodeId,
    pub run_node_id: NodeId,
    pub run_id: RunId,
    pub analysis_node_id: Option<NodeId>,
    pub decision_node_id: NodeId,
    pub result: ExperimentResult,
    pub note: FrontierNote,
    pub verdict: FrontierVerdict,
    pub created_at: OffsetDateTime,
}

/// Derived operational view of a frontier: its record, current champion
/// and candidate checkpoints, and the number of experiments run.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct FrontierProjection {
    pub frontier: FrontierRecord,
    pub champion_checkpoint_id: Option<CheckpointId>,
    pub candidate_checkpoint_ids: BTreeSet<CheckpointId>,
    pub experiment_count: u64,
}
+
#[cfg(test)]
mod tests {
    use std::collections::{BTreeMap, BTreeSet};

    use camino::Utf8PathBuf;
    use serde_json::json;

    use super::{
        CommandRecipe, DagNode, DiagnosticSeverity, FieldPresence, FieldRole, InferencePolicy,
        JsonObject, NodeClass, NodePayload, NonEmptyText, ProjectFieldSpec, ProjectSchema,
    };
    use crate::CoreError;

    // Blank (whitespace-only) input must be refused, not stored.
    #[test]
    fn non_empty_text_rejects_blank_input() {
        let text = NonEmptyText::new("   ");
        assert_eq!(text, Err(CoreError::EmptyText));
    }

    // An empty argv is not a runnable command.
    #[test]
    fn command_recipe_requires_argv() {
        let recipe = CommandRecipe::new(
            Utf8PathBuf::from("/tmp/worktree"),
            Vec::new(),
            BTreeMap::new(),
        );
        assert_eq!(recipe, Err(CoreError::EmptyCommand));
    }

    // A missing Required field produces a diagnostic at its declared
    // severity, but the node is still admitted — validation never blocks
    // ingest.
    #[test]
    fn schema_validation_warns_without_rejecting_ingest() -> Result<(), CoreError> {
        let schema = ProjectSchema {
            namespace: NonEmptyText::new("local.libgrid")?,
            version: 1,
            fields: vec![ProjectFieldSpec {
                name: NonEmptyText::new("hypothesis")?,
                node_classes: BTreeSet::from([NodeClass::Change]),
                presence: FieldPresence::Required,
                severity: DiagnosticSeverity::Warning,
                role: FieldRole::ProjectionGate,
                inference_policy: InferencePolicy::ManualOnly,
            }],
        };
        // Payload deliberately omits "hypothesis".
        let payload = NodePayload::with_schema(schema.schema_ref(), JsonObject::new());
        let diagnostics = schema.validate_node(NodeClass::Change, &payload);

        assert_eq!(diagnostics.admission, super::AdmissionState::Admitted);
        assert_eq!(diagnostics.items.len(), 1);
        assert_eq!(diagnostics.items[0].severity, DiagnosticSeverity::Warning);
        Ok(())
    }

    // Research is one of the classes whose default track is OffPath.
    #[test]
    fn research_nodes_default_to_off_path() -> Result<(), CoreError> {
        let payload = NodePayload {
            schema: None,
            fields: JsonObject::from_iter([("topic".to_owned(), json!("ideas"))]),
        };
        let node = DagNode::new(
            NodeClass::Research,
            None,
            NonEmptyText::new("feature scouting")?,
            None,
            payload,
            super::NodeDiagnostics::admitted(),
        );

        assert!(!node.is_core_path());
        Ok(())
    }
}
diff --git a/crates/fidget-spinner-store-sqlite/Cargo.toml b/crates/fidget-spinner-store-sqlite/Cargo.toml
new file mode 100644
index 0000000..54e0784
--- /dev/null
+++ b/crates/fidget-spinner-store-sqlite/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "fidget-spinner-store-sqlite"
+description = "Per-project SQLite store for the Fidget Spinner DAG spine"
+edition.workspace = true
+license.workspace = true
+publish = false
+rust-version.workspace = true
+version.workspace = true
+
+[dependencies]
+camino.workspace = true
+fidget-spinner-core = { path = "../fidget-spinner-core" }
+rusqlite.workspace = true
+serde.workspace = true
+serde_json.workspace = true
+thiserror.workspace = true
+time.workspace = true
+uuid.workspace = true
+
+[lints]
+workspace = true
diff --git a/crates/fidget-spinner-store-sqlite/src/lib.rs b/crates/fidget-spinner-store-sqlite/src/lib.rs
new file mode 100644
index 0000000..7c129ab
--- /dev/null
+++ b/crates/fidget-spinner-store-sqlite/src/lib.rs
@@ -0,0 +1,1810 @@
+use std::fs;
+use std::io;
+use std::process::Command;
+
+use camino::{Utf8Path, Utf8PathBuf};
+use fidget_spinner_core::{
+ AnnotationVisibility, CheckpointDisposition, CheckpointRecord, CheckpointSnapshotRef,
+ CodeSnapshotRef, CommandRecipe, CompletedExperiment, DagEdge, DagNode, EdgeKind,
+ ExecutionBackend, ExperimentResult, FrontierContract, FrontierNote, FrontierProjection,
+ FrontierRecord, FrontierStatus, FrontierVerdict, GitCommitHash, JsonObject, MetricObservation,
+ MetricSpec, MetricUnit, NodeAnnotation, NodeClass, NodeDiagnostics, NodePayload, NonEmptyText,
+ OptimizationObjective, ProjectSchema, RunRecord, RunStatus,
+};
+use rusqlite::{Connection, OptionalExtension, Transaction, params};
+use serde::{Deserialize, Serialize};
+use serde_json::{Value, json};
+use thiserror::Error;
+use time::OffsetDateTime;
+use time::format_description::well_known::Rfc3339;
+use uuid::Uuid;
+
/// Directory created under the project root to hold all store state.
pub const STORE_DIR_NAME: &str = ".fidget_spinner";
/// SQLite database file inside the store directory.
pub const STATE_DB_NAME: &str = "state.sqlite";
/// JSON file inside the store directory holding the [`ProjectConfig`].
pub const PROJECT_CONFIG_NAME: &str = "project.json";
/// JSON file inside the store directory holding the project schema.
pub const PROJECT_SCHEMA_NAME: &str = "schema.json";
+
/// Errors surfaced by the SQLite-backed project store. Infrastructure
/// failures wrap their source via `#[from]`; lookup failures carry the id
/// or path that did not resolve.
#[derive(Debug, Error)]
pub enum StoreError {
    /// No `.fidget_spinner` store could be discovered at or above the path.
    #[error("project store is not initialized at {0}")]
    MissingProjectStore(Utf8PathBuf),
    #[error("I/O failure")]
    Io(#[from] io::Error),
    #[error("SQLite failure")]
    Sql(#[from] rusqlite::Error),
    #[error("JSON failure")]
    Json(#[from] serde_json::Error),
    #[error("time parse failure")]
    TimeParse(#[from] time::error::Parse),
    #[error("time format failure")]
    TimeFormat(#[from] time::error::Format),
    #[error("core domain failure")]
    Core(#[from] fidget_spinner_core::CoreError),
    #[error("UUID parse failure")]
    Uuid(#[from] uuid::Error),
    #[error("node {0} was not found")]
    NodeNotFound(fidget_spinner_core::NodeId),
    #[error("frontier {0} was not found")]
    FrontierNotFound(fidget_spinner_core::FrontierId),
    #[error("checkpoint {0} was not found")]
    CheckpointNotFound(fidget_spinner_core::CheckpointId),
    /// The node exists but is not a `Change` node where one was required.
    #[error("node {0} is not a change node")]
    NodeNotChange(fidget_spinner_core::NodeId),
    #[error("frontier {frontier_id} has no champion checkpoint")]
    MissingChampionCheckpoint {
        frontier_id: fidget_spinner_core::FrontierId,
    },
    #[error("git repository inspection failed for {0}")]
    GitInspectionFailed(Utf8PathBuf),
}
+
/// Project-level metadata persisted as JSON in the store directory.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct ProjectConfig {
    pub display_name: NonEmptyText,
    pub created_at: OffsetDateTime,
    // On-disk layout version; bump when the store format changes.
    pub store_format_version: u32,
}

impl ProjectConfig {
    /// Config for a newly initialized project: current UTC creation time,
    /// store format version 1.
    #[must_use]
    pub fn new(display_name: NonEmptyText) -> Self {
        Self {
            display_name,
            created_at: OffsetDateTime::now_utc(),
            store_format_version: 1,
        }
    }
}
+
/// Inputs for creating a DAG node via [`ProjectStore::add_node`];
/// diagnostics are computed by the store against the project schema.
#[derive(Clone, Debug)]
pub struct CreateNodeRequest {
    pub class: NodeClass,
    pub frontier_id: Option<fidget_spinner_core::FrontierId>,
    pub title: NonEmptyText,
    pub summary: Option<NonEmptyText>,
    pub payload: NodePayload,
    pub annotations: Vec<NodeAnnotation>,
    // Edges to create between existing nodes and the new node.
    pub attachments: Vec<EdgeAttachment>,
}

/// Which way an attachment edge points relative to the node being created.
#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub enum EdgeAttachmentDirection {
    ExistingToNew,
    NewToExisting,
}

/// Edge to attach when creating a node: the existing endpoint, the edge
/// kind, and the direction (see [`EdgeAttachment::materialize`]).
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct EdgeAttachment {
    pub node_id: fidget_spinner_core::NodeId,
    pub kind: EdgeKind,
    pub direction: EdgeAttachmentDirection,
}
+
+impl EdgeAttachment {
+ #[must_use]
+ pub fn materialize(&self, new_node_id: fidget_spinner_core::NodeId) -> DagEdge {
+ match self.direction {
+ EdgeAttachmentDirection::ExistingToNew => DagEdge {
+ source_id: self.node_id,
+ target_id: new_node_id,
+ kind: self.kind,
+ },
+ EdgeAttachmentDirection::NewToExisting => DagEdge {
+ source_id: new_node_id,
+ target_id: self.node_id,
+ kind: self.kind,
+ },
+ }
+ }
+}
+
/// Filters for [`ProjectStore::list_nodes`]; `None` filters match
/// everything, and `limit` caps the number of returned rows.
#[derive(Clone, Debug)]
pub struct ListNodesQuery {
    pub frontier_id: Option<fidget_spinner_core::FrontierId>,
    pub class: Option<NodeClass>,
    pub include_archived: bool,
    pub limit: u32,
}
+
+impl Default for ListNodesQuery {
+ fn default() -> Self {
+ Self {
+ frontier_id: None,
+ class: None,
+ include_archived: false,
+ limit: 20,
+ }
+ }
+}
+
/// Lightweight listing row for a node: spine fields plus counts of
/// diagnostics and hidden annotations (no payload, no annotation bodies).
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct NodeSummary {
    pub id: fidget_spinner_core::NodeId,
    pub class: NodeClass,
    pub track: fidget_spinner_core::NodeTrack,
    pub frontier_id: Option<fidget_spinner_core::FrontierId>,
    pub archived: bool,
    pub title: NonEmptyText,
    pub summary: Option<NonEmptyText>,
    pub diagnostic_count: u64,
    pub hidden_annotation_count: u64,
    pub created_at: OffsetDateTime,
    pub updated_at: OffsetDateTime,
}

/// Inputs for [`ProjectStore::create_frontier`]: the frontier label, the
/// root contract node to create, and an optional seed champion checkpoint.
#[derive(Clone, Debug)]
pub struct CreateFrontierRequest {
    pub label: NonEmptyText,
    pub contract_title: NonEmptyText,
    pub contract_summary: Option<NonEmptyText>,
    pub contract: FrontierContract,
    pub initial_checkpoint: Option<CheckpointSeed>,
}

/// Seed data for a frontier's first (champion) checkpoint.
#[derive(Clone, Debug)]
pub struct CheckpointSeed {
    pub summary: NonEmptyText,
    pub snapshot: CheckpointSnapshotRef,
}

/// Everything needed to close out one experiment in a single call:
/// base/candidate checkpoints, the run that was executed, measured
/// metrics, the wrap-up note, and the decision.
#[derive(Clone, Debug)]
pub struct CloseExperimentRequest {
    pub frontier_id: fidget_spinner_core::FrontierId,
    pub base_checkpoint_id: fidget_spinner_core::CheckpointId,
    pub change_node_id: fidget_spinner_core::NodeId,
    pub candidate_summary: NonEmptyText,
    pub candidate_snapshot: CheckpointSnapshotRef,
    pub run_title: NonEmptyText,
    pub run_summary: Option<NonEmptyText>,
    pub backend: ExecutionBackend,
    pub benchmark_suite: NonEmptyText,
    pub command: CommandRecipe,
    pub code_snapshot: Option<CodeSnapshotRef>,
    pub primary_metric: MetricObservation,
    pub supporting_metrics: Vec<MetricObservation>,
    pub note: FrontierNote,
    pub verdict: FrontierVerdict,
    pub decision_title: NonEmptyText,
    pub decision_rationale: NonEmptyText,
    pub analysis_node_id: Option<fidget_spinner_core::NodeId>,
}

/// Everything persisted by closing an experiment, returned to the caller.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct ExperimentReceipt {
    pub checkpoint: CheckpointRecord,
    pub run_node: DagNode,
    pub run: RunRecord,
    pub decision_node: DagNode,
    pub experiment: CompletedExperiment,
}

/// Handle to one project's on-disk store: the resolved roots, an open
/// SQLite connection, and the cached config/schema JSON.
pub struct ProjectStore {
    project_root: Utf8PathBuf,
    state_root: Utf8PathBuf,
    connection: Connection,
    config: ProjectConfig,
    schema: ProjectSchema,
}
+
+impl ProjectStore {
    /// Initializes a store under `project_root`: creates the state
    /// directory (including `blobs/`), writes fresh config and schema JSON
    /// files, opens the SQLite database, and runs migrations.
    ///
    /// NOTE(review): existing config/schema files at the same location are
    /// overwritten without a guard — confirm re-init is intended.
    ///
    /// # Errors
    ///
    /// Propagates I/O, JSON, and SQLite failures.
    pub fn init(
        project_root: impl AsRef<Utf8Path>,
        display_name: NonEmptyText,
        schema_namespace: NonEmptyText,
    ) -> Result<Self, StoreError> {
        let project_root = project_root.as_ref().to_path_buf();
        let state_root = state_root(&project_root);
        fs::create_dir_all(state_root.join("blobs"))?;
        let config = ProjectConfig::new(display_name);
        write_json_file(&state_root.join(PROJECT_CONFIG_NAME), &config)?;
        let schema = ProjectSchema::default_with_namespace(schema_namespace);
        write_json_file(&state_root.join(PROJECT_SCHEMA_NAME), &schema)?;

        let connection = Connection::open(state_root.join(STATE_DB_NAME).as_std_path())?;
        migrate(&connection)?;

        Ok(Self {
            project_root,
            state_root,
            connection,
            config,
            schema,
        })
    }
+
    /// Opens an existing store, discovering the actual project root from
    /// `project_root` (which may be a nested path) via
    /// `discover_project_root`, then loading config/schema JSON and
    /// opening + migrating the SQLite database.
    ///
    /// # Errors
    ///
    /// Returns [`StoreError::MissingProjectStore`] when no store is found
    /// at or above the requested path; otherwise propagates I/O, JSON, and
    /// SQLite failures.
    pub fn open(project_root: impl AsRef<Utf8Path>) -> Result<Self, StoreError> {
        let requested_root = project_root.as_ref().to_path_buf();
        let project_root = discover_project_root(&requested_root)
            .ok_or(StoreError::MissingProjectStore(requested_root))?;
        let state_root = state_root(&project_root);
        let config = read_json_file::<ProjectConfig>(&state_root.join(PROJECT_CONFIG_NAME))?;
        let schema = read_json_file::<ProjectSchema>(&state_root.join(PROJECT_SCHEMA_NAME))?;
        let connection = Connection::open(state_root.join(STATE_DB_NAME).as_std_path())?;
        migrate(&connection)?;
        Ok(Self {
            project_root,
            state_root,
            connection,
            config,
            schema,
        })
    }
+
    /// The project configuration loaded at open/init time.
    #[must_use]
    pub fn config(&self) -> &ProjectConfig {
        &self.config
    }

    /// The project payload schema loaded at open/init time.
    #[must_use]
    pub fn schema(&self) -> &ProjectSchema {
        &self.schema
    }

    /// The resolved project root this store is bound to.
    #[must_use]
    pub fn project_root(&self) -> &Utf8Path {
        &self.project_root
    }

    /// The state directory (`<project_root>/.fidget_spinner`).
    #[must_use]
    pub fn state_root(&self) -> &Utf8Path {
        &self.state_root
    }
+
    /// Creates a frontier: a root contract node (payload built from the
    /// contract and validated against the project schema), the frontier
    /// record itself, optionally a seed champion checkpoint, and a
    /// `frontier.created` audit event — all in one transaction.
    ///
    /// Returns the derived [`FrontierProjection`] for the new frontier.
    ///
    /// # Errors
    ///
    /// Propagates JSON and SQLite failures; the transaction rolls back on
    /// any error.
    pub fn create_frontier(
        &mut self,
        request: CreateFrontierRequest,
    ) -> Result<FrontierProjection, StoreError> {
        let frontier_id = fidget_spinner_core::FrontierId::fresh();
        let payload = NodePayload::with_schema(
            self.schema.schema_ref(),
            frontier_contract_payload(&request.contract)?,
        );
        let diagnostics = self.schema.validate_node(NodeClass::Contract, &payload);
        let contract_node = DagNode::new(
            NodeClass::Contract,
            Some(frontier_id),
            request.contract_title,
            request.contract_summary,
            payload,
            diagnostics,
        );
        let frontier = FrontierRecord::with_id(frontier_id, request.label, contract_node.id);

        let tx = self.connection.transaction()?;
        insert_node(&tx, &contract_node)?;
        insert_frontier(&tx, &frontier)?;
        if let Some(seed) = request.initial_checkpoint {
            // The seed checkpoint is immediately the frontier's champion,
            // anchored on the contract node.
            let checkpoint = CheckpointRecord {
                id: fidget_spinner_core::CheckpointId::fresh(),
                frontier_id: frontier.id,
                node_id: contract_node.id,
                snapshot: seed.snapshot,
                disposition: CheckpointDisposition::Champion,
                summary: seed.summary,
                created_at: OffsetDateTime::now_utc(),
            };
            insert_checkpoint(&tx, &checkpoint)?;
        }
        insert_event(
            &tx,
            "frontier",
            &frontier.id.to_string(),
            "frontier.created",
            json!({"root_contract_node_id": contract_node.id}),
        )?;
        tx.commit()?;

        self.frontier_projection(frontier.id)
    }
+
    /// Creates a node: validates the payload against the project schema,
    /// inserts the node, materializes all requested attachment edges, and
    /// records a `node.created` event — all in one transaction. Returns
    /// the node as persisted.
    ///
    /// # Errors
    ///
    /// Propagates SQLite failures; the transaction rolls back on error.
    pub fn add_node(&mut self, request: CreateNodeRequest) -> Result<DagNode, StoreError> {
        let diagnostics = self.schema.validate_node(request.class, &request.payload);
        let mut node = DagNode::new(
            request.class,
            request.frontier_id,
            request.title,
            request.summary,
            request.payload,
            diagnostics,
        );
        // Caller-provided annotations ride along with the new node.
        node.annotations = request.annotations;

        let tx = self.connection.transaction()?;
        insert_node(&tx, &node)?;
        for attachment in &request.attachments {
            insert_edge(&tx, &attachment.materialize(node.id))?;
        }
        insert_event(
            &tx,
            "node",
            &node.id.to_string(),
            "node.created",
            json!({"class": node.class.as_str(), "frontier_id": node.frontier_id}),
        )?;
        tx.commit()?;
        Ok(node)
    }
+
+ pub fn archive_node(&mut self, node_id: fidget_spinner_core::NodeId) -> Result<(), StoreError> {
+ let updated_at = encode_timestamp(OffsetDateTime::now_utc())?;
+ let changed = self.connection.execute(
+ "UPDATE nodes SET archived = 1, updated_at = ?1 WHERE id = ?2",
+ params![updated_at, node_id.to_string()],
+ )?;
+ if changed == 0 {
+ return Err(StoreError::NodeNotFound(node_id));
+ }
+ Ok(())
+ }
+
    /// Attaches an annotation to an existing node.
    ///
    /// Runs in a single transaction: verifies the node exists, inserts the
    /// annotation row, bumps the node's `updated_at`, and records a
    /// `node.annotated` event. The early return on a missing node drops the
    /// transaction, which rolls it back.
    ///
    /// # Errors
    ///
    /// Returns [`StoreError::NodeNotFound`] when `node_id` has no row.
    pub fn annotate_node(
        &mut self,
        node_id: fidget_spinner_core::NodeId,
        annotation: NodeAnnotation,
    ) -> Result<(), StoreError> {
        let tx = self.connection.transaction()?;
        // Existence probe: `SELECT 1` avoids reading the full row.
        let exists = tx
            .query_row(
                "SELECT 1 FROM nodes WHERE id = ?1",
                params![node_id.to_string()],
                |row| row.get::<_, i64>(0),
            )
            .optional()?;
        if exists.is_none() {
            return Err(StoreError::NodeNotFound(node_id));
        }
        insert_annotation(&tx, node_id, &annotation)?;
        // Touch `updated_at` so the node surfaces in recency-ordered listings.
        let _ = tx.execute(
            "UPDATE nodes SET updated_at = ?1 WHERE id = ?2",
            params![
                encode_timestamp(OffsetDateTime::now_utc())?,
                node_id.to_string()
            ],
        )?;
        insert_event(
            &tx,
            "node",
            &node_id.to_string(),
            "node.annotated",
            json!({"visibility": format!("{:?}", annotation.visibility)}),
        )?;
        tx.commit()?;
        Ok(())
    }
+
+ pub fn get_node(
+ &self,
+ node_id: fidget_spinner_core::NodeId,
+ ) -> Result<Option<DagNode>, StoreError> {
+ let mut statement = self.connection.prepare(
+ "SELECT
+ id,
+ class,
+ track,
+ frontier_id,
+ archived,
+ title,
+ summary,
+ payload_schema_namespace,
+ payload_schema_version,
+ payload_json,
+ diagnostics_json,
+ agent_session_id,
+ created_at,
+ updated_at
+ FROM nodes
+ WHERE id = ?1",
+ )?;
+ let node = statement
+ .query_row(params![node_id.to_string()], read_node_row)
+ .optional()?;
+ node.map(|mut item| {
+ item.annotations = self.load_annotations(item.id)?;
+ Ok(item)
+ })
+ .transpose()
+ }
+
    /// Lists node summaries matching the query, most recently updated first.
    ///
    /// Filtering is done in SQL with NULL-tolerant placeholders: a `NULL`
    /// bound parameter disables that filter (`?1` frontier, `?2` class), and
    /// `?3 = 1` disables the `archived = 0` filter. A correlated subquery
    /// counts hidden annotations per node.
    pub fn list_nodes(&self, query: ListNodesQuery) -> Result<Vec<NodeSummary>, StoreError> {
        let frontier_id = query.frontier_id.map(|id| id.to_string());
        let class = query.class.map(|item| item.as_str().to_owned());
        let limit = i64::from(query.limit);
        let mut statement = self.connection.prepare(
            "SELECT
                n.id,
                n.class,
                n.track,
                n.frontier_id,
                n.archived,
                n.title,
                n.summary,
                n.diagnostics_json,
                n.created_at,
                n.updated_at,
                (
                    SELECT COUNT(*)
                    FROM node_annotations AS a
                    WHERE a.node_id = n.id AND a.visibility = 'hidden'
                ) AS hidden_annotation_count
            FROM nodes AS n
            WHERE (?1 IS NULL OR n.frontier_id = ?1)
              AND (?2 IS NULL OR n.class = ?2)
              AND (?3 = 1 OR n.archived = 0)
            ORDER BY n.updated_at DESC
            LIMIT ?4",
        )?;
        let mut rows = statement.query(params![
            frontier_id,
            class,
            i64::from(query.include_archived),
            limit
        ])?;
        let mut items = Vec::new();
        while let Some(row) = rows.next()? {
            // Column indices are tied to the SELECT list above:
            // 0..=9 node columns, 10 = hidden_annotation_count.
            let diagnostics = decode_json::<NodeDiagnostics>(&row.get::<_, String>(7)?)?;
            items.push(NodeSummary {
                id: parse_node_id(&row.get::<_, String>(0)?)?,
                class: parse_node_class(&row.get::<_, String>(1)?)?,
                track: parse_node_track(&row.get::<_, String>(2)?)?,
                frontier_id: row
                    .get::<_, Option<String>>(3)?
                    .map(|raw| parse_frontier_id(&raw))
                    .transpose()?,
                archived: row.get::<_, i64>(4)? != 0,
                title: NonEmptyText::new(row.get::<_, String>(5)?)?,
                summary: row
                    .get::<_, Option<String>>(6)?
                    .map(NonEmptyText::new)
                    .transpose()?,
                diagnostic_count: diagnostics.items.len() as u64,
                hidden_annotation_count: row.get::<_, i64>(10)? as u64,
                created_at: decode_timestamp(&row.get::<_, String>(8)?)?,
                updated_at: decode_timestamp(&row.get::<_, String>(9)?)?,
            });
        }
        Ok(items)
    }
+
+ pub fn list_frontiers(&self) -> Result<Vec<FrontierRecord>, StoreError> {
+ let mut statement = self.connection.prepare(
+ "SELECT id, label, root_contract_node_id, status, created_at, updated_at
+ FROM frontiers
+ ORDER BY updated_at DESC",
+ )?;
+ let mut rows = statement.query([])?;
+ let mut items = Vec::new();
+ while let Some(row) = rows.next()? {
+ items.push(read_frontier_row(row)?);
+ }
+ Ok(items)
+ }
+
    /// Builds a read-model of a frontier: its record, the current champion
    /// checkpoint (if any), the set of frontier-candidate checkpoints, and
    /// the number of closed experiments.
    ///
    /// # Errors
    ///
    /// Returns [`StoreError::FrontierNotFound`] when the frontier is unknown
    /// (via `load_frontier`).
    pub fn frontier_projection(
        &self,
        frontier_id: fidget_spinner_core::FrontierId,
    ) -> Result<FrontierProjection, StoreError> {
        let frontier = self.load_frontier(frontier_id)?;
        let mut champion_checkpoint_id = None;
        // BTreeSet keeps candidate ids deduplicated and deterministically ordered.
        let mut candidate_checkpoint_ids = std::collections::BTreeSet::new();

        let mut statement = self.connection.prepare(
            "SELECT id, disposition
             FROM checkpoints
             WHERE frontier_id = ?1",
        )?;
        let mut rows = statement.query(params![frontier_id.to_string()])?;
        while let Some(row) = rows.next()? {
            let checkpoint_id = parse_checkpoint_id(&row.get::<_, String>(0)?)?;
            match parse_checkpoint_disposition(&row.get::<_, String>(1)?)? {
                CheckpointDisposition::Champion => champion_checkpoint_id = Some(checkpoint_id),
                CheckpointDisposition::FrontierCandidate => {
                    let _ = candidate_checkpoint_ids.insert(checkpoint_id);
                }
                // Baseline / dead-end / archived checkpoints are not part of
                // the projection.
                CheckpointDisposition::Baseline
                | CheckpointDisposition::DeadEnd
                | CheckpointDisposition::Archived => {}
            }
        }
        let experiment_count = self.connection.query_row(
            "SELECT COUNT(*) FROM experiments WHERE frontier_id = ?1",
            params![frontier_id.to_string()],
            |row| row.get::<_, i64>(0),
        )? as u64;

        Ok(FrontierProjection {
            frontier,
            champion_checkpoint_id,
            candidate_checkpoint_ids,
            experiment_count,
        })
    }
+
+ pub fn load_checkpoint(
+ &self,
+ checkpoint_id: fidget_spinner_core::CheckpointId,
+ ) -> Result<Option<CheckpointRecord>, StoreError> {
+ let mut statement = self.connection.prepare(
+ "SELECT
+ id,
+ frontier_id,
+ node_id,
+ repo_root,
+ worktree_root,
+ worktree_name,
+ commit_hash,
+ disposition,
+ summary,
+ created_at
+ FROM checkpoints
+ WHERE id = ?1",
+ )?;
+ statement
+ .query_row(params![checkpoint_id.to_string()], |row| {
+ read_checkpoint_row(row)
+ })
+ .optional()
+ .map_err(StoreError::from)
+ }
+
    /// Closes an experiment against a frontier in one atomic transaction.
    ///
    /// Steps, in order:
    /// 1. Validates the change node (must exist, be `NodeClass::Change`, and
    ///    belong to the requested frontier) and the base checkpoint (must
    ///    exist on the same frontier).
    /// 2. Synthesizes a `Run` node plus a `RunRecord` (status `Succeeded`,
    ///    started/finished stamped "now") and a `Decision` node from the
    ///    request.
    /// 3. Builds a candidate checkpoint whose disposition is derived from
    ///    the verdict (promote → champion, keep/needs-evidence → candidate,
    ///    revert → dead-end, archive → archived).
    /// 4. Persists nodes, lineage/evidence edges, run + metrics, checkpoint,
    ///    and the experiment record; on promotion the previous champion is
    ///    demoted first. Finally touches the frontier and logs an
    ///    `experiment.closed` event.
    ///
    /// # Errors
    ///
    /// [`StoreError::NodeNotFound`] / [`StoreError::NodeNotChange`] for a bad
    /// change node, [`StoreError::FrontierNotFound`] when the change node is
    /// on another frontier, [`StoreError::CheckpointNotFound`] for a missing
    /// or mismatched base checkpoint.
    pub fn close_experiment(
        &mut self,
        request: CloseExperimentRequest,
    ) -> Result<ExperimentReceipt, StoreError> {
        let change_node = self
            .get_node(request.change_node_id)?
            .ok_or(StoreError::NodeNotFound(request.change_node_id))?;
        if change_node.class != NodeClass::Change {
            return Err(StoreError::NodeNotChange(request.change_node_id));
        }
        if change_node.frontier_id != Some(request.frontier_id) {
            return Err(StoreError::FrontierNotFound(request.frontier_id));
        }
        let base_checkpoint = self
            .load_checkpoint(request.base_checkpoint_id)?
            .ok_or(StoreError::CheckpointNotFound(request.base_checkpoint_id))?;
        // A checkpoint from another frontier is treated as not found.
        if base_checkpoint.frontier_id != request.frontier_id {
            return Err(StoreError::CheckpointNotFound(request.base_checkpoint_id));
        }

        // --- Run node + record -------------------------------------------
        let run_payload = NodePayload::with_schema(
            self.schema.schema_ref(),
            json_object(json!({
                "benchmark_suite": request.benchmark_suite.as_str(),
                "backend": format!("{:?}", request.backend),
                "command": request.command.argv.iter().map(NonEmptyText::as_str).collect::<Vec<_>>(),
            }))?,
        );
        let run_diagnostics = self.schema.validate_node(NodeClass::Run, &run_payload);
        let run_node = DagNode::new(
            NodeClass::Run,
            Some(request.frontier_id),
            request.run_title,
            request.run_summary,
            run_payload,
            run_diagnostics,
        );
        let run_id = fidget_spinner_core::RunId::fresh();
        let now = OffsetDateTime::now_utc();
        let run = RunRecord {
            node_id: run_node.id,
            run_id,
            frontier_id: Some(request.frontier_id),
            status: RunStatus::Succeeded,
            backend: request.backend,
            code_snapshot: request.code_snapshot,
            benchmark_suite: Some(request.benchmark_suite.clone()),
            command: request.command,
            started_at: Some(now),
            finished_at: Some(now),
        };

        // --- Decision node -----------------------------------------------
        let decision_payload = NodePayload::with_schema(
            self.schema.schema_ref(),
            json_object(json!({
                "verdict": format!("{:?}", request.verdict),
                "rationale": request.decision_rationale.as_str(),
            }))?,
        );
        let decision_diagnostics = self
            .schema
            .validate_node(NodeClass::Decision, &decision_payload);
        let decision_node = DagNode::new(
            NodeClass::Decision,
            Some(request.frontier_id),
            request.decision_title,
            Some(request.decision_rationale.clone()),
            decision_payload,
            decision_diagnostics,
        );

        // --- Candidate checkpoint; disposition follows the verdict -------
        let checkpoint = CheckpointRecord {
            id: fidget_spinner_core::CheckpointId::fresh(),
            frontier_id: request.frontier_id,
            node_id: run_node.id,
            snapshot: request.candidate_snapshot,
            disposition: match request.verdict {
                FrontierVerdict::PromoteToChampion => CheckpointDisposition::Champion,
                FrontierVerdict::KeepOnFrontier | FrontierVerdict::NeedsMoreEvidence => {
                    CheckpointDisposition::FrontierCandidate
                }
                FrontierVerdict::RevertToChampion => CheckpointDisposition::DeadEnd,
                FrontierVerdict::ArchiveDeadEnd => CheckpointDisposition::Archived,
            },
            summary: request.candidate_summary,
            created_at: now,
        };

        let experiment = CompletedExperiment {
            id: fidget_spinner_core::ExperimentId::fresh(),
            frontier_id: request.frontier_id,
            base_checkpoint_id: request.base_checkpoint_id,
            candidate_checkpoint_id: checkpoint.id,
            change_node_id: request.change_node_id,
            run_node_id: run_node.id,
            run_id,
            analysis_node_id: request.analysis_node_id,
            decision_node_id: decision_node.id,
            result: ExperimentResult {
                benchmark_suite: request.benchmark_suite,
                primary_metric: request.primary_metric,
                supporting_metrics: request.supporting_metrics,
                benchmark_bundle: None,
            },
            note: request.note,
            verdict: request.verdict,
            created_at: now,
        };

        // --- Single transaction for all writes ---------------------------
        let tx = self.connection.transaction()?;
        insert_node(&tx, &run_node)?;
        insert_node(&tx, &decision_node)?;
        // change --lineage--> run --evidence--> decision
        insert_edge(
            &tx,
            &DagEdge {
                source_id: request.change_node_id,
                target_id: run_node.id,
                kind: EdgeKind::Lineage,
            },
        )?;
        insert_edge(
            &tx,
            &DagEdge {
                source_id: run_node.id,
                target_id: decision_node.id,
                kind: EdgeKind::Evidence,
            },
        )?;
        insert_run(
            &tx,
            &run,
            &experiment.result.primary_metric,
            &experiment.result.supporting_metrics,
        )?;
        match request.verdict {
            FrontierVerdict::PromoteToChampion => {
                // Ensure at most one champion per frontier before inserting
                // the new champion checkpoint.
                demote_previous_champion(&tx, request.frontier_id)?;
            }
            FrontierVerdict::KeepOnFrontier
            | FrontierVerdict::NeedsMoreEvidence
            | FrontierVerdict::RevertToChampion
            | FrontierVerdict::ArchiveDeadEnd => {}
        }
        insert_checkpoint(&tx, &checkpoint)?;
        insert_experiment(&tx, &experiment)?;
        touch_frontier(&tx, request.frontier_id)?;
        insert_event(
            &tx,
            "experiment",
            &experiment.id.to_string(),
            "experiment.closed",
            json!({
                "frontier_id": request.frontier_id,
                "verdict": format!("{:?}", request.verdict),
                "candidate_checkpoint_id": checkpoint.id,
            }),
        )?;
        tx.commit()?;

        Ok(ExperimentReceipt {
            checkpoint,
            run_node,
            run,
            decision_node,
            experiment,
        })
    }
+
    /// Best-effort capture of the current git state as a checkpoint seed.
    ///
    /// Delegates to `auto_capture_checkpoint_seed`, which returns `Ok(None)`
    /// when the project root is not inside a git repository.
    pub fn auto_capture_checkpoint(
        &self,
        summary: NonEmptyText,
    ) -> Result<Option<CheckpointSeed>, StoreError> {
        auto_capture_checkpoint_seed(&self.project_root, summary)
    }
+
    /// Loads all annotations for a node, oldest first.
    fn load_annotations(
        &self,
        node_id: fidget_spinner_core::NodeId,
    ) -> Result<Vec<NodeAnnotation>, StoreError> {
        let mut statement = self.connection.prepare(
            "SELECT id, visibility, label, body, created_at
             FROM node_annotations
             WHERE node_id = ?1
             ORDER BY created_at ASC",
        )?;
        let mut rows = statement.query(params![node_id.to_string()])?;
        let mut items = Vec::new();
        while let Some(row) = rows.next()? {
            // Column indices follow the SELECT list above.
            items.push(NodeAnnotation {
                id: parse_annotation_id(&row.get::<_, String>(0)?)?,
                visibility: parse_annotation_visibility(&row.get::<_, String>(1)?)?,
                label: row
                    .get::<_, Option<String>>(2)?
                    .map(NonEmptyText::new)
                    .transpose()?,
                body: NonEmptyText::new(row.get::<_, String>(3)?)?,
                created_at: decode_timestamp(&row.get::<_, String>(4)?)?,
            });
        }
        Ok(items)
    }
+
+ fn load_frontier(
+ &self,
+ frontier_id: fidget_spinner_core::FrontierId,
+ ) -> Result<FrontierRecord, StoreError> {
+ let mut statement = self.connection.prepare(
+ "SELECT id, label, root_contract_node_id, status, created_at, updated_at
+ FROM frontiers
+ WHERE id = ?1",
+ )?;
+ let frontier = statement
+ .query_row(params![frontier_id.to_string()], |row| {
+ read_frontier_row(row).map_err(to_sql_conversion_error)
+ })
+ .optional()?;
+ frontier.ok_or(StoreError::FrontierNotFound(frontier_id))
+ }
+}
+
/// Creates the full schema on a fresh database.
///
/// Idempotent by construction: every statement is `CREATE TABLE IF NOT
/// EXISTS`, so re-running on an existing database is a no-op. The
/// `foreign_keys` pragma is per-connection in SQLite; enabling it here makes
/// the `REFERENCES ... ON DELETE ...` clauses below actually enforced for
/// this connection.
fn migrate(connection: &Connection) -> Result<(), StoreError> {
    connection.execute_batch(
        "
        PRAGMA foreign_keys = ON;

        CREATE TABLE IF NOT EXISTS nodes (
            id TEXT PRIMARY KEY,
            class TEXT NOT NULL,
            track TEXT NOT NULL,
            frontier_id TEXT,
            archived INTEGER NOT NULL,
            title TEXT NOT NULL,
            summary TEXT,
            payload_schema_namespace TEXT,
            payload_schema_version INTEGER,
            payload_json TEXT NOT NULL,
            diagnostics_json TEXT NOT NULL,
            agent_session_id TEXT,
            created_at TEXT NOT NULL,
            updated_at TEXT NOT NULL
        );

        CREATE TABLE IF NOT EXISTS node_annotations (
            id TEXT PRIMARY KEY,
            node_id TEXT NOT NULL REFERENCES nodes(id) ON DELETE CASCADE,
            visibility TEXT NOT NULL,
            label TEXT,
            body TEXT NOT NULL,
            created_at TEXT NOT NULL
        );

        CREATE TABLE IF NOT EXISTS node_edges (
            source_id TEXT NOT NULL REFERENCES nodes(id) ON DELETE CASCADE,
            target_id TEXT NOT NULL REFERENCES nodes(id) ON DELETE CASCADE,
            kind TEXT NOT NULL,
            PRIMARY KEY (source_id, target_id, kind)
        );

        CREATE TABLE IF NOT EXISTS frontiers (
            id TEXT PRIMARY KEY,
            label TEXT NOT NULL,
            root_contract_node_id TEXT NOT NULL REFERENCES nodes(id) ON DELETE RESTRICT,
            status TEXT NOT NULL,
            created_at TEXT NOT NULL,
            updated_at TEXT NOT NULL
        );

        CREATE TABLE IF NOT EXISTS checkpoints (
            id TEXT PRIMARY KEY,
            frontier_id TEXT NOT NULL REFERENCES frontiers(id) ON DELETE CASCADE,
            node_id TEXT NOT NULL REFERENCES nodes(id) ON DELETE RESTRICT,
            repo_root TEXT NOT NULL,
            worktree_root TEXT NOT NULL,
            worktree_name TEXT,
            commit_hash TEXT NOT NULL,
            disposition TEXT NOT NULL,
            summary TEXT NOT NULL,
            created_at TEXT NOT NULL
        );

        CREATE TABLE IF NOT EXISTS runs (
            run_id TEXT PRIMARY KEY,
            node_id TEXT NOT NULL REFERENCES nodes(id) ON DELETE CASCADE,
            frontier_id TEXT REFERENCES frontiers(id) ON DELETE SET NULL,
            status TEXT NOT NULL,
            backend TEXT NOT NULL,
            repo_root TEXT,
            worktree_root TEXT,
            worktree_name TEXT,
            head_commit TEXT,
            dirty_paths_json TEXT,
            benchmark_suite TEXT,
            working_directory TEXT NOT NULL,
            argv_json TEXT NOT NULL,
            env_json TEXT NOT NULL,
            started_at TEXT,
            finished_at TEXT
        );

        CREATE TABLE IF NOT EXISTS metrics (
            run_id TEXT NOT NULL REFERENCES runs(run_id) ON DELETE CASCADE,
            metric_key TEXT NOT NULL,
            unit TEXT NOT NULL,
            objective TEXT NOT NULL,
            value REAL NOT NULL
        );

        CREATE TABLE IF NOT EXISTS experiments (
            id TEXT PRIMARY KEY,
            frontier_id TEXT NOT NULL REFERENCES frontiers(id) ON DELETE CASCADE,
            base_checkpoint_id TEXT NOT NULL REFERENCES checkpoints(id) ON DELETE RESTRICT,
            candidate_checkpoint_id TEXT NOT NULL REFERENCES checkpoints(id) ON DELETE RESTRICT,
            change_node_id TEXT NOT NULL REFERENCES nodes(id) ON DELETE RESTRICT,
            run_node_id TEXT NOT NULL REFERENCES nodes(id) ON DELETE RESTRICT,
            run_id TEXT NOT NULL REFERENCES runs(run_id) ON DELETE RESTRICT,
            analysis_node_id TEXT REFERENCES nodes(id) ON DELETE RESTRICT,
            decision_node_id TEXT NOT NULL REFERENCES nodes(id) ON DELETE RESTRICT,
            benchmark_suite TEXT NOT NULL,
            primary_metric_json TEXT NOT NULL,
            supporting_metrics_json TEXT NOT NULL,
            note_summary TEXT NOT NULL,
            note_next_json TEXT NOT NULL,
            verdict TEXT NOT NULL,
            created_at TEXT NOT NULL
        );

        CREATE TABLE IF NOT EXISTS events (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            entity_kind TEXT NOT NULL,
            entity_id TEXT NOT NULL,
            event_kind TEXT NOT NULL,
            payload_json TEXT NOT NULL,
            created_at TEXT NOT NULL
        );
        ",
    )?;
    Ok(())
}
+
/// Inserts a node row plus all of its in-memory annotations.
///
/// The payload schema reference is denormalized into two columns
/// (`payload_schema_namespace` / `payload_schema_version`) in addition to
/// being embedded in `payload_json`.
fn insert_node(tx: &Transaction<'_>, node: &DagNode) -> Result<(), StoreError> {
    let schema_namespace = node
        .payload
        .schema
        .as_ref()
        .map(|schema| schema.namespace.as_str());
    let schema_version = node
        .payload
        .schema
        .as_ref()
        .map(|schema| i64::from(schema.version));
    // Affected-row count is always 1 for a plain INSERT; deliberately ignored.
    let _ = tx.execute(
        "INSERT INTO nodes (
            id,
            class,
            track,
            frontier_id,
            archived,
            title,
            summary,
            payload_schema_namespace,
            payload_schema_version,
            payload_json,
            diagnostics_json,
            agent_session_id,
            created_at,
            updated_at
        ) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14)",
        params![
            node.id.to_string(),
            node.class.as_str(),
            encode_node_track(node.track),
            node.frontier_id.map(|id| id.to_string()),
            i64::from(node.archived),
            node.title.as_str(),
            node.summary.as_ref().map(NonEmptyText::as_str),
            schema_namespace,
            schema_version,
            encode_json(&node.payload)?,
            encode_json(&node.diagnostics)?,
            node.agent_session_id.map(|id| id.to_string()),
            encode_timestamp(node.created_at)?,
            encode_timestamp(node.updated_at)?,
        ],
    )?;
    for annotation in &node.annotations {
        insert_annotation(tx, node.id, annotation)?;
    }
    Ok(())
}
+
/// Inserts a single annotation row belonging to `node_id`.
fn insert_annotation(
    tx: &Transaction<'_>,
    node_id: fidget_spinner_core::NodeId,
    annotation: &NodeAnnotation,
) -> Result<(), StoreError> {
    let _ = tx.execute(
        "INSERT INTO node_annotations (id, node_id, visibility, label, body, created_at)
         VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
        params![
            annotation.id.to_string(),
            node_id.to_string(),
            encode_annotation_visibility(annotation.visibility),
            annotation.label.as_ref().map(NonEmptyText::as_str),
            annotation.body.as_str(),
            encode_timestamp(annotation.created_at)?,
        ],
    )?;
    Ok(())
}
+
+fn insert_edge(tx: &Transaction<'_>, edge: &DagEdge) -> Result<(), StoreError> {
+ let _ = tx.execute(
+ "INSERT OR IGNORE INTO node_edges (source_id, target_id, kind)
+ VALUES (?1, ?2, ?3)",
+ params![
+ edge.source_id.to_string(),
+ edge.target_id.to_string(),
+ encode_edge_kind(edge.kind),
+ ],
+ )?;
+ Ok(())
+}
+
/// Inserts a frontier row.
fn insert_frontier(tx: &Transaction<'_>, frontier: &FrontierRecord) -> Result<(), StoreError> {
    let _ = tx.execute(
        "INSERT INTO frontiers (id, label, root_contract_node_id, status, created_at, updated_at)
         VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
        params![
            frontier.id.to_string(),
            frontier.label.as_str(),
            frontier.root_contract_node_id.to_string(),
            encode_frontier_status(frontier.status),
            encode_timestamp(frontier.created_at)?,
            encode_timestamp(frontier.updated_at)?,
        ],
    )?;
    Ok(())
}
+
/// Inserts a checkpoint row, flattening the snapshot reference into its
/// repo/worktree/commit columns.
fn insert_checkpoint(
    tx: &Transaction<'_>,
    checkpoint: &CheckpointRecord,
) -> Result<(), StoreError> {
    let _ = tx.execute(
        "INSERT INTO checkpoints (
            id,
            frontier_id,
            node_id,
            repo_root,
            worktree_root,
            worktree_name,
            commit_hash,
            disposition,
            summary,
            created_at
        ) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10)",
        params![
            checkpoint.id.to_string(),
            checkpoint.frontier_id.to_string(),
            checkpoint.node_id.to_string(),
            checkpoint.snapshot.repo_root.as_str(),
            checkpoint.snapshot.worktree_root.as_str(),
            checkpoint
                .snapshot
                .worktree_name
                .as_ref()
                .map(NonEmptyText::as_str),
            checkpoint.snapshot.commit_hash.as_str(),
            encode_checkpoint_disposition(checkpoint.disposition),
            checkpoint.summary.as_str(),
            encode_timestamp(checkpoint.created_at)?,
        ],
    )?;
    Ok(())
}
+
/// Inserts a run row plus one `metrics` row per observation.
///
/// The optional code snapshot is flattened into nullable columns; when it is
/// absent all five snapshot columns are NULL. The primary metric is stored
/// first, followed by the supporting metrics, all keyed by `run_id`.
fn insert_run(
    tx: &Transaction<'_>,
    run: &RunRecord,
    primary_metric: &MetricObservation,
    supporting_metrics: &[MetricObservation],
) -> Result<(), StoreError> {
    // Destructure the optional snapshot into per-column Options in one pass.
    let (repo_root, worktree_root, worktree_name, head_commit, dirty_paths) = run
        .code_snapshot
        .as_ref()
        .map_or((None, None, None, None, None), |snapshot| {
            (
                Some(snapshot.repo_root.as_str().to_owned()),
                Some(snapshot.worktree_root.as_str().to_owned()),
                snapshot.worktree_name.as_ref().map(ToOwned::to_owned),
                snapshot.head_commit.as_ref().map(ToOwned::to_owned),
                Some(
                    snapshot
                        .dirty_paths
                        .iter()
                        .map(ToOwned::to_owned)
                        .collect::<Vec<_>>(),
                ),
            )
        });
    // `Option<Result<..>>` is unwrapped by hand so encoding errors propagate.
    let dirty_paths_json = match dirty_paths.as_ref() {
        Some(paths) => Some(encode_json(paths)?),
        None => None,
    };
    let started_at = match run.started_at {
        Some(timestamp) => Some(encode_timestamp(timestamp)?),
        None => None,
    };
    let finished_at = match run.finished_at {
        Some(timestamp) => Some(encode_timestamp(timestamp)?),
        None => None,
    };
    let _ = tx.execute(
        "INSERT INTO runs (
            run_id,
            node_id,
            frontier_id,
            status,
            backend,
            repo_root,
            worktree_root,
            worktree_name,
            head_commit,
            dirty_paths_json,
            benchmark_suite,
            working_directory,
            argv_json,
            env_json,
            started_at,
            finished_at
        ) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16)",
        params![
            run.run_id.to_string(),
            run.node_id.to_string(),
            run.frontier_id.map(|id| id.to_string()),
            encode_run_status(run.status),
            encode_backend(run.backend),
            repo_root,
            worktree_root,
            worktree_name.map(|item| item.to_string()),
            head_commit.map(|item| item.to_string()),
            dirty_paths_json,
            run.benchmark_suite.as_ref().map(NonEmptyText::as_str),
            run.command.working_directory.as_str(),
            encode_json(&run.command.argv)?,
            encode_json(&run.command.env)?,
            started_at,
            finished_at,
        ],
    )?;

    // Primary metric first, then supporting metrics, in one flat table.
    for metric in std::iter::once(primary_metric).chain(supporting_metrics.iter()) {
        let _ = tx.execute(
            "INSERT INTO metrics (run_id, metric_key, unit, objective, value)
             VALUES (?1, ?2, ?3, ?4, ?5)",
            params![
                run.run_id.to_string(),
                metric.metric_key.as_str(),
                encode_metric_unit(metric.unit),
                encode_optimization_objective(metric.objective),
                metric.value,
            ],
        )?;
    }
    Ok(())
}
+
/// Inserts a completed-experiment row.
///
/// Metric observations and next-hypotheses are stored as embedded JSON
/// columns rather than normalized tables.
fn insert_experiment(
    tx: &Transaction<'_>,
    experiment: &CompletedExperiment,
) -> Result<(), StoreError> {
    let _ = tx.execute(
        "INSERT INTO experiments (
            id,
            frontier_id,
            base_checkpoint_id,
            candidate_checkpoint_id,
            change_node_id,
            run_node_id,
            run_id,
            analysis_node_id,
            decision_node_id,
            benchmark_suite,
            primary_metric_json,
            supporting_metrics_json,
            note_summary,
            note_next_json,
            verdict,
            created_at
        ) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16)",
        params![
            experiment.id.to_string(),
            experiment.frontier_id.to_string(),
            experiment.base_checkpoint_id.to_string(),
            experiment.candidate_checkpoint_id.to_string(),
            experiment.change_node_id.to_string(),
            experiment.run_node_id.to_string(),
            experiment.run_id.to_string(),
            experiment.analysis_node_id.map(|id| id.to_string()),
            experiment.decision_node_id.to_string(),
            experiment.result.benchmark_suite.as_str(),
            encode_json(&experiment.result.primary_metric)?,
            encode_json(&experiment.result.supporting_metrics)?,
            experiment.note.summary.as_str(),
            encode_json(&experiment.note.next_hypotheses)?,
            encode_frontier_verdict(experiment.verdict),
            encode_timestamp(experiment.created_at)?,
        ],
    )?;
    Ok(())
}
+
+fn insert_event(
+ tx: &Transaction<'_>,
+ entity_kind: &str,
+ entity_id: &str,
+ event_kind: &str,
+ payload: Value,
+) -> Result<(), StoreError> {
+ let _ = tx.execute(
+ "INSERT INTO events (entity_kind, entity_id, event_kind, payload_json, created_at)
+ VALUES (?1, ?2, ?3, ?4, ?5)",
+ params![
+ entity_kind,
+ entity_id,
+ event_kind,
+ payload.to_string(),
+ encode_timestamp(OffsetDateTime::now_utc())?,
+ ],
+ )?;
+ Ok(())
+}
+
+fn touch_frontier(
+ tx: &Transaction<'_>,
+ frontier_id: fidget_spinner_core::FrontierId,
+) -> Result<(), StoreError> {
+ let _ = tx.execute(
+ "UPDATE frontiers SET updated_at = ?1 WHERE id = ?2",
+ params![
+ encode_timestamp(OffsetDateTime::now_utc())?,
+ frontier_id.to_string()
+ ],
+ )?;
+ Ok(())
+}
+
+fn demote_previous_champion(
+ tx: &Transaction<'_>,
+ frontier_id: fidget_spinner_core::FrontierId,
+) -> Result<(), StoreError> {
+ let _ = tx.execute(
+ "UPDATE checkpoints
+ SET disposition = 'baseline'
+ WHERE frontier_id = ?1 AND disposition = 'champion'",
+ params![frontier_id.to_string()],
+ )?;
+ Ok(())
+}
+
/// Maps a node row into a [`DagNode`].
///
/// Column indices are tied to the 14-column SELECT in `get_node`
/// (0 id .. 13 updated_at); columns 7/8 (denormalized schema namespace and
/// version) are skipped because the schema is already embedded in
/// `payload_json`. Annotations are left empty here and hydrated by the
/// caller. Domain-level failures are wrapped via `to_sql_conversion_error`
/// so they can flow out of the `rusqlite` row-mapping closure.
fn read_node_row(row: &rusqlite::Row<'_>) -> Result<DagNode, rusqlite::Error> {
    let payload_json = row.get::<_, String>(9)?;
    let diagnostics_json = row.get::<_, String>(10)?;
    let payload = decode_json::<NodePayload>(&payload_json).map_err(to_sql_conversion_error)?;
    let diagnostics =
        decode_json::<NodeDiagnostics>(&diagnostics_json).map_err(to_sql_conversion_error)?;
    Ok(DagNode {
        id: parse_node_id(&row.get::<_, String>(0)?).map_err(to_sql_conversion_error)?,
        class: parse_node_class(&row.get::<_, String>(1)?).map_err(to_sql_conversion_error)?,
        track: parse_node_track(&row.get::<_, String>(2)?).map_err(to_sql_conversion_error)?,
        frontier_id: row
            .get::<_, Option<String>>(3)?
            .map(|raw| parse_frontier_id(&raw))
            .transpose()
            .map_err(to_sql_conversion_error)?,
        archived: row.get::<_, i64>(4)? != 0,
        title: NonEmptyText::new(row.get::<_, String>(5)?).map_err(core_to_sql_conversion_error)?,
        summary: row
            .get::<_, Option<String>>(6)?
            .map(NonEmptyText::new)
            .transpose()
            .map_err(core_to_sql_conversion_error)?,
        payload,
        // Hydrated separately by the caller (see `get_node`).
        annotations: Vec::new(),
        diagnostics,
        agent_session_id: row
            .get::<_, Option<String>>(11)?
            .map(|raw| parse_agent_session_id(&raw))
            .transpose()
            .map_err(to_sql_conversion_error)?,
        created_at: decode_timestamp(&row.get::<_, String>(12)?)
            .map_err(to_sql_conversion_error)?,
        updated_at: decode_timestamp(&row.get::<_, String>(13)?)
            .map_err(to_sql_conversion_error)?,
    })
}
+
/// Maps a frontier row into a [`FrontierRecord`].
///
/// Column indices are tied to the 6-column SELECT used by `list_frontiers`
/// and `load_frontier` (id, label, root_contract_node_id, status,
/// created_at, updated_at).
fn read_frontier_row(row: &rusqlite::Row<'_>) -> Result<FrontierRecord, StoreError> {
    Ok(FrontierRecord {
        id: parse_frontier_id(&row.get::<_, String>(0)?)?,
        label: NonEmptyText::new(row.get::<_, String>(1)?)?,
        root_contract_node_id: parse_node_id(&row.get::<_, String>(2)?)?,
        status: parse_frontier_status(&row.get::<_, String>(3)?)?,
        created_at: decode_timestamp(&row.get::<_, String>(4)?)?,
        updated_at: decode_timestamp(&row.get::<_, String>(5)?)?,
    })
}
+
/// Maps a checkpoint row into a [`CheckpointRecord`].
///
/// Column indices are tied to the 10-column SELECT in `load_checkpoint`;
/// columns 3..=6 are reassembled into the nested snapshot reference.
/// Domain-level failures are wrapped via the `*_to_sql_conversion_error`
/// helpers so they can flow out of a `rusqlite` row-mapping closure.
fn read_checkpoint_row(row: &rusqlite::Row<'_>) -> Result<CheckpointRecord, rusqlite::Error> {
    Ok(CheckpointRecord {
        id: parse_checkpoint_id(&row.get::<_, String>(0)?).map_err(to_sql_conversion_error)?,
        frontier_id: parse_frontier_id(&row.get::<_, String>(1)?)
            .map_err(to_sql_conversion_error)?,
        node_id: parse_node_id(&row.get::<_, String>(2)?).map_err(to_sql_conversion_error)?,
        snapshot: CheckpointSnapshotRef {
            repo_root: Utf8PathBuf::from(row.get::<_, String>(3)?),
            worktree_root: Utf8PathBuf::from(row.get::<_, String>(4)?),
            worktree_name: row
                .get::<_, Option<String>>(5)?
                .map(NonEmptyText::new)
                .transpose()
                .map_err(core_to_sql_conversion_error)?,
            commit_hash: GitCommitHash::new(row.get::<_, String>(6)?)
                .map_err(core_to_sql_conversion_error)?,
        },
        disposition: parse_checkpoint_disposition(&row.get::<_, String>(7)?)
            .map_err(to_sql_conversion_error)?,
        summary: NonEmptyText::new(row.get::<_, String>(8)?)
            .map_err(core_to_sql_conversion_error)?,
        created_at: decode_timestamp(&row.get::<_, String>(9)?).map_err(to_sql_conversion_error)?,
    })
}
+
/// Serializes a [`FrontierContract`] into the JSON payload stored on the
/// frontier's root contract node.
///
/// The key names here are part of the persisted payload format — do not
/// rename them without a data migration.
fn frontier_contract_payload(contract: &FrontierContract) -> Result<JsonObject, StoreError> {
    json_object(json!({
        "objective": contract.objective.as_str(),
        "benchmark_suites": contract
            .evaluation
            .benchmark_suites
            .iter()
            .map(NonEmptyText::as_str)
            .collect::<Vec<_>>(),
        "primary_metric": metric_spec_json(&contract.evaluation.primary_metric),
        "supporting_metrics": contract
            .evaluation
            .supporting_metrics
            .iter()
            .map(metric_spec_json)
            .collect::<Vec<_>>(),
        "promotion_criteria": contract
            .promotion_criteria
            .iter()
            .map(NonEmptyText::as_str)
            .collect::<Vec<_>>(),
    }))
}
+
/// Serializes a [`MetricSpec`] into its stored JSON form; the key names are
/// part of the persisted payload format.
fn metric_spec_json(metric: &MetricSpec) -> Value {
    json!({
        "metric_key": metric.metric_key.as_str(),
        "unit": encode_metric_unit(metric.unit),
        "objective": encode_optimization_objective(metric.objective),
    })
}
+
+fn json_object(value: Value) -> Result<JsonObject, StoreError> {
+ match value {
+ Value::Object(map) => Ok(map),
+ other => Err(StoreError::Json(serde_json::Error::io(io::Error::new(
+ io::ErrorKind::InvalidInput,
+ format!("expected JSON object, got {other:?}"),
+ )))),
+ }
+}
+
+fn write_json_file<T: Serialize>(path: &Utf8Path, value: &T) -> Result<(), StoreError> {
+ let serialized = serde_json::to_string_pretty(value)?;
+ fs::write(path.as_std_path(), serialized)?;
+ Ok(())
+}
+
+fn read_json_file<T: for<'de> Deserialize<'de>>(path: &Utf8Path) -> Result<T, StoreError> {
+ let bytes = fs::read(path.as_std_path())?;
+ serde_json::from_slice(&bytes).map_err(StoreError::from)
+}
+
+fn encode_json<T: Serialize>(value: &T) -> Result<String, StoreError> {
+ serde_json::to_string(value).map_err(StoreError::from)
+}
+
+fn decode_json<T: for<'de> Deserialize<'de>>(raw: &str) -> Result<T, StoreError> {
+ serde_json::from_str(raw).map_err(StoreError::from)
+}
+
+fn encode_timestamp(timestamp: OffsetDateTime) -> Result<String, StoreError> {
+ timestamp.format(&Rfc3339).map_err(StoreError::from)
+}
+
+fn decode_timestamp(raw: &str) -> Result<OffsetDateTime, StoreError> {
+ OffsetDateTime::parse(raw, &Rfc3339).map_err(StoreError::from)
+}
+
+fn state_root(project_root: &Utf8Path) -> Utf8PathBuf {
+ project_root.join(STORE_DIR_NAME)
+}
+
+#[must_use]
+pub fn discover_project_root(path: impl AsRef<Utf8Path>) -> Option<Utf8PathBuf> {
+ let mut cursor = discovery_start(path.as_ref());
+ loop {
+ if state_root(&cursor).exists() {
+ return Some(cursor);
+ }
+ let parent = cursor.parent()?;
+ cursor = parent.to_path_buf();
+ }
+}
+
+fn discovery_start(path: &Utf8Path) -> Utf8PathBuf {
+ match fs::metadata(path.as_std_path()) {
+ Ok(metadata) if metadata.is_file() => path
+ .parent()
+ .map_or_else(|| path.to_path_buf(), Utf8Path::to_path_buf),
+ _ => path.to_path_buf(),
+ }
+}
+
/// Builds a checkpoint seed from the git state at `project_root`.
///
/// Returns `Ok(None)` when `git rev-parse --show-toplevel` fails (i.e. the
/// directory is not inside a git repository). A missing `HEAD` after that is
/// treated as an error, since a repository was detected.
///
/// # Errors
///
/// [`StoreError::GitInspectionFailed`] when `HEAD` cannot be resolved inside
/// a detected repository; text-validation errors from `NonEmptyText` /
/// `GitCommitHash` construction otherwise.
fn auto_capture_checkpoint_seed(
    project_root: &Utf8Path,
    summary: NonEmptyText,
) -> Result<Option<CheckpointSeed>, StoreError> {
    let top_level = git_output(project_root, &["rev-parse", "--show-toplevel"])?;
    let Some(repo_root) = top_level else {
        return Ok(None);
    };
    let commit_hash = git_output(project_root, &["rev-parse", "HEAD"])?
        .ok_or_else(|| StoreError::GitInspectionFailed(project_root.to_path_buf()))?;
    // Branch name; `git_output` yields None on failure, kept optional here.
    let worktree_name = git_output(project_root, &["rev-parse", "--abbrev-ref", "HEAD"])?;
    Ok(Some(CheckpointSeed {
        summary,
        snapshot: CheckpointSnapshotRef {
            repo_root: Utf8PathBuf::from(repo_root),
            worktree_root: project_root.to_path_buf(),
            worktree_name: worktree_name.map(NonEmptyText::new).transpose()?,
            commit_hash: GitCommitHash::new(commit_hash)?,
        },
    }))
}
+
+fn git_output(project_root: &Utf8Path, args: &[&str]) -> Result<Option<String>, StoreError> {
+ let output = Command::new("git")
+ .arg("-C")
+ .arg(project_root.as_str())
+ .args(args)
+ .output()?;
+ if !output.status.success() {
+ return Ok(None);
+ }
+ let text = String::from_utf8_lossy(&output.stdout).trim().to_owned();
+ if text.is_empty() {
+ return Ok(None);
+ }
+ Ok(Some(text))
+}
+
+fn to_sql_conversion_error(error: StoreError) -> rusqlite::Error {
+ rusqlite::Error::FromSqlConversionFailure(0, rusqlite::types::Type::Text, Box::new(error))
+}
+
+fn core_to_sql_conversion_error(error: fidget_spinner_core::CoreError) -> rusqlite::Error {
+ to_sql_conversion_error(StoreError::from(error))
+}
+
+fn parse_uuid(raw: &str) -> Result<Uuid, StoreError> {
+ Uuid::parse_str(raw).map_err(StoreError::from)
+}
+
+fn parse_node_id(raw: &str) -> Result<fidget_spinner_core::NodeId, StoreError> {
+ Ok(fidget_spinner_core::NodeId::from_uuid(parse_uuid(raw)?))
+}
+
+fn parse_frontier_id(raw: &str) -> Result<fidget_spinner_core::FrontierId, StoreError> {
+ Ok(fidget_spinner_core::FrontierId::from_uuid(parse_uuid(raw)?))
+}
+
+fn parse_checkpoint_id(raw: &str) -> Result<fidget_spinner_core::CheckpointId, StoreError> {
+ Ok(fidget_spinner_core::CheckpointId::from_uuid(parse_uuid(
+ raw,
+ )?))
+}
+
+fn parse_agent_session_id(raw: &str) -> Result<fidget_spinner_core::AgentSessionId, StoreError> {
+ Ok(fidget_spinner_core::AgentSessionId::from_uuid(parse_uuid(
+ raw,
+ )?))
+}
+
+fn parse_annotation_id(raw: &str) -> Result<fidget_spinner_core::AnnotationId, StoreError> {
+ Ok(fidget_spinner_core::AnnotationId::from_uuid(parse_uuid(
+ raw,
+ )?))
+}
+
+fn parse_node_class(raw: &str) -> Result<NodeClass, StoreError> {
+ match raw {
+ "contract" => Ok(NodeClass::Contract),
+ "change" => Ok(NodeClass::Change),
+ "run" => Ok(NodeClass::Run),
+ "analysis" => Ok(NodeClass::Analysis),
+ "decision" => Ok(NodeClass::Decision),
+ "research" => Ok(NodeClass::Research),
+ "enabling" => Ok(NodeClass::Enabling),
+ "note" => Ok(NodeClass::Note),
+ other => Err(StoreError::Json(serde_json::Error::io(io::Error::new(
+ io::ErrorKind::InvalidData,
+ format!("unknown node class `{other}`"),
+ )))),
+ }
+}
+
+fn encode_node_track(track: fidget_spinner_core::NodeTrack) -> &'static str {
+ match track {
+ fidget_spinner_core::NodeTrack::CorePath => "core-path",
+ fidget_spinner_core::NodeTrack::OffPath => "off-path",
+ }
+}
+
+fn parse_node_track(raw: &str) -> Result<fidget_spinner_core::NodeTrack, StoreError> {
+ match raw {
+ "core-path" => Ok(fidget_spinner_core::NodeTrack::CorePath),
+ "off-path" => Ok(fidget_spinner_core::NodeTrack::OffPath),
+ other => Err(StoreError::Json(serde_json::Error::io(io::Error::new(
+ io::ErrorKind::InvalidData,
+ format!("unknown node track `{other}`"),
+ )))),
+ }
+}
+
+fn encode_annotation_visibility(visibility: AnnotationVisibility) -> &'static str {
+ match visibility {
+ AnnotationVisibility::HiddenByDefault => "hidden",
+ AnnotationVisibility::Visible => "visible",
+ }
+}
+
+fn parse_annotation_visibility(raw: &str) -> Result<AnnotationVisibility, StoreError> {
+ match raw {
+ "hidden" => Ok(AnnotationVisibility::HiddenByDefault),
+ "visible" => Ok(AnnotationVisibility::Visible),
+ other => Err(StoreError::Json(serde_json::Error::io(io::Error::new(
+ io::ErrorKind::InvalidData,
+ format!("unknown annotation visibility `{other}`"),
+ )))),
+ }
+}
+
+fn encode_edge_kind(kind: EdgeKind) -> &'static str {
+ match kind {
+ EdgeKind::Lineage => "lineage",
+ EdgeKind::Evidence => "evidence",
+ EdgeKind::Comparison => "comparison",
+ EdgeKind::Supersedes => "supersedes",
+ EdgeKind::Annotation => "annotation",
+ }
+}
+
+fn encode_frontier_status(status: FrontierStatus) -> &'static str {
+ match status {
+ FrontierStatus::Exploring => "exploring",
+ FrontierStatus::Paused => "paused",
+ FrontierStatus::Saturated => "saturated",
+ FrontierStatus::Archived => "archived",
+ }
+}
+
+fn parse_frontier_status(raw: &str) -> Result<FrontierStatus, StoreError> {
+ match raw {
+ "exploring" => Ok(FrontierStatus::Exploring),
+ "paused" => Ok(FrontierStatus::Paused),
+ "saturated" => Ok(FrontierStatus::Saturated),
+ "archived" => Ok(FrontierStatus::Archived),
+ other => Err(StoreError::Json(serde_json::Error::io(io::Error::new(
+ io::ErrorKind::InvalidData,
+ format!("unknown frontier status `{other}`"),
+ )))),
+ }
+}
+
+fn encode_checkpoint_disposition(disposition: CheckpointDisposition) -> &'static str {
+ match disposition {
+ CheckpointDisposition::Champion => "champion",
+ CheckpointDisposition::FrontierCandidate => "frontier-candidate",
+ CheckpointDisposition::Baseline => "baseline",
+ CheckpointDisposition::DeadEnd => "dead-end",
+ CheckpointDisposition::Archived => "archived",
+ }
+}
+
+fn parse_checkpoint_disposition(raw: &str) -> Result<CheckpointDisposition, StoreError> {
+ match raw {
+ "champion" => Ok(CheckpointDisposition::Champion),
+ "frontier-candidate" => Ok(CheckpointDisposition::FrontierCandidate),
+ "baseline" => Ok(CheckpointDisposition::Baseline),
+ "dead-end" => Ok(CheckpointDisposition::DeadEnd),
+ "archived" => Ok(CheckpointDisposition::Archived),
+ other => Err(StoreError::Json(serde_json::Error::io(io::Error::new(
+ io::ErrorKind::InvalidData,
+ format!("unknown checkpoint disposition `{other}`"),
+ )))),
+ }
+}
+
+fn encode_run_status(status: RunStatus) -> &'static str {
+ match status {
+ RunStatus::Queued => "queued",
+ RunStatus::Running => "running",
+ RunStatus::Succeeded => "succeeded",
+ RunStatus::Failed => "failed",
+ RunStatus::Cancelled => "cancelled",
+ }
+}
+
+fn encode_backend(backend: ExecutionBackend) -> &'static str {
+ match backend {
+ ExecutionBackend::LocalProcess => "local-process",
+ ExecutionBackend::WorktreeProcess => "worktree-process",
+ ExecutionBackend::SshProcess => "ssh-process",
+ }
+}
+
+fn encode_metric_unit(unit: MetricUnit) -> &'static str {
+ match unit {
+ MetricUnit::Seconds => "seconds",
+ MetricUnit::Bytes => "bytes",
+ MetricUnit::Count => "count",
+ MetricUnit::Ratio => "ratio",
+ MetricUnit::Custom => "custom",
+ }
+}
+
+fn encode_optimization_objective(objective: OptimizationObjective) -> &'static str {
+ match objective {
+ OptimizationObjective::Minimize => "minimize",
+ OptimizationObjective::Maximize => "maximize",
+ OptimizationObjective::Target => "target",
+ }
+}
+
+fn encode_frontier_verdict(verdict: FrontierVerdict) -> &'static str {
+ match verdict {
+ FrontierVerdict::PromoteToChampion => "promote-to-champion",
+ FrontierVerdict::KeepOnFrontier => "keep-on-frontier",
+ FrontierVerdict::RevertToChampion => "revert-to-champion",
+ FrontierVerdict::ArchiveDeadEnd => "archive-dead-end",
+ FrontierVerdict::NeedsMoreEvidence => "needs-more-evidence",
+ }
+}
+
#[cfg(test)]
mod tests {
    use serde_json::json;

    use super::{
        CreateFrontierRequest, CreateNodeRequest, ListNodesQuery, PROJECT_SCHEMA_NAME, ProjectStore,
    };
    use fidget_spinner_core::{
        CheckpointSnapshotRef, EvaluationProtocol, FrontierContract, MetricSpec, MetricUnit,
        NodeAnnotation, NodeClass, NodePayload, NonEmptyText, OptimizationObjective,
    };

    // Builds a unique throwaway project root under the OS temp dir; the
    // UUIDv7 suffix keeps concurrent test runs from colliding.
    // NOTE(review): these directories are never removed after a test run —
    // consider a cleanup/drop guard to avoid temp-dir buildup over time.
    fn temp_project_root(label: &str) -> camino::Utf8PathBuf {
        let mut path = std::env::temp_dir();
        path.push(format!(
            "fidget_spinner_store_test_{}_{}",
            label,
            uuid::Uuid::now_v7()
        ));
        camino::Utf8PathBuf::from(path.to_string_lossy().into_owned())
    }

    // `ProjectStore::init` should materialize the schema file (named by
    // PROJECT_SCHEMA_NAME) under the store's state root.
    #[test]
    fn init_writes_model_facing_schema_file() -> Result<(), super::StoreError> {
        let root = temp_project_root("schema");
        let store = ProjectStore::init(
            &root,
            NonEmptyText::new("test project")?,
            NonEmptyText::new("local.test")?,
        )?;

        assert!(store.state_root().join(PROJECT_SCHEMA_NAME).exists());
        Ok(())
    }

    // A node created with a hidden annotation should round-trip through
    // add_node/get_node with the annotation and its visibility intact.
    #[test]
    fn add_node_persists_hidden_annotations() -> Result<(), super::StoreError> {
        let root = temp_project_root("notes");
        let mut store = ProjectStore::init(
            &root,
            NonEmptyText::new("test project")?,
            NonEmptyText::new("local.test")?,
        )?;
        let node = store.add_node(CreateNodeRequest {
            class: NodeClass::Research,
            frontier_id: None,
            title: NonEmptyText::new("feature sketch")?,
            summary: Some(NonEmptyText::new("research note")?),
            payload: NodePayload::with_schema(
                store.schema().schema_ref(),
                super::json_object(json!({"body": "freeform"}))?,
            ),
            annotations: vec![NodeAnnotation::hidden(NonEmptyText::new(
                "private scratch",
            )?)],
            attachments: Vec::new(),
        })?;
        // Re-read from the store to exercise the persisted representation,
        // not just the in-memory return value of add_node.
        let loaded = store
            .get_node(node.id)?
            .ok_or(super::StoreError::NodeNotFound(node.id))?;

        assert_eq!(loaded.annotations.len(), 1);
        assert_eq!(
            loaded.annotations[0].visibility,
            fidget_spinner_core::AnnotationVisibility::HiddenByDefault
        );
        Ok(())
    }

    // Creating a frontier with an initial checkpoint seed should surface
    // that checkpoint as the frontier's champion in the projection.
    #[test]
    fn frontier_projection_tracks_initial_champion() -> Result<(), super::StoreError> {
        let root = temp_project_root("frontier");
        let mut store = ProjectStore::init(
            &root,
            NonEmptyText::new("test project")?,
            NonEmptyText::new("local.test")?,
        )?;
        let projection = store.create_frontier(CreateFrontierRequest {
            label: NonEmptyText::new("optimization frontier")?,
            contract_title: NonEmptyText::new("contract root")?,
            contract_summary: None,
            contract: FrontierContract {
                objective: NonEmptyText::new("improve wall time")?,
                evaluation: EvaluationProtocol {
                    benchmark_suites: std::collections::BTreeSet::from([NonEmptyText::new(
                        "smoke",
                    )?]),
                    primary_metric: MetricSpec {
                        metric_key: NonEmptyText::new("wall_clock_s")?,
                        unit: MetricUnit::Seconds,
                        objective: OptimizationObjective::Minimize,
                    },
                    supporting_metrics: std::collections::BTreeSet::new(),
                },
                promotion_criteria: vec![NonEmptyText::new("strict speedup")?],
            },
            // The seed checkpoint below is what should become champion.
            initial_checkpoint: Some(super::CheckpointSeed {
                summary: NonEmptyText::new("seed")?,
                snapshot: CheckpointSnapshotRef {
                    repo_root: root.clone(),
                    worktree_root: root,
                    worktree_name: Some(NonEmptyText::new("main")?),
                    commit_hash: fidget_spinner_core::GitCommitHash::new("0123456789abcdef")?,
                },
            }),
        })?;

        assert!(projection.champion_checkpoint_id.is_some());
        Ok(())
    }

    // Archived nodes must be excluded from a default listing and only
    // appear when the query opts in via include_archived.
    #[test]
    fn list_nodes_hides_archived_by_default() -> Result<(), super::StoreError> {
        let root = temp_project_root("archive");
        let mut store = ProjectStore::init(
            &root,
            NonEmptyText::new("test project")?,
            NonEmptyText::new("local.test")?,
        )?;
        let node = store.add_node(CreateNodeRequest {
            class: NodeClass::Note,
            frontier_id: None,
            title: NonEmptyText::new("quick note")?,
            summary: None,
            payload: NodePayload::with_schema(
                store.schema().schema_ref(),
                super::json_object(json!({"body": "hello"}))?,
            ),
            annotations: Vec::new(),
            attachments: Vec::new(),
        })?;
        store.archive_node(node.id)?;

        let visible = store.list_nodes(ListNodesQuery::default())?;
        let hidden = store.list_nodes(ListNodesQuery {
            include_archived: true,
            ..ListNodesQuery::default()
        })?;

        assert!(visible.is_empty());
        assert_eq!(hidden.len(), 1);
        Ok(())
    }

    // Filtering nodes by frontier id should include the frontier's own
    // root contract node, even though no other nodes were added.
    #[test]
    fn frontier_filter_includes_root_contract_node() -> Result<(), super::StoreError> {
        let root = temp_project_root("contract-filter");
        let mut store = ProjectStore::init(
            &root,
            NonEmptyText::new("test project")?,
            NonEmptyText::new("local.test")?,
        )?;
        let projection = store.create_frontier(CreateFrontierRequest {
            label: NonEmptyText::new("frontier")?,
            contract_title: NonEmptyText::new("root contract")?,
            contract_summary: None,
            contract: FrontierContract {
                objective: NonEmptyText::new("optimize")?,
                evaluation: EvaluationProtocol {
                    benchmark_suites: std::collections::BTreeSet::from([NonEmptyText::new(
                        "smoke",
                    )?]),
                    primary_metric: MetricSpec {
                        metric_key: NonEmptyText::new("wall_clock_s")?,
                        unit: MetricUnit::Seconds,
                        objective: OptimizationObjective::Minimize,
                    },
                    supporting_metrics: std::collections::BTreeSet::new(),
                },
                promotion_criteria: vec![NonEmptyText::new("faster")?],
            },
            initial_checkpoint: None,
        })?;

        let nodes = store.list_nodes(ListNodesQuery {
            frontier_id: Some(projection.frontier.id),
            ..ListNodesQuery::default()
        })?;

        assert_eq!(nodes.len(), 1);
        assert_eq!(nodes[0].class, NodeClass::Contract);
        Ok(())
    }
}