swarm repositories / source
aboutsummaryrefslogtreecommitdiff
path: root/crates
diff options
context:
space:
mode:
Diffstat (limited to 'crates')
-rw-r--r--crates/fidget-spinner-cli/Cargo.toml12
-rw-r--r--crates/fidget-spinner-cli/src/main.rs273
-rw-r--r--crates/fidget-spinner-cli/src/ui.rs600
-rw-r--r--crates/fidget-spinner-core/Cargo.toml6
-rw-r--r--crates/fidget-spinner-core/src/error.rs6
-rw-r--r--crates/fidget-spinner-core/src/lib.rs11
-rw-r--r--crates/fidget-spinner-core/src/model.rs178
-rw-r--r--crates/fidget-spinner-store-sqlite/Cargo.toml6
-rw-r--r--crates/fidget-spinner-store-sqlite/src/lib.rs285
9 files changed, 1334 insertions, 43 deletions
diff --git a/crates/fidget-spinner-cli/Cargo.toml b/crates/fidget-spinner-cli/Cargo.toml
index 51d3cd8..bf8ffb7 100644
--- a/crates/fidget-spinner-cli/Cargo.toml
+++ b/crates/fidget-spinner-cli/Cargo.toml
@@ -1,22 +1,30 @@
[package]
name = "fidget-spinner-cli"
-description = "Thin local entrypoint for Fidget Spinner"
+categories.workspace = true
+description = "CLI, MCP server, and local navigator for Fidget Spinner"
edition.workspace = true
+keywords.workspace = true
license.workspace = true
publish = false
+readme.workspace = true
+repository.workspace = true
rust-version.workspace = true
version.workspace = true
[dependencies]
+axum.workspace = true
camino.workspace = true
clap.workspace = true
dirs.workspace = true
fidget-spinner-core = { path = "../fidget-spinner-core" }
fidget-spinner-store-sqlite = { path = "../fidget-spinner-store-sqlite" }
-libmcp = { path = "../../../libmcp/crates/libmcp" }
+linkify.workspace = true
+libmcp = { git = "https://git.swarm.moe/libmcp.git", rev = "84e898d9ba699451d5d13fe384e7bbe220564bc1" }
+maud.workspace = true
serde.workspace = true
serde_json.workspace = true
time.workspace = true
+tokio.workspace = true
uuid.workspace = true
[lints]
diff --git a/crates/fidget-spinner-cli/src/main.rs b/crates/fidget-spinner-cli/src/main.rs
index 9b2b8ae..fe4cb5f 100644
--- a/crates/fidget-spinner-cli/src/main.rs
+++ b/crates/fidget-spinner-cli/src/main.rs
@@ -1,8 +1,10 @@
mod bundled_skill;
mod mcp;
+mod ui;
use std::collections::{BTreeMap, BTreeSet};
use std::fs;
+use std::net::SocketAddr;
use std::path::{Path, PathBuf};
use camino::{Utf8Path, Utf8PathBuf};
@@ -10,7 +12,7 @@ use clap::{Args, Parser, Subcommand, ValueEnum};
use fidget_spinner_core::{
AnnotationVisibility, CodeSnapshotRef, CommandRecipe, ExecutionBackend, FrontierContract,
FrontierNote, FrontierVerdict, GitCommitHash, MetricObservation, MetricSpec, MetricUnit,
- NodeAnnotation, NodeClass, NodePayload, NonEmptyText, OptimizationObjective,
+ NodeAnnotation, NodeClass, NodePayload, NonEmptyText, OptimizationObjective, TagName,
};
use fidget_spinner_store_sqlite::{
CloseExperimentRequest, CreateFrontierRequest, CreateNodeRequest, EdgeAttachment,
@@ -21,7 +23,11 @@ use serde_json::{Map, Value, json};
use uuid::Uuid;
#[derive(Parser)]
-#[command(author, version, about = "Fidget Spinner local project CLI")]
+#[command(
+ author,
+ version,
+ about = "Fidget Spinner CLI, MCP server, and local navigator"
+)]
struct Cli {
#[command(subcommand)]
command: Command,
@@ -29,29 +35,48 @@ struct Cli {
#[derive(Subcommand)]
enum Command {
+ /// Initialize a project-local `.fidget_spinner/` store.
Init(InitArgs),
+ /// Read the local project payload schema.
Schema {
#[command(subcommand)]
command: SchemaCommand,
},
+ /// Create and inspect frontiers.
Frontier {
#[command(subcommand)]
command: FrontierCommand,
},
+ /// Create, inspect, and mutate DAG nodes.
Node {
#[command(subcommand)]
command: NodeCommand,
},
+ /// Record terse off-path notes.
Note(NoteCommand),
+ /// Manage the repo-local tag registry.
+ Tag {
+ #[command(subcommand)]
+ command: TagCommand,
+ },
+ /// Record off-path research and enabling work.
Research(ResearchCommand),
+ /// Close a core-path experiment atomically.
Experiment {
#[command(subcommand)]
command: ExperimentCommand,
},
+ /// Serve the hardened stdio MCP endpoint.
Mcp {
#[command(subcommand)]
command: McpCommand,
},
+ /// Serve the minimal local web navigator.
+ Ui {
+ #[command(subcommand)]
+ command: UiCommand,
+ },
+ /// Inspect or install bundled Codex skills.
Skill {
#[command(subcommand)]
command: SkillCommand,
@@ -60,22 +85,28 @@ enum Command {
#[derive(Args)]
struct InitArgs {
+ /// Project root to initialize.
#[arg(long, default_value = ".")]
project: PathBuf,
+ /// Human-facing project name. Defaults to the directory name.
#[arg(long)]
name: Option<String>,
+ /// Payload schema namespace written into `.fidget_spinner/schema.json`.
#[arg(long, default_value = "local.project")]
namespace: String,
}
#[derive(Subcommand)]
enum SchemaCommand {
+ /// Show the current project schema as JSON.
Show(ProjectArg),
}
#[derive(Subcommand)]
enum FrontierCommand {
+ /// Create a frontier and root contract node.
Init(FrontierInitArgs),
+ /// Show one frontier projection or list frontiers when omitted.
Status(FrontierStatusArgs),
}
@@ -115,10 +146,15 @@ struct FrontierStatusArgs {
#[derive(Subcommand)]
enum NodeCommand {
+ /// Create a generic DAG node.
Add(NodeAddArgs),
+ /// List recent nodes.
List(NodeListArgs),
+ /// Show one node in full.
Show(NodeShowArgs),
+ /// Attach an annotation to a node.
Annotate(NodeAnnotateArgs),
+ /// Archive a node without deleting it.
Archive(NodeArchiveArgs),
}
@@ -138,6 +174,8 @@ struct NodeAddArgs {
payload_json: Option<String>,
#[arg(long = "payload-file")]
payload_file: Option<PathBuf>,
+ #[command(flatten)]
+ tag_selection: ExplicitTagSelectionArgs,
#[arg(long = "field")]
fields: Vec<String>,
#[arg(long = "annotation")]
@@ -154,12 +192,22 @@ struct NodeListArgs {
frontier: Option<String>,
#[arg(long, value_enum)]
class: Option<CliNodeClass>,
+ #[arg(long = "tag")]
+ tags: Vec<String>,
#[arg(long)]
include_archived: bool,
#[arg(long, default_value_t = 20)]
limit: u32,
}
+#[derive(Args, Default)]
+struct ExplicitTagSelectionArgs {
+ #[arg(long = "tag")]
+ tags: Vec<String>,
+ #[arg(long, conflicts_with = "tags")]
+ no_tags: bool,
+}
+
#[derive(Args)]
struct NodeShowArgs {
#[command(flatten)]
@@ -198,9 +246,18 @@ struct NoteCommand {
#[derive(Subcommand)]
enum NoteSubcommand {
+ /// Record a quick off-path note.
Quick(QuickNoteArgs),
}
+#[derive(Subcommand)]
+enum TagCommand {
+ /// Register a new repo-local tag.
+ Add(TagAddArgs),
+ /// List registered repo-local tags.
+ List(ProjectArg),
+}
+
#[derive(Args)]
struct ResearchCommand {
#[command(subcommand)]
@@ -209,6 +266,7 @@ struct ResearchCommand {
#[derive(Subcommand)]
enum ResearchSubcommand {
+ /// Record off-path research or enabling work.
Add(QuickResearchArgs),
}
@@ -222,11 +280,23 @@ struct QuickNoteArgs {
title: String,
#[arg(long)]
body: String,
+ #[command(flatten)]
+ tag_selection: ExplicitTagSelectionArgs,
#[arg(long = "parent")]
parents: Vec<String>,
}
#[derive(Args)]
+struct TagAddArgs {
+ #[command(flatten)]
+ project: ProjectArg,
+ #[arg(long)]
+ name: String,
+ #[arg(long)]
+ description: String,
+}
+
+#[derive(Args)]
struct QuickResearchArgs {
#[command(flatten)]
project: ProjectArg,
@@ -244,16 +314,24 @@ struct QuickResearchArgs {
#[derive(Subcommand)]
enum ExperimentCommand {
+ /// Close a core-path experiment with checkpoint, run, note, and verdict.
Close(ExperimentCloseArgs),
}
#[derive(Subcommand)]
enum McpCommand {
+ /// Serve the public stdio MCP host. If `--project` is omitted, the host starts unbound.
Serve(McpServeArgs),
#[command(hide = true)]
Worker(McpWorkerArgs),
}
+#[derive(Subcommand)]
+enum UiCommand {
+ /// Serve the local read-only navigator.
+ Serve(UiServeArgs),
+}
+
#[derive(Args)]
struct ExperimentCloseArgs {
#[command(flatten)]
@@ -304,33 +382,41 @@ struct ExperimentCloseArgs {
#[derive(Subcommand)]
enum SkillCommand {
+ /// List bundled skills.
List,
+ /// Install bundled skills into a Codex skill directory.
Install(SkillInstallArgs),
+ /// Print one bundled skill body.
Show(SkillShowArgs),
}
#[derive(Args)]
struct SkillInstallArgs {
+ /// Bundled skill name. Defaults to all bundled skills.
#[arg(long)]
name: Option<String>,
+ /// Destination root. Defaults to `~/.codex/skills`.
#[arg(long)]
destination: Option<PathBuf>,
}
#[derive(Args)]
struct SkillShowArgs {
+ /// Bundled skill name. Defaults to `fidget-spinner`.
#[arg(long)]
name: Option<String>,
}
#[derive(Args)]
struct ProjectArg {
+ /// Project root or any nested path inside a project containing `.fidget_spinner/`.
#[arg(long, default_value = ".")]
project: PathBuf,
}
#[derive(Args)]
struct McpServeArgs {
+ /// Optional initial project binding. When omitted, the MCP starts unbound.
#[arg(long)]
project: Option<PathBuf>,
}
@@ -341,6 +427,18 @@ struct McpWorkerArgs {
project: PathBuf,
}
+#[derive(Args)]
+struct UiServeArgs {
+ #[command(flatten)]
+ project: ProjectArg,
+ /// Bind address for the local navigator.
+ #[arg(long, default_value = "127.0.0.1:8913")]
+ bind: SocketAddr,
+ /// Maximum rows rendered in list views.
+ #[arg(long, default_value_t = 200)]
+ limit: u32,
+}
+
#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)]
enum CliNodeClass {
Contract,
@@ -416,6 +514,10 @@ fn run() -> Result<(), StoreError> {
Command::Note(command) => match command.command {
NoteSubcommand::Quick(args) => run_quick_note(args),
},
+ Command::Tag { command } => match command {
+ TagCommand::Add(args) => run_tag_add(args),
+ TagCommand::List(project) => run_tag_list(project),
+ },
Command::Research(command) => match command.command {
ResearchSubcommand::Add(args) => run_quick_research(args),
},
@@ -426,6 +528,9 @@ fn run() -> Result<(), StoreError> {
McpCommand::Serve(args) => mcp::serve(args.project),
McpCommand::Worker(args) => mcp::serve_worker(args.project),
},
+ Command::Ui { command } => match command {
+ UiCommand::Serve(args) => run_ui_serve(args),
+ },
Command::Skill { command } => match command {
SkillCommand::List => print_json(&bundled_skill::bundled_skill_summaries()),
SkillCommand::Install(args) => run_skill_install(args),
@@ -439,16 +544,17 @@ fn run() -> Result<(), StoreError> {
fn run_init(args: InitArgs) -> Result<(), StoreError> {
let project_root = utf8_path(args.project);
- let display_name = NonEmptyText::new(args.name.unwrap_or_else(|| {
- project_root
- .file_name()
- .map_or_else(|| "fidget-spinner-project".to_owned(), ToOwned::to_owned)
- }))?;
+ let display_name = args
+ .name
+ .map(NonEmptyText::new)
+ .transpose()?
+ .unwrap_or(default_display_name_for_root(&project_root)?);
let namespace = NonEmptyText::new(args.namespace)?;
let store = ProjectStore::init(&project_root, display_name, namespace)?;
println!("initialized {}", store.state_root());
println!("project: {}", store.config().display_name);
println!("schema: {}", store.state_root().join("schema.json"));
+ maybe_print_gitignore_hint(&project_root)?;
Ok(())
}
@@ -498,6 +604,7 @@ fn run_node_add(args: NodeAddArgs) -> Result<(), StoreError> {
.as_deref()
.map(parse_frontier_id)
.transpose()?;
+ let tags = optional_cli_tags(args.tag_selection, args.class == CliNodeClass::Note)?;
let payload = load_payload(
store.schema().schema_ref(),
args.payload_json,
@@ -514,6 +621,7 @@ fn run_node_add(args: NodeAddArgs) -> Result<(), StoreError> {
frontier_id,
title: NonEmptyText::new(args.title)?,
summary: args.summary.map(NonEmptyText::new).transpose()?,
+ tags,
payload,
annotations,
attachments: lineage_attachments(args.parents)?,
@@ -530,6 +638,7 @@ fn run_node_list(args: NodeListArgs) -> Result<(), StoreError> {
.map(parse_frontier_id)
.transpose()?,
class: args.class.map(Into::into),
+ tags: parse_tag_set(args.tags)?,
include_archived: args.include_archived,
limit: args.limit,
})?;
@@ -585,6 +694,7 @@ fn run_quick_note(args: QuickNoteArgs) -> Result<(), StoreError> {
.transpose()?,
title: NonEmptyText::new(args.title)?,
summary: None,
+ tags: Some(explicit_cli_tags(args.tag_selection)?),
payload,
annotations: Vec::new(),
attachments: lineage_attachments(args.parents)?,
@@ -592,6 +702,20 @@ fn run_quick_note(args: QuickNoteArgs) -> Result<(), StoreError> {
print_json(&node)
}
+fn run_tag_add(args: TagAddArgs) -> Result<(), StoreError> {
+ let mut store = open_store(&args.project.project)?;
+ let tag = store.add_tag(
+ TagName::new(args.name)?,
+ NonEmptyText::new(args.description)?,
+ )?;
+ print_json(&tag)
+}
+
+fn run_tag_list(args: ProjectArg) -> Result<(), StoreError> {
+ let store = open_store(&args.project)?;
+ print_json(&store.list_tags()?)
+}
+
fn run_quick_research(args: QuickResearchArgs) -> Result<(), StoreError> {
let mut store = open_store(&args.project.project)?;
let payload = NodePayload::with_schema(
@@ -607,6 +731,7 @@ fn run_quick_research(args: QuickResearchArgs) -> Result<(), StoreError> {
.transpose()?,
title: NonEmptyText::new(args.title)?,
summary: args.summary.map(NonEmptyText::new).transpose()?,
+ tags: None,
payload,
annotations: Vec::new(),
attachments: lineage_attachments(args.parents)?,
@@ -684,6 +809,10 @@ fn run_skill_install(args: SkillInstallArgs) -> Result<(), StoreError> {
Ok(())
}
+fn run_ui_serve(args: UiServeArgs) -> Result<(), StoreError> {
+ ui::serve(utf8_path(args.project.project), args.bind, args.limit)
+}
+
fn resolve_bundled_skill(
requested_name: Option<&str>,
) -> Result<bundled_skill::BundledSkill, StoreError> {
@@ -712,10 +841,88 @@ fn open_store(path: &Path) -> Result<ProjectStore, StoreError> {
ProjectStore::open(utf8_path(path.to_path_buf()))
}
+fn open_or_init_store_for_binding(path: &Path) -> Result<ProjectStore, StoreError> {
+ let requested_root = utf8_path(path.to_path_buf());
+ match ProjectStore::open(requested_root.clone()) {
+ Ok(store) => Ok(store),
+ Err(StoreError::MissingProjectStore(_)) => {
+ let project_root = binding_bootstrap_root(&requested_root)?;
+ if !is_empty_directory(&project_root)? {
+ return Err(StoreError::MissingProjectStore(requested_root));
+ }
+ ProjectStore::init(
+ &project_root,
+ default_display_name_for_root(&project_root)?,
+ default_namespace_for_root(&project_root)?,
+ )
+ }
+ Err(error) => Err(error),
+ }
+}
+
fn utf8_path(path: impl Into<PathBuf>) -> Utf8PathBuf {
Utf8PathBuf::from(path.into().to_string_lossy().into_owned())
}
+fn binding_bootstrap_root(path: &Utf8Path) -> Result<Utf8PathBuf, StoreError> {
+ match fs::metadata(path.as_std_path()) {
+ Ok(metadata) if metadata.is_file() => Ok(path
+ .parent()
+ .map_or_else(|| path.to_path_buf(), Utf8Path::to_path_buf)),
+ Ok(_) => Ok(path.to_path_buf()),
+ Err(error) if error.kind() == std::io::ErrorKind::NotFound => Ok(path.to_path_buf()),
+ Err(error) => Err(StoreError::from(error)),
+ }
+}
+
+fn is_empty_directory(path: &Utf8Path) -> Result<bool, StoreError> {
+ match fs::metadata(path.as_std_path()) {
+ Ok(metadata) if metadata.is_dir() => {
+ let mut entries = fs::read_dir(path.as_std_path())?;
+ Ok(entries.next().transpose()?.is_none())
+ }
+ Ok(_) => Ok(false),
+ Err(error) if error.kind() == std::io::ErrorKind::NotFound => Ok(false),
+ Err(error) => Err(StoreError::from(error)),
+ }
+}
+
+fn default_display_name_for_root(project_root: &Utf8Path) -> Result<NonEmptyText, StoreError> {
+ NonEmptyText::new(
+ project_root
+ .file_name()
+ .map_or_else(|| "fidget-spinner-project".to_owned(), ToOwned::to_owned),
+ )
+ .map_err(StoreError::from)
+}
+
+fn default_namespace_for_root(project_root: &Utf8Path) -> Result<NonEmptyText, StoreError> {
+ let slug = slugify_namespace_component(project_root.file_name().unwrap_or("project"));
+ NonEmptyText::new(format!("local.{slug}")).map_err(StoreError::from)
+}
+
+fn slugify_namespace_component(raw: &str) -> String {
+ let mut slug = String::new();
+ let mut previous_was_separator = false;
+ for character in raw.chars().flat_map(char::to_lowercase) {
+ if character.is_ascii_alphanumeric() {
+ slug.push(character);
+ previous_was_separator = false;
+ continue;
+ }
+ if !previous_was_separator {
+ slug.push('_');
+ previous_was_separator = true;
+ }
+ }
+ let slug = slug.trim_matches('_').to_owned();
+ if slug.is_empty() {
+ "project".to_owned()
+ } else {
+ slug
+ }
+}
+
fn to_text_vec(values: Vec<String>) -> Result<Vec<NonEmptyText>, StoreError> {
values
.into_iter()
@@ -728,6 +935,35 @@ fn to_text_set(values: Vec<String>) -> Result<BTreeSet<NonEmptyText>, StoreError
to_text_vec(values).map(BTreeSet::from_iter)
}
+fn parse_tag_set(values: Vec<String>) -> Result<BTreeSet<TagName>, StoreError> {
+ values
+ .into_iter()
+ .map(TagName::new)
+ .collect::<Result<BTreeSet<_>, _>>()
+ .map_err(StoreError::from)
+}
+
+fn explicit_cli_tags(selection: ExplicitTagSelectionArgs) -> Result<BTreeSet<TagName>, StoreError> {
+ optional_cli_tags(selection, true)?.ok_or(StoreError::NoteTagsRequired)
+}
+
+fn optional_cli_tags(
+ selection: ExplicitTagSelectionArgs,
+ required: bool,
+) -> Result<Option<BTreeSet<TagName>>, StoreError> {
+ if selection.no_tags {
+ return Ok(Some(BTreeSet::new()));
+ }
+ if selection.tags.is_empty() {
+ return if required {
+ Err(StoreError::NoteTagsRequired)
+ } else {
+ Ok(None)
+ };
+ }
+ Ok(Some(parse_tag_set(selection.tags)?))
+}
+
fn parse_env(values: Vec<String>) -> BTreeMap<String, String> {
values
.into_iter()
@@ -825,6 +1061,29 @@ fn run_git(project_root: &Utf8Path, args: &[&str]) -> Result<Option<String>, Sto
Ok(Some(text))
}
+fn maybe_print_gitignore_hint(project_root: &Utf8Path) -> Result<(), StoreError> {
+ if run_git(project_root, &["rev-parse", "--show-toplevel"])?.is_none() {
+ return Ok(());
+ }
+
+ let status = std::process::Command::new("git")
+ .arg("-C")
+ .arg(project_root.as_str())
+ .args(["check-ignore", "-q", ".fidget_spinner"])
+ .status()?;
+
+ match status.code() {
+ Some(0) => Ok(()),
+ Some(1) => {
+ println!(
+ "note: add `.fidget_spinner/` to `.gitignore` or `.git/info/exclude` if you do not want local state in `git status`"
+ );
+ Ok(())
+ }
+ _ => Ok(()),
+ }
+}
+
fn parse_metric_observation(raw: String) -> Result<MetricObservation, StoreError> {
let parts = raw.split(':').collect::<Vec<_>>();
if parts.len() != 4 {
diff --git a/crates/fidget-spinner-cli/src/ui.rs b/crates/fidget-spinner-cli/src/ui.rs
new file mode 100644
index 0000000..0cb9c05
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/ui.rs
@@ -0,0 +1,600 @@
+use std::collections::BTreeMap;
+use std::io;
+use std::net::SocketAddr;
+
+use axum::Router;
+use axum::extract::{Query, State};
+use axum::http::StatusCode;
+use axum::response::{Html, IntoResponse, Response};
+use axum::routing::get;
+use camino::Utf8PathBuf;
+use fidget_spinner_core::{DagNode, FieldValueType, NodeClass, ProjectSchema, TagName};
+use linkify::{LinkFinder, LinkKind};
+use maud::{DOCTYPE, Markup, PreEscaped, html};
+use serde::Deserialize;
+use serde_json::Value;
+use time::OffsetDateTime;
+use time::format_description::well_known::Rfc3339;
+
+use crate::{open_store, to_pretty_json};
+
+#[derive(Clone)]
+struct NavigatorState {
+ project_root: Utf8PathBuf,
+ limit: u32,
+}
+
+#[derive(Debug, Default, Deserialize)]
+struct NavigatorQuery {
+ tag: Option<String>,
+}
+
+struct NavigatorEntry {
+ node: DagNode,
+ frontier_label: Option<String>,
+}
+
+struct TagFacet {
+ name: TagName,
+ description: String,
+ count: usize,
+}
+
+pub(crate) fn serve(
+ project_root: Utf8PathBuf,
+ bind: SocketAddr,
+ limit: u32,
+) -> Result<(), fidget_spinner_store_sqlite::StoreError> {
+ let runtime = tokio::runtime::Builder::new_multi_thread()
+ .enable_io()
+ .build()
+ .map_err(fidget_spinner_store_sqlite::StoreError::from)?;
+ runtime.block_on(async move {
+ let state = NavigatorState {
+ project_root,
+ limit,
+ };
+ let app = Router::new()
+ .route("/", get(navigator))
+ .with_state(state.clone());
+ let listener = tokio::net::TcpListener::bind(bind)
+ .await
+ .map_err(fidget_spinner_store_sqlite::StoreError::from)?;
+ println!("navigator: http://{bind}/");
+ axum::serve(listener, app).await.map_err(|error| {
+ fidget_spinner_store_sqlite::StoreError::Io(io::Error::other(error.to_string()))
+ })
+ })
+}
+
+async fn navigator(
+ State(state): State<NavigatorState>,
+ Query(query): Query<NavigatorQuery>,
+) -> Response {
+ match render_navigator(state, query) {
+ Ok(markup) => Html(markup.into_string()).into_response(),
+ Err(error) => (
+ StatusCode::INTERNAL_SERVER_ERROR,
+ format!("navigator render failed: {error}"),
+ )
+ .into_response(),
+ }
+}
+
+fn render_navigator(
+ state: NavigatorState,
+ query: NavigatorQuery,
+) -> Result<Markup, fidget_spinner_store_sqlite::StoreError> {
+ let store = open_store(state.project_root.as_std_path())?;
+ let selected_tag = query.tag.map(TagName::new).transpose()?;
+ let schema = store.schema().clone();
+ let frontiers = store
+ .list_frontiers()?
+ .into_iter()
+ .map(|frontier| (frontier.id, frontier.label.to_string()))
+ .collect::<BTreeMap<_, _>>();
+
+ let recent_nodes = load_recent_nodes(&store, None, state.limit)?;
+ let visible_nodes = load_recent_nodes(&store, selected_tag.clone(), state.limit)?;
+ let tag_facets = store
+ .list_tags()?
+ .into_iter()
+ .map(|tag| TagFacet {
+ count: recent_nodes
+ .iter()
+ .filter(|node| node.tags.contains(&tag.name))
+ .count(),
+ description: tag.description.to_string(),
+ name: tag.name,
+ })
+ .collect::<Vec<_>>();
+ let entries = visible_nodes
+ .into_iter()
+ .map(|node| NavigatorEntry {
+ frontier_label: node
+ .frontier_id
+ .and_then(|frontier_id| frontiers.get(&frontier_id).cloned()),
+ node,
+ })
+ .collect::<Vec<_>>();
+
+ let title = selected_tag.as_ref().map_or_else(
+ || "all recent nodes".to_owned(),
+ |tag| format!("tag: {tag}"),
+ );
+ let project_name = store.config().display_name.to_string();
+
+ Ok(html! {
+ (DOCTYPE)
+ html {
+ head {
+ meta charset="utf-8";
+ meta name="viewport" content="width=device-width, initial-scale=1";
+ title { "Fidget Spinner Navigator" }
+ style { (PreEscaped(stylesheet().to_owned())) }
+ }
+ body {
+ main class="shell" {
+ aside class="rail" {
+ h1 { "Navigator" }
+ p class="project" { (project_name) }
+ nav class="tag-list" {
+ a
+ href="/"
+ class={ "tag-link " (if selected_tag.is_none() { "selected" } else { "" }) } {
+ span class="tag-name" { "all" }
+ span class="tag-count" { (recent_nodes.len()) }
+ }
+ @for facet in &tag_facets {
+ a
+ href={ "/?tag=" (facet.name.as_str()) }
+ class={ "tag-link " (if selected_tag.as_ref() == Some(&facet.name) { "selected" } else { "" }) } {
+ span class="tag-name" { (facet.name.as_str()) }
+ span class="tag-count" { (facet.count) }
+ span class="tag-description" { (facet.description.as_str()) }
+ }
+ }
+ }
+ }
+ section class="feed" {
+ header class="feed-header" {
+ h2 { (title) }
+ p class="feed-meta" {
+ (entries.len()) " shown"
+ " · "
+ (recent_nodes.len()) " recent"
+ " · "
+ (state.limit) " max"
+ }
+ }
+ @if entries.is_empty() {
+ article class="empty-state" {
+ h3 { "No matching nodes" }
+ p { "Try clearing the tag filter or recording new notes." }
+ }
+ } @else {
+ @for entry in &entries {
+ (render_entry(entry, &schema))
+ }
+ }
+ }
+ }
+ }
+ }
+ })
+}
+
+fn load_recent_nodes(
+ store: &fidget_spinner_store_sqlite::ProjectStore,
+ tag: Option<TagName>,
+ limit: u32,
+) -> Result<Vec<DagNode>, fidget_spinner_store_sqlite::StoreError> {
+ let summaries = store.list_nodes(fidget_spinner_store_sqlite::ListNodesQuery {
+ tags: tag.into_iter().collect(),
+ limit,
+ ..fidget_spinner_store_sqlite::ListNodesQuery::default()
+ })?;
+ summaries
+ .into_iter()
+ .map(|summary| {
+ store.get_node(summary.id)?.ok_or(
+ fidget_spinner_store_sqlite::StoreError::NodeNotFound(summary.id),
+ )
+ })
+ .collect()
+}
+
+fn render_entry(entry: &NavigatorEntry, schema: &ProjectSchema) -> Markup {
+ let body = entry.node.payload.field("body").and_then(Value::as_str);
+ let mut keys = entry
+ .node
+ .payload
+ .fields
+ .keys()
+ .filter(|name| name.as_str() != "body")
+ .cloned()
+ .collect::<Vec<_>>();
+ keys.sort_unstable();
+
+ html! {
+ article class="entry" id={ "node-" (entry.node.id) } {
+ header class="entry-header" {
+ div class="entry-title-row" {
+ span class={ "class-badge class-" (entry.node.class.as_str()) } {
+ (entry.node.class.as_str())
+ }
+ h3 class="entry-title" {
+ a href={ "#node-" (entry.node.id) } { (entry.node.title.as_str()) }
+ }
+ }
+ div class="entry-meta" {
+ span { (render_timestamp(entry.node.updated_at)) }
+ @if let Some(label) = &entry.frontier_label {
+ span { "frontier: " (label.as_str()) }
+ }
+ @if !entry.node.tags.is_empty() {
+ span class="tag-strip" {
+ @for tag in &entry.node.tags {
+ a class="entry-tag" href={ "/?tag=" (tag.as_str()) } { (tag.as_str()) }
+ }
+ }
+ }
+ }
+ }
+ @if let Some(summary) = &entry.node.summary {
+ p class="entry-summary" { (summary.as_str()) }
+ }
+ @if let Some(body) = body {
+ section class="entry-body" {
+ (render_string_value(body))
+ }
+ }
+ @if !keys.is_empty() {
+ dl class="field-list" {
+ @for key in &keys {
+ @if let Some(value) = entry.node.payload.field(key) {
+ (render_field(entry.node.class, schema, key, value))
+ }
+ }
+ }
+ }
+ @if !entry.node.diagnostics.items.is_empty() {
+ section class="diagnostics" {
+ h4 { "diagnostics" }
+ ul {
+ @for item in &entry.node.diagnostics.items {
+ li {
+ span class="diag-severity" { (format!("{:?}", item.severity).to_ascii_lowercase()) }
+ " "
+ (item.message.as_str())
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+fn render_field(class: NodeClass, schema: &ProjectSchema, key: &str, value: &Value) -> Markup {
+ let value_type = schema
+ .field_spec(class, key)
+ .and_then(|field| field.value_type);
+ let is_plottable = schema
+ .field_spec(class, key)
+ .is_some_and(|field| field.is_plottable());
+ html! {
+ dt {
+ (key)
+ @if let Some(value_type) = value_type {
+ span class="field-type" { (value_type.as_str()) }
+ }
+ @if is_plottable {
+ span class="field-type plottable" { "plot" }
+ }
+ }
+ dd {
+ @match value_type {
+ Some(FieldValueType::String) => {
+ @if let Some(text) = value.as_str() {
+ (render_string_value(text))
+ } @else {
+ (render_json_value(value))
+ }
+ }
+ Some(FieldValueType::Numeric) => {
+ @if let Some(number) = value.as_f64() {
+ code class="numeric" { (number) }
+ } @else {
+ (render_json_value(value))
+ }
+ }
+ Some(FieldValueType::Boolean) => {
+ @if let Some(boolean) = value.as_bool() {
+ span class={ "boolean " (if boolean { "true" } else { "false" }) } {
+ (if boolean { "true" } else { "false" })
+ }
+ } @else {
+ (render_json_value(value))
+ }
+ }
+ Some(FieldValueType::Timestamp) => {
+ @if let Some(raw) = value.as_str() {
+ time datetime=(raw) { (render_timestamp_value(raw)) }
+ } @else {
+ (render_json_value(value))
+ }
+ }
+ None => (render_json_value(value)),
+ }
+ }
+ }
+}
+
+fn render_string_value(text: &str) -> Markup {
+ let finder = LinkFinder::new();
+ html! {
+ div class="rich-text" {
+ @for line in text.lines() {
+ p {
+ @for span in finder.spans(line) {
+ @match span.kind() {
+ Some(LinkKind::Url) => a href=(span.as_str()) { (span.as_str()) },
+ _ => (span.as_str()),
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+fn render_json_value(value: &Value) -> Markup {
+ let text = to_pretty_json(value).unwrap_or_else(|_| value.to_string());
+ html! {
+ pre class="json-value" { (text) }
+ }
+}
+
+fn render_timestamp(timestamp: OffsetDateTime) -> String {
+ timestamp
+ .format(&Rfc3339)
+ .unwrap_or_else(|_| timestamp.to_string())
+}
+
+fn render_timestamp_value(raw: &str) -> String {
+ OffsetDateTime::parse(raw, &Rfc3339)
+ .map(render_timestamp)
+ .unwrap_or_else(|_| raw.to_owned())
+}
+
+fn stylesheet() -> &'static str {
+ r#"
+ :root {
+ color-scheme: light;
+ --bg: #f6f3ec;
+ --panel: #fffdf8;
+ --line: #d8d1c4;
+ --text: #22201a;
+ --muted: #746e62;
+ --accent: #2d5c4d;
+ --accent-soft: #dbe8e2;
+ --tag: #ece5d8;
+ --warn: #8b5b24;
+ }
+
+ * { box-sizing: border-box; }
+
+ body {
+ margin: 0;
+ background: var(--bg);
+ color: var(--text);
+ font: 15px/1.5 "Iosevka Web", "IBM Plex Mono", "SFMono-Regular", monospace;
+ }
+
+ a {
+ color: var(--accent);
+ text-decoration: none;
+ }
+
+ a:hover {
+ text-decoration: underline;
+ }
+
+ .shell {
+ display: grid;
+ grid-template-columns: 18rem minmax(0, 1fr);
+ min-height: 100vh;
+ }
+
+ .rail {
+ border-right: 1px solid var(--line);
+ padding: 1.25rem 1rem;
+ position: sticky;
+ top: 0;
+ align-self: start;
+ height: 100vh;
+ overflow: auto;
+ background: rgba(255, 253, 248, 0.85);
+ backdrop-filter: blur(6px);
+ }
+
+ .project, .feed-meta, .entry-meta, .entry-summary, .tag-description {
+ color: var(--muted);
+ }
+
+ .tag-list {
+ display: grid;
+ gap: 0.5rem;
+ }
+
+ .tag-link {
+ display: grid;
+ grid-template-columns: minmax(0, 1fr) auto;
+ gap: 0.2rem 0.75rem;
+ padding: 0.55rem 0.7rem;
+ border: 1px solid var(--line);
+ background: var(--panel);
+ }
+
+ .tag-link.selected {
+ border-color: var(--accent);
+ background: var(--accent-soft);
+ }
+
+ .tag-name {
+ font-weight: 700;
+ overflow-wrap: anywhere;
+ }
+
+ .tag-count {
+ color: var(--muted);
+ }
+
+ .tag-description {
+ grid-column: 1 / -1;
+ font-size: 0.9rem;
+ }
+
+ .feed {
+ padding: 1.5rem;
+ display: grid;
+ gap: 1rem;
+ }
+
+ .feed-header {
+ padding-bottom: 0.5rem;
+ border-bottom: 1px solid var(--line);
+ }
+
+ .entry, .empty-state {
+ background: var(--panel);
+ border: 1px solid var(--line);
+ padding: 1rem 1.1rem;
+ }
+
+ .entry-header {
+ display: grid;
+ gap: 0.35rem;
+ margin-bottom: 0.75rem;
+ }
+
+ .entry-title-row {
+ display: flex;
+ gap: 0.75rem;
+ align-items: baseline;
+ }
+
+ .entry-title {
+ margin: 0;
+ font-size: 1.05rem;
+ }
+
+ .entry-meta {
+ display: flex;
+ flex-wrap: wrap;
+ gap: 0.75rem;
+ font-size: 0.9rem;
+ }
+
+ .class-badge, .field-type, .entry-tag {
+ display: inline-block;
+ padding: 0.08rem 0.4rem;
+ border: 1px solid var(--line);
+ background: var(--tag);
+ font-size: 0.82rem;
+ }
+
+ .field-type.plottable {
+ background: var(--accent-soft);
+ border-color: var(--accent);
+ }
+
+ .tag-strip {
+ display: inline-flex;
+ flex-wrap: wrap;
+ gap: 0.35rem;
+ }
+
+ .entry-body {
+ margin-bottom: 0.9rem;
+ }
+
+ .rich-text p {
+ margin: 0 0 0.55rem;
+ }
+
+ .rich-text p:last-child {
+ margin-bottom: 0;
+ }
+
+ .field-list {
+ display: grid;
+ grid-template-columns: minmax(12rem, 18rem) minmax(0, 1fr);
+ gap: 0.55rem 1rem;
+ margin: 0;
+ }
+
+ .field-list dt {
+ font-weight: 700;
+ display: flex;
+ gap: 0.4rem;
+ align-items: center;
+ overflow-wrap: anywhere;
+ }
+
+ .field-list dd {
+ margin: 0;
+ }
+
+ .json-value {
+ margin: 0;
+ padding: 0.6rem 0.7rem;
+ background: #f3eee4;
+ overflow: auto;
+ }
+
+ .boolean.true { color: var(--accent); }
+ .boolean.false { color: #8a2f2f; }
+ .numeric { font-size: 1rem; }
+
+ .diagnostics {
+ margin-top: 1rem;
+ padding-top: 0.8rem;
+ border-top: 1px dashed var(--line);
+ }
+
+ .diagnostics h4 {
+ margin: 0 0 0.4rem;
+ font-size: 0.9rem;
+ text-transform: lowercase;
+ }
+
+ .diagnostics ul {
+ margin: 0;
+ padding-left: 1.1rem;
+ }
+
+ .diag-severity {
+ color: var(--warn);
+ font-weight: 700;
+ }
+
+ @media (max-width: 900px) {
+ .shell {
+ grid-template-columns: 1fr;
+ }
+
+ .rail {
+ position: static;
+ height: auto;
+ border-right: 0;
+ border-bottom: 1px solid var(--line);
+ }
+
+ .field-list {
+ grid-template-columns: 1fr;
+ }
+ }
+ "#
+}
diff --git a/crates/fidget-spinner-core/Cargo.toml b/crates/fidget-spinner-core/Cargo.toml
index b472b91..c147ee2 100644
--- a/crates/fidget-spinner-core/Cargo.toml
+++ b/crates/fidget-spinner-core/Cargo.toml
@@ -1,9 +1,13 @@
[package]
name = "fidget-spinner-core"
-description = "Core domain model for a local-first experimental DAG"
+categories.workspace = true
+description = "Core domain model for the Fidget Spinner experimental DAG"
edition.workspace = true
+keywords.workspace = true
license.workspace = true
publish = false
+readme.workspace = true
+repository.workspace = true
rust-version.workspace = true
version.workspace = true
diff --git a/crates/fidget-spinner-core/src/error.rs b/crates/fidget-spinner-core/src/error.rs
index 8e976c7..eb05ba7 100644
--- a/crates/fidget-spinner-core/src/error.rs
+++ b/crates/fidget-spinner-core/src/error.rs
@@ -4,6 +4,12 @@ use thiserror::Error;
pub enum CoreError {
    /// A text value that must be non-blank was blank.
    #[error("text values must not be blank")]
    EmptyText,
    /// A tag name was blank after trimming.
    #[error("tag names must not be blank")]
    EmptyTagName,
    /// A tag name failed shape validation; carries the normalized input
    /// so the message can echo what was actually checked.
    #[error(
        "invalid tag name `{0}`; expected lowercase ascii alphanumerics separated by `-`, `_`, or `/`"
    )]
    InvalidTagName(String),
    /// A command recipe was constructed with an empty argv list.
    #[error("command recipes must contain at least one argv element")]
    EmptyCommand,
}
diff --git a/crates/fidget-spinner-core/src/lib.rs b/crates/fidget-spinner-core/src/lib.rs
index f368268..b5e2b23 100644
--- a/crates/fidget-spinner-core/src/lib.rs
+++ b/crates/fidget-spinner-core/src/lib.rs
@@ -18,9 +18,10 @@ pub use crate::model::{
AdmissionState, AnnotationVisibility, ArtifactKind, ArtifactRef, CheckpointDisposition,
CheckpointRecord, CheckpointSnapshotRef, CodeSnapshotRef, CommandRecipe, CompletedExperiment,
DagEdge, DagNode, DiagnosticSeverity, EdgeKind, EvaluationProtocol, ExecutionBackend,
- ExperimentResult, FieldPresence, FieldRole, FrontierContract, FrontierNote, FrontierProjection,
- FrontierRecord, FrontierStatus, FrontierVerdict, GitCommitHash, InferencePolicy, JsonObject,
- MetricObservation, MetricSpec, MetricUnit, NodeAnnotation, NodeClass, NodeDiagnostics,
- NodePayload, NodeTrack, NonEmptyText, OptimizationObjective, PayloadSchemaRef,
- ProjectFieldSpec, ProjectSchema, RunRecord, RunStatus, ValidationDiagnostic,
+ ExperimentResult, FieldPresence, FieldRole, FieldValueType, FrontierContract, FrontierNote,
+ FrontierProjection, FrontierRecord, FrontierStatus, FrontierVerdict, GitCommitHash,
+ InferencePolicy, JsonObject, MetricObservation, MetricSpec, MetricUnit, NodeAnnotation,
+ NodeClass, NodeDiagnostics, NodePayload, NodeTrack, NonEmptyText, OptimizationObjective,
+ PayloadSchemaRef, ProjectFieldSpec, ProjectSchema, RunRecord, RunStatus, TagName, TagRecord,
+ ValidationDiagnostic,
};
diff --git a/crates/fidget-spinner-core/src/model.rs b/crates/fidget-spinner-core/src/model.rs
index f0d1818..2de3705 100644
--- a/crates/fidget-spinner-core/src/model.rs
+++ b/crates/fidget-spinner-core/src/model.rs
@@ -5,6 +5,7 @@ use camino::Utf8PathBuf;
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value};
use time::OffsetDateTime;
+use time::format_description::well_known::Rfc3339;
use crate::{
AgentSessionId, AnnotationId, ArtifactId, CheckpointId, CoreError, ExperimentId, FrontierId,
@@ -57,6 +58,60 @@ impl Display for GitCommitHash {
}
}
+#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Serialize, Deserialize)]
+#[serde(try_from = "String", into = "String")]
+pub struct TagName(String);
+
+impl TagName {
+ pub fn new(value: impl Into<String>) -> Result<Self, CoreError> {
+ let normalized = value.into().trim().to_ascii_lowercase();
+ if normalized.is_empty() {
+ return Err(CoreError::EmptyTagName);
+ }
+ let mut previous_was_separator = true;
+ for character in normalized.chars() {
+ if character.is_ascii_lowercase() || character.is_ascii_digit() {
+ previous_was_separator = false;
+ continue;
+ }
+ if matches!(character, '-' | '_' | '/') && !previous_was_separator {
+ previous_was_separator = true;
+ continue;
+ }
+ return Err(CoreError::InvalidTagName(normalized));
+ }
+ if previous_was_separator {
+ return Err(CoreError::InvalidTagName(normalized));
+ }
+ Ok(Self(normalized))
+ }
+
+ #[must_use]
+ pub fn as_str(&self) -> &str {
+ &self.0
+ }
+}
+
+impl TryFrom<String> for TagName {
+ type Error = CoreError;
+
+ fn try_from(value: String) -> Result<Self, Self::Error> {
+ Self::new(value)
+ }
+}
+
+impl From<TagName> for String {
+ fn from(value: TagName) -> Self {
+ value.0
+ }
+}
+
+impl Display for TagName {
+ fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result {
+ formatter.write_str(&self.0)
+ }
+}
+
pub type JsonObject = Map<String, Value>;
#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
@@ -143,6 +198,44 @@ pub enum InferencePolicy {
ModelMayInfer,
}
+#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
+#[serde(rename_all = "snake_case")]
+pub enum FieldValueType {
+ String,
+ Numeric,
+ Boolean,
+ Timestamp,
+}
+
+impl FieldValueType {
+ #[must_use]
+ pub const fn is_plottable(self) -> bool {
+ matches!(self, Self::Numeric | Self::Timestamp)
+ }
+
+ #[must_use]
+ pub fn accepts(self, value: &Value) -> bool {
+ match self {
+ Self::String => value.is_string(),
+ Self::Numeric => value.is_number(),
+ Self::Boolean => value.is_boolean(),
+ Self::Timestamp => value
+ .as_str()
+ .is_some_and(|raw| OffsetDateTime::parse(raw, &Rfc3339).is_ok()),
+ }
+ }
+
+ #[must_use]
+ pub const fn as_str(self) -> &'static str {
+ match self {
+ Self::String => "string",
+ Self::Numeric => "numeric",
+ Self::Boolean => "boolean",
+ Self::Timestamp => "timestamp",
+ }
+ }
+}
+
#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub enum FrontierStatus {
Exploring,
@@ -265,6 +358,13 @@ impl NodeAnnotation {
}
/// A project-level tag definition: validated name, required description,
/// and creation time.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct TagRecord {
    // Normalized tag identifier (see `TagName` validation rules).
    pub name: TagName,
    // Required human-readable description of the tag's purpose.
    pub description: NonEmptyText,
    // When the tag was registered.
    pub created_at: OffsetDateTime,
}
+
+#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct ValidationDiagnostic {
pub severity: DiagnosticSeverity,
pub code: String,
@@ -296,6 +396,8 @@ pub struct ProjectFieldSpec {
pub severity: DiagnosticSeverity,
pub role: FieldRole,
pub inference_policy: InferencePolicy,
+ #[serde(default)]
+ pub value_type: Option<FieldValueType>,
}
impl ProjectFieldSpec {
@@ -303,6 +405,11 @@ impl ProjectFieldSpec {
pub fn applies_to(&self, class: NodeClass) -> bool {
self.node_classes.is_empty() || self.node_classes.contains(&class)
}
+
+ #[must_use]
+ pub fn is_plottable(&self) -> bool {
+ self.value_type.is_some_and(FieldValueType::is_plottable)
+ }
}
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
@@ -331,14 +438,37 @@ impl ProjectSchema {
}
#[must_use]
+ pub fn field_spec(&self, class: NodeClass, name: &str) -> Option<&ProjectFieldSpec> {
+ self.fields
+ .iter()
+ .find(|field| field.applies_to(class) && field.name.as_str() == name)
+ }
+
+ #[must_use]
pub fn validate_node(&self, class: NodeClass, payload: &NodePayload) -> NodeDiagnostics {
let items = self
.fields
.iter()
.filter(|field| field.applies_to(class))
.filter_map(|field| {
- let is_missing = payload.field(field.name.as_str()).is_none();
+ let value = payload.field(field.name.as_str());
+ let is_missing = value.is_none();
if !is_missing || field.presence == FieldPresence::Optional {
+ if let (Some(value), Some(value_type)) = (value, field.value_type)
+ && !value_type.accepts(value)
+ {
+ return Some(ValidationDiagnostic {
+ severity: field.severity,
+ code: format!("type.{}", field.name.as_str()),
+ message: validation_message(format!(
+ "project payload field `{}` expected {}, found {}",
+ field.name.as_str(),
+ value_type.as_str(),
+ json_value_kind(value)
+ )),
+ field_name: Some(field.name.as_str().to_owned()),
+ });
+ }
return None;
}
Some(ValidationDiagnostic {
@@ -366,6 +496,17 @@ fn validation_message(value: String) -> NonEmptyText {
}
}
+fn json_value_kind(value: &Value) -> &'static str {
+ match value {
+ Value::Null => "null",
+ Value::Bool(_) => "boolean",
+ Value::Number(_) => "numeric",
+ Value::String(_) => "string",
+ Value::Array(_) => "array",
+ Value::Object(_) => "object",
+ }
+}
+
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct DagNode {
pub id: NodeId,
@@ -375,6 +516,7 @@ pub struct DagNode {
pub archived: bool,
pub title: NonEmptyText,
pub summary: Option<NonEmptyText>,
+ pub tags: BTreeSet<TagName>,
pub payload: NodePayload,
pub annotations: Vec<NodeAnnotation>,
pub diagnostics: NodeDiagnostics,
@@ -402,6 +544,7 @@ impl DagNode {
archived: false,
title,
summary,
+ tags: BTreeSet::new(),
payload,
annotations: Vec::new(),
diagnostics,
@@ -628,8 +771,9 @@ mod tests {
use serde_json::json;
use super::{
- CommandRecipe, DagNode, DiagnosticSeverity, FieldPresence, FieldRole, InferencePolicy,
- JsonObject, NodeClass, NodePayload, NonEmptyText, ProjectFieldSpec, ProjectSchema,
+ CommandRecipe, DagNode, DiagnosticSeverity, FieldPresence, FieldRole, FieldValueType,
+ InferencePolicy, JsonObject, NodeClass, NodePayload, NonEmptyText, ProjectFieldSpec,
+ ProjectSchema,
};
use crate::CoreError;
@@ -661,6 +805,7 @@ mod tests {
severity: DiagnosticSeverity::Warning,
role: FieldRole::ProjectionGate,
inference_policy: InferencePolicy::ManualOnly,
+ value_type: None,
}],
};
let payload = NodePayload::with_schema(schema.schema_ref(), JsonObject::new());
@@ -673,6 +818,33 @@ mod tests {
}
    #[test]
    // A declared `value_type` must turn a wrong-typed payload value into a
    // diagnostic, while the node itself is still admitted (severity here is
    // Warning, not an admission blocker).
    fn schema_validation_warns_on_type_mismatch() -> Result<(), CoreError> {
        let schema = ProjectSchema {
            namespace: NonEmptyText::new("local.libgrid")?,
            version: 1,
            fields: vec![ProjectFieldSpec {
                name: NonEmptyText::new("improvement")?,
                node_classes: BTreeSet::from([NodeClass::Analysis]),
                presence: FieldPresence::Recommended,
                severity: DiagnosticSeverity::Warning,
                role: FieldRole::RenderOnly,
                inference_policy: InferencePolicy::ManualOnly,
                // Declaring Numeric is what arms the type check below.
                value_type: Some(FieldValueType::Numeric),
            }],
        };
        // Supply a string where the schema expects a number.
        let payload = NodePayload::with_schema(
            schema.schema_ref(),
            JsonObject::from_iter([("improvement".to_owned(), json!("not a number"))]),
        );
        let diagnostics = schema.validate_node(NodeClass::Analysis, &payload);

        assert_eq!(diagnostics.admission, super::AdmissionState::Admitted);
        assert_eq!(diagnostics.items.len(), 1);
        // Type mismatches are reported under a `type.<field>` code.
        assert_eq!(diagnostics.items[0].code, "type.improvement");
        Ok(())
    }
+
+ #[test]
fn research_nodes_default_to_off_path() -> Result<(), CoreError> {
let payload = NodePayload {
schema: None,
diff --git a/crates/fidget-spinner-store-sqlite/Cargo.toml b/crates/fidget-spinner-store-sqlite/Cargo.toml
index 54e0784..00fd070 100644
--- a/crates/fidget-spinner-store-sqlite/Cargo.toml
+++ b/crates/fidget-spinner-store-sqlite/Cargo.toml
@@ -1,9 +1,13 @@
[package]
name = "fidget-spinner-store-sqlite"
-description = "Per-project SQLite store for the Fidget Spinner DAG spine"
+categories.workspace = true
+description = "SQLite-backed per-project store for Fidget Spinner DAG projects"
edition.workspace = true
+keywords.workspace = true
license.workspace = true
publish = false
+readme.workspace = true
+repository.workspace = true
rust-version.workspace = true
version.workspace = true
diff --git a/crates/fidget-spinner-store-sqlite/src/lib.rs b/crates/fidget-spinner-store-sqlite/src/lib.rs
index 7c129ab..da9fa42 100644
--- a/crates/fidget-spinner-store-sqlite/src/lib.rs
+++ b/crates/fidget-spinner-store-sqlite/src/lib.rs
@@ -1,3 +1,5 @@
+use std::collections::BTreeSet;
+use std::fmt::Write as _;
use std::fs;
use std::io;
use std::process::Command;
@@ -9,9 +11,10 @@ use fidget_spinner_core::{
ExecutionBackend, ExperimentResult, FrontierContract, FrontierNote, FrontierProjection,
FrontierRecord, FrontierStatus, FrontierVerdict, GitCommitHash, JsonObject, MetricObservation,
MetricSpec, MetricUnit, NodeAnnotation, NodeClass, NodeDiagnostics, NodePayload, NonEmptyText,
- OptimizationObjective, ProjectSchema, RunRecord, RunStatus,
+ OptimizationObjective, ProjectSchema, RunRecord, RunStatus, TagName, TagRecord,
};
-use rusqlite::{Connection, OptionalExtension, Transaction, params};
+use rusqlite::types::Value as SqlValue;
+use rusqlite::{Connection, OptionalExtension, Transaction, params, params_from_iter};
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use thiserror::Error;
@@ -54,6 +57,12 @@ pub enum StoreError {
MissingChampionCheckpoint {
frontier_id: fidget_spinner_core::FrontierId,
},
+ #[error("unknown tag `{0}`")]
+ UnknownTag(TagName),
+ #[error("tag `{0}` already exists")]
+ DuplicateTag(TagName),
+ #[error("note nodes require an explicit tag list; use an empty list if no tags apply")]
+ NoteTagsRequired,
#[error("git repository inspection failed for {0}")]
GitInspectionFailed(Utf8PathBuf),
}
@@ -82,6 +91,7 @@ pub struct CreateNodeRequest {
pub frontier_id: Option<fidget_spinner_core::FrontierId>,
pub title: NonEmptyText,
pub summary: Option<NonEmptyText>,
+ pub tags: Option<BTreeSet<TagName>>,
pub payload: NodePayload,
pub annotations: Vec<NodeAnnotation>,
pub attachments: Vec<EdgeAttachment>,
@@ -122,6 +132,7 @@ impl EdgeAttachment {
pub struct ListNodesQuery {
    // Restrict results to nodes attached to this frontier, when set.
    pub frontier_id: Option<fidget_spinner_core::FrontierId>,
    // Restrict results to a single node class, when set.
    pub class: Option<NodeClass>,
    // Require every listed tag to be present on a node (AND semantics);
    // an empty set applies no tag filter.
    pub tags: BTreeSet<TagName>,
    // Include archived nodes in the listing; they are excluded by default.
    pub include_archived: bool,
    // Maximum number of rows returned.
    pub limit: u32,
}
@@ -131,6 +142,7 @@ impl Default for ListNodesQuery {
Self {
frontier_id: None,
class: None,
+ tags: BTreeSet::new(),
include_archived: false,
limit: 20,
}
@@ -146,6 +158,7 @@ pub struct NodeSummary {
pub archived: bool,
pub title: NonEmptyText,
pub summary: Option<NonEmptyText>,
+ pub tags: BTreeSet<TagName>,
pub diagnostic_count: u64,
pub hidden_annotation_count: u64,
pub created_at: OffsetDateTime,
@@ -317,6 +330,47 @@ impl ProjectStore {
self.frontier_projection(frontier.id)
}
+ pub fn add_tag(
+ &mut self,
+ name: TagName,
+ description: NonEmptyText,
+ ) -> Result<TagRecord, StoreError> {
+ let record = TagRecord {
+ name,
+ description,
+ created_at: OffsetDateTime::now_utc(),
+ };
+ let tx = self.connection.transaction()?;
+ insert_tag(&tx, &record)?;
+ insert_event(
+ &tx,
+ "tag",
+ record.name.as_str(),
+ "tag.created",
+ json!({"description": record.description.as_str()}),
+ )?;
+ tx.commit()?;
+ Ok(record)
+ }
+
+ pub fn list_tags(&self) -> Result<Vec<TagRecord>, StoreError> {
+ let mut statement = self.connection.prepare(
+ "SELECT name, description, created_at
+ FROM tags
+ ORDER BY name ASC",
+ )?;
+ let mut rows = statement.query([])?;
+ let mut items = Vec::new();
+ while let Some(row) = rows.next()? {
+ items.push(TagRecord {
+ name: TagName::new(row.get::<_, String>(0)?)?,
+ description: NonEmptyText::new(row.get::<_, String>(1)?)?,
+ created_at: decode_timestamp(&row.get::<_, String>(2)?)?,
+ });
+ }
+ Ok(items)
+ }
+
pub fn add_node(&mut self, request: CreateNodeRequest) -> Result<DagNode, StoreError> {
let diagnostics = self.schema.validate_node(request.class, &request.payload);
let mut node = DagNode::new(
@@ -327,9 +381,16 @@ impl ProjectStore {
request.payload,
diagnostics,
);
+ node.tags = match (request.class, request.tags) {
+ (NodeClass::Note, Some(tags)) => tags,
+ (NodeClass::Note, None) => return Err(StoreError::NoteTagsRequired),
+ (_, Some(tags)) => tags,
+ (_, None) => BTreeSet::new(),
+ };
node.annotations = request.annotations;
let tx = self.connection.transaction()?;
+ ensure_known_tags(&tx, &node.tags)?;
insert_node(&tx, &node)?;
for attachment in &request.attachments {
insert_edge(&tx, &attachment.materialize(node.id))?;
@@ -419,6 +480,7 @@ impl ProjectStore {
.query_row(params![node_id.to_string()], read_node_row)
.optional()?;
node.map(|mut item| {
+ item.tags = self.load_tags(item.id)?;
item.annotations = self.load_annotations(item.id)?;
Ok(item)
})
@@ -428,8 +490,7 @@ impl ProjectStore {
pub fn list_nodes(&self, query: ListNodesQuery) -> Result<Vec<NodeSummary>, StoreError> {
let frontier_id = query.frontier_id.map(|id| id.to_string());
let class = query.class.map(|item| item.as_str().to_owned());
- let limit = i64::from(query.limit);
- let mut statement = self.connection.prepare(
+ let mut sql = String::from(
"SELECT
n.id,
n.class,
@@ -449,21 +510,42 @@ impl ProjectStore {
FROM nodes AS n
WHERE (?1 IS NULL OR n.frontier_id = ?1)
AND (?2 IS NULL OR n.class = ?2)
- AND (?3 = 1 OR n.archived = 0)
+ AND (?3 = 1 OR n.archived = 0)",
+ );
+ let mut parameters = vec![
+ frontier_id.map_or(SqlValue::Null, SqlValue::Text),
+ class.map_or(SqlValue::Null, SqlValue::Text),
+ SqlValue::Integer(i64::from(query.include_archived)),
+ ];
+ for (index, tag) in query.tags.iter().enumerate() {
+ let placeholder = parameters.len() + 1;
+ let _ = write!(
+ sql,
+ "
+ AND EXISTS (
+ SELECT 1
+ FROM node_tags AS nt{index}
+ WHERE nt{index}.node_id = n.id AND nt{index}.tag_name = ?{placeholder}
+ )"
+ );
+ parameters.push(SqlValue::Text(tag.as_str().to_owned()));
+ }
+ let limit_placeholder = parameters.len() + 1;
+ let _ = write!(
+ sql,
+ "
ORDER BY n.updated_at DESC
- LIMIT ?4",
- )?;
- let mut rows = statement.query(params![
- frontier_id,
- class,
- i64::from(query.include_archived),
- limit
- ])?;
+ LIMIT ?{limit_placeholder}"
+ );
+ parameters.push(SqlValue::Integer(i64::from(query.limit)));
+ let mut statement = self.connection.prepare(&sql)?;
+ let mut rows = statement.query(params_from_iter(parameters.iter()))?;
let mut items = Vec::new();
while let Some(row) = rows.next()? {
let diagnostics = decode_json::<NodeDiagnostics>(&row.get::<_, String>(7)?)?;
+ let node_id = parse_node_id(&row.get::<_, String>(0)?)?;
items.push(NodeSummary {
- id: parse_node_id(&row.get::<_, String>(0)?)?,
+ id: node_id,
class: parse_node_class(&row.get::<_, String>(1)?)?,
track: parse_node_track(&row.get::<_, String>(2)?)?,
frontier_id: row
@@ -476,6 +558,7 @@ impl ProjectStore {
.get::<_, Option<String>>(6)?
.map(NonEmptyText::new)
.transpose()?,
+ tags: self.load_tags(node_id)?,
diagnostic_count: diagnostics.items.len() as u64,
hidden_annotation_count: row.get::<_, i64>(10)? as u64,
created_at: decode_timestamp(&row.get::<_, String>(8)?)?,
@@ -505,7 +588,7 @@ impl ProjectStore {
) -> Result<FrontierProjection, StoreError> {
let frontier = self.load_frontier(frontier_id)?;
let mut champion_checkpoint_id = None;
- let mut candidate_checkpoint_ids = std::collections::BTreeSet::new();
+ let mut candidate_checkpoint_ids = BTreeSet::new();
let mut statement = self.connection.prepare(
"SELECT id, disposition
@@ -768,6 +851,24 @@ impl ProjectStore {
Ok(items)
}
+ fn load_tags(
+ &self,
+ node_id: fidget_spinner_core::NodeId,
+ ) -> Result<BTreeSet<TagName>, StoreError> {
+ let mut statement = self.connection.prepare(
+ "SELECT tag_name
+ FROM node_tags
+ WHERE node_id = ?1
+ ORDER BY tag_name ASC",
+ )?;
+ let mut rows = statement.query(params![node_id.to_string()])?;
+ let mut items = BTreeSet::new();
+ while let Some(row) = rows.next()? {
+ let _ = items.insert(TagName::new(row.get::<_, String>(0)?)?);
+ }
+ Ok(items)
+ }
+
fn load_frontier(
&self,
frontier_id: fidget_spinner_core::FrontierId,
@@ -817,6 +918,18 @@ fn migrate(connection: &Connection) -> Result<(), StoreError> {
created_at TEXT NOT NULL
);
+ CREATE TABLE IF NOT EXISTS tags (
+ name TEXT PRIMARY KEY,
+ description TEXT NOT NULL,
+ created_at TEXT NOT NULL
+ );
+
+ CREATE TABLE IF NOT EXISTS node_tags (
+ node_id TEXT NOT NULL REFERENCES nodes(id) ON DELETE CASCADE,
+ tag_name TEXT NOT NULL REFERENCES tags(name) ON DELETE RESTRICT,
+ PRIMARY KEY (node_id, tag_name)
+ );
+
CREATE TABLE IF NOT EXISTS node_edges (
source_id TEXT NOT NULL REFERENCES nodes(id) ON DELETE CASCADE,
target_id TEXT NOT NULL REFERENCES nodes(id) ON DELETE CASCADE,
@@ -953,6 +1066,32 @@ fn insert_node(tx: &Transaction<'_>, node: &DagNode) -> Result<(), StoreError> {
for annotation in &node.annotations {
insert_annotation(tx, node.id, annotation)?;
}
+ for tag in &node.tags {
+ insert_node_tag(tx, node.id, tag)?;
+ }
+ Ok(())
+}
+
+fn insert_tag(tx: &Transaction<'_>, tag: &TagRecord) -> Result<(), StoreError> {
+ let existing = tx
+ .query_row(
+ "SELECT 1 FROM tags WHERE name = ?1",
+ params![tag.name.as_str()],
+ |row| row.get::<_, i64>(0),
+ )
+ .optional()?;
+ if existing.is_some() {
+ return Err(StoreError::DuplicateTag(tag.name.clone()));
+ }
+ let _ = tx.execute(
+ "INSERT INTO tags (name, description, created_at)
+ VALUES (?1, ?2, ?3)",
+ params![
+ tag.name.as_str(),
+ tag.description.as_str(),
+ encode_timestamp(tag.created_at)?,
+ ],
+ )?;
Ok(())
}
@@ -976,6 +1115,32 @@ fn insert_annotation(
Ok(())
}
/// Links `tag` to `node_id` via the `node_tags` join table.
///
/// The table declares REFERENCES constraints on both columns; callers
/// validate tag existence up front via `ensure_known_tags` so unknown
/// tags produce a domain error rather than a constraint failure.
fn insert_node_tag(
    tx: &Transaction<'_>,
    node_id: fidget_spinner_core::NodeId,
    tag: &TagName,
) -> Result<(), StoreError> {
    let _ = tx.execute(
        "INSERT INTO node_tags (node_id, tag_name)
         VALUES (?1, ?2)",
        params![node_id.to_string(), tag.as_str()],
    )?;
    Ok(())
}
+
+fn ensure_known_tags(tx: &Transaction<'_>, tags: &BTreeSet<TagName>) -> Result<(), StoreError> {
+ let mut statement = tx.prepare("SELECT 1 FROM tags WHERE name = ?1")?;
+ for tag in tags {
+ let exists = statement
+ .query_row(params![tag.as_str()], |row| row.get::<_, i64>(0))
+ .optional()?;
+ if exists.is_none() {
+ return Err(StoreError::UnknownTag(tag.clone()));
+ }
+ }
+ Ok(())
+}
+
fn insert_edge(tx: &Transaction<'_>, edge: &DagEdge) -> Result<(), StoreError> {
let _ = tx.execute(
"INSERT OR IGNORE INTO node_edges (source_id, target_id, kind)
@@ -1248,6 +1413,7 @@ fn read_node_row(row: &rusqlite::Row<'_>) -> Result<DagNode, rusqlite::Error> {
.map(NonEmptyText::new)
.transpose()
.map_err(core_to_sql_conversion_error)?,
+ tags: BTreeSet::new(),
payload,
annotations: Vec::new(),
diagnostics,
@@ -1626,6 +1792,8 @@ fn encode_frontier_verdict(verdict: FrontierVerdict) -> &'static str {
#[cfg(test)]
mod tests {
+ use std::collections::BTreeSet;
+
use serde_json::json;
use super::{
@@ -1633,7 +1801,7 @@ mod tests {
};
use fidget_spinner_core::{
CheckpointSnapshotRef, EvaluationProtocol, FrontierContract, MetricSpec, MetricUnit,
- NodeAnnotation, NodeClass, NodePayload, NonEmptyText, OptimizationObjective,
+ NodeAnnotation, NodeClass, NodePayload, NonEmptyText, OptimizationObjective, TagName,
};
fn temp_project_root(label: &str) -> camino::Utf8PathBuf {
@@ -1672,6 +1840,7 @@ mod tests {
frontier_id: None,
title: NonEmptyText::new("feature sketch")?,
summary: Some(NonEmptyText::new("research note")?),
+ tags: None,
payload: NodePayload::with_schema(
store.schema().schema_ref(),
super::json_object(json!({"body": "freeform"}))?,
@@ -1708,15 +1877,13 @@ mod tests {
contract: FrontierContract {
objective: NonEmptyText::new("improve wall time")?,
evaluation: EvaluationProtocol {
- benchmark_suites: std::collections::BTreeSet::from([NonEmptyText::new(
- "smoke",
- )?]),
+ benchmark_suites: BTreeSet::from([NonEmptyText::new("smoke")?]),
primary_metric: MetricSpec {
metric_key: NonEmptyText::new("wall_clock_s")?,
unit: MetricUnit::Seconds,
objective: OptimizationObjective::Minimize,
},
- supporting_metrics: std::collections::BTreeSet::new(),
+ supporting_metrics: BTreeSet::new(),
},
promotion_criteria: vec![NonEmptyText::new("strict speedup")?],
},
@@ -1748,6 +1915,7 @@ mod tests {
frontier_id: None,
title: NonEmptyText::new("quick note")?,
summary: None,
+ tags: Some(BTreeSet::new()),
payload: NodePayload::with_schema(
store.schema().schema_ref(),
super::json_object(json!({"body": "hello"}))?,
@@ -1783,15 +1951,13 @@ mod tests {
contract: FrontierContract {
objective: NonEmptyText::new("optimize")?,
evaluation: EvaluationProtocol {
- benchmark_suites: std::collections::BTreeSet::from([NonEmptyText::new(
- "smoke",
- )?]),
+ benchmark_suites: BTreeSet::from([NonEmptyText::new("smoke")?]),
primary_metric: MetricSpec {
metric_key: NonEmptyText::new("wall_clock_s")?,
unit: MetricUnit::Seconds,
objective: OptimizationObjective::Minimize,
},
- supporting_metrics: std::collections::BTreeSet::new(),
+ supporting_metrics: BTreeSet::new(),
},
promotion_criteria: vec![NonEmptyText::new("faster")?],
},
@@ -1807,4 +1973,75 @@ mod tests {
assert_eq!(nodes[0].class, NodeClass::Contract);
Ok(())
}
+
    #[test]
    // Note nodes must state their tags explicitly: `tags: None` is rejected
    // with `NoteTagsRequired`; an untagged note must pass `Some(empty set)`.
    fn notes_require_explicit_tags_even_when_empty() -> Result<(), super::StoreError> {
        let root = temp_project_root("note-tags-required");
        let mut store = ProjectStore::init(
            &root,
            NonEmptyText::new("test project")?,
            NonEmptyText::new("local.test")?,
        )?;

        // Everything about this request is valid except the omitted tag list.
        let result = store.add_node(CreateNodeRequest {
            class: NodeClass::Note,
            frontier_id: None,
            title: NonEmptyText::new("quick note")?,
            summary: None,
            tags: None,
            payload: NodePayload::with_schema(
                store.schema().schema_ref(),
                super::json_object(json!({"body": "hello"}))?,
            ),
            annotations: Vec::new(),
            attachments: Vec::new(),
        });

        assert!(matches!(result, Err(super::StoreError::NoteTagsRequired)));
        Ok(())
    }
+
    #[test]
    // End-to-end tag flow: register tags, attach them to a node, read them
    // back via get_node, and filter the node listing by a single tag.
    fn tags_round_trip_and_filter_node_list() -> Result<(), super::StoreError> {
        let root = temp_project_root("tag-roundtrip");
        let mut store = ProjectStore::init(
            &root,
            NonEmptyText::new("test project")?,
            NonEmptyText::new("local.test")?,
        )?;
        // Tags must be registered before they can be attached to nodes.
        let cuts = store.add_tag(
            TagName::new("cuts/core")?,
            NonEmptyText::new("Core cutset work")?,
        )?;
        let heuristics = store.add_tag(
            TagName::new("heuristic")?,
            NonEmptyText::new("Heuristic tuning")?,
        )?;
        let note = store.add_node(CreateNodeRequest {
            class: NodeClass::Note,
            frontier_id: None,
            title: NonEmptyText::new("tagged note")?,
            summary: None,
            tags: Some(BTreeSet::from([cuts.name.clone(), heuristics.name.clone()])),
            payload: NodePayload::with_schema(
                store.schema().schema_ref(),
                super::json_object(json!({"body": "tagged"}))?,
            ),
            annotations: Vec::new(),
            attachments: Vec::new(),
        })?;

        // Round trip: both tags come back on a full node load.
        let loaded = store
            .get_node(note.id)?
            .ok_or(super::StoreError::NodeNotFound(note.id))?;
        assert_eq!(loaded.tags.len(), 2);

        // Filtering by one tag still returns the node with its full tag set.
        let filtered = store.list_nodes(ListNodesQuery {
            tags: BTreeSet::from([cuts.name]),
            ..ListNodesQuery::default()
        })?;
        assert_eq!(filtered.len(), 1);
        assert_eq!(filtered[0].tags.len(), 2);
        Ok(())
    }
}