swarm repositories / source
aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--Cargo.lock360
-rw-r--r--Cargo.toml9
-rw-r--r--README.md84
-rw-r--r--crates/fidget-spinner-cli/Cargo.toml12
-rw-r--r--crates/fidget-spinner-cli/src/main.rs273
-rw-r--r--crates/fidget-spinner-cli/src/ui.rs600
-rw-r--r--crates/fidget-spinner-core/Cargo.toml6
-rw-r--r--crates/fidget-spinner-core/src/error.rs6
-rw-r--r--crates/fidget-spinner-core/src/lib.rs11
-rw-r--r--crates/fidget-spinner-core/src/model.rs178
-rw-r--r--crates/fidget-spinner-store-sqlite/Cargo.toml6
-rw-r--r--crates/fidget-spinner-store-sqlite/src/lib.rs285
-rw-r--r--docs/architecture.md45
-rw-r--r--docs/product-spec.md10
14 files changed, 1814 insertions, 71 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 1dc802c..f43a129 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -59,6 +59,64 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
[[package]]
+name = "atomic-waker"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
+
+[[package]]
+name = "axum"
+version = "0.8.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8"
+dependencies = [
+ "axum-core",
+ "bytes",
+ "form_urlencoded",
+ "futures-util",
+ "http",
+ "http-body",
+ "http-body-util",
+ "hyper",
+ "hyper-util",
+ "itoa",
+ "matchit",
+ "memchr",
+ "mime",
+ "percent-encoding",
+ "pin-project-lite",
+ "serde_core",
+ "serde_json",
+ "serde_path_to_error",
+ "serde_urlencoded",
+ "sync_wrapper",
+ "tokio",
+ "tower",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "axum-core"
+version = "0.5.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "http",
+ "http-body",
+ "http-body-util",
+ "mime",
+ "pin-project-lite",
+ "sync_wrapper",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
name = "bitflags"
version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -217,15 +275,19 @@ checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
name = "fidget-spinner-cli"
version = "0.1.0"
dependencies = [
+ "axum",
"camino",
"clap",
"dirs",
"fidget-spinner-core",
"fidget-spinner-store-sqlite",
"libmcp",
+ "linkify",
+ "maud",
"serde",
"serde_json",
"time",
+ "tokio",
"uuid",
]
@@ -277,6 +339,39 @@ dependencies = [
]
[[package]]
+name = "futures-channel"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d"
+dependencies = [
+ "futures-core",
+]
+
+[[package]]
+name = "futures-core"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d"
+
+[[package]]
+name = "futures-task"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393"
+
+[[package]]
+name = "futures-util"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6"
+dependencies = [
+ "futures-core",
+ "futures-task",
+ "pin-project-lite",
+ "slab",
+]
+
+[[package]]
name = "getrandom"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -331,6 +426,87 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
+name = "http"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a"
+dependencies = [
+ "bytes",
+ "itoa",
+]
+
+[[package]]
+name = "http-body"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
+dependencies = [
+ "bytes",
+ "http",
+]
+
+[[package]]
+name = "http-body-util"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "http",
+ "http-body",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "httparse"
+version = "1.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
+
+[[package]]
+name = "httpdate"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
+
+[[package]]
+name = "hyper"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11"
+dependencies = [
+ "atomic-waker",
+ "bytes",
+ "futures-channel",
+ "futures-core",
+ "http",
+ "http-body",
+ "httparse",
+ "httpdate",
+ "itoa",
+ "pin-project-lite",
+ "pin-utils",
+ "smallvec",
+ "tokio",
+]
+
+[[package]]
+name = "hyper-util"
+version = "0.1.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0"
+dependencies = [
+ "bytes",
+ "http",
+ "http-body",
+ "hyper",
+ "pin-project-lite",
+ "tokio",
+ "tower-service",
+]
+
+[[package]]
name = "icu_collections"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -487,6 +663,7 @@ checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d"
[[package]]
name = "libmcp"
version = "1.1.0"
+source = "git+https://git.swarm.moe/libmcp.git?rev=84e898d9ba699451d5d13fe384e7bbe220564bc1#84e898d9ba699451d5d13fe384e7bbe220564bc1"
dependencies = [
"schemars",
"serde",
@@ -517,6 +694,15 @@ dependencies = [
]
[[package]]
+name = "linkify"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1dfa36d52c581e9ec783a7ce2a5e0143da6237be5811a0b3153fedfdbe9f780"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
name = "litemap"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -529,12 +715,59 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
[[package]]
+name = "matchit"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3"
+
+[[package]]
+name = "maud"
+version = "0.27.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8156733e27020ea5c684db5beac5d1d611e1272ab17901a49466294b84fc217e"
+dependencies = [
+ "axum-core",
+ "http",
+ "itoa",
+ "maud_macros",
+]
+
+[[package]]
+name = "maud_macros"
+version = "0.27.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7261b00f3952f617899bc012e3dbd56e4f0110a038175929fa5d18e5a19913ca"
+dependencies = [
+ "proc-macro2",
+ "proc-macro2-diagnostics",
+ "quote",
+ "syn",
+]
+
+[[package]]
name = "memchr"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79"
[[package]]
+name = "mime"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
+
+[[package]]
+name = "mio"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc"
+dependencies = [
+ "libc",
+ "wasi",
+ "windows-sys",
+]
+
+[[package]]
name = "num-conv"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -571,6 +804,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd"
[[package]]
+name = "pin-utils"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+
+[[package]]
name = "pkg-config"
version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -611,6 +850,18 @@ dependencies = [
]
[[package]]
+name = "proc-macro2-diagnostics"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "version_check",
+]
+
+[[package]]
name = "quote"
version = "1.0.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -678,6 +929,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
[[package]]
+name = "ryu"
+version = "1.0.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f"
+
+[[package]]
name = "schemars"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -763,18 +1020,57 @@ dependencies = [
]
[[package]]
+name = "serde_path_to_error"
+version = "0.1.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457"
+dependencies = [
+ "itoa",
+ "serde",
+ "serde_core",
+]
+
+[[package]]
+name = "serde_urlencoded"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
+dependencies = [
+ "form_urlencoded",
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
+name = "slab"
+version = "0.4.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5"
+
+[[package]]
name = "smallvec"
version = "1.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
[[package]]
+name = "socket2"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e"
+dependencies = [
+ "libc",
+ "windows-sys",
+]
+
+[[package]]
name = "stable_deref_trait"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -798,6 +1094,12 @@ dependencies = [
]
[[package]]
+name = "sync_wrapper"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263"
+
+[[package]]
name = "synstructure"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -876,8 +1178,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d"
dependencies = [
"bytes",
+ "libc",
+ "mio",
"pin-project-lite",
+ "socket2",
"tokio-macros",
+ "windows-sys",
]
[[package]]
@@ -892,6 +1198,54 @@ dependencies = [
]
[[package]]
+name = "tower"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4"
+dependencies = [
+ "futures-core",
+ "futures-util",
+ "pin-project-lite",
+ "sync_wrapper",
+ "tokio",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "tower-layer"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
+
+[[package]]
+name = "tower-service"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
+
+[[package]]
+name = "tracing"
+version = "0.1.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100"
+dependencies = [
+ "log",
+ "pin-project-lite",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a"
+dependencies = [
+ "once_cell",
+]
+
+[[package]]
name = "unicode-ident"
version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -946,6 +1300,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
+name = "version_check"
+version = "0.9.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
+
+[[package]]
name = "wasi"
version = "0.11.1+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/Cargo.toml b/Cargo.toml
index d829377..32172ad 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -7,20 +7,29 @@ members = [
resolver = "3"
[workspace.package]
+categories = ["development-tools", "command-line-utilities"]
+description = "Local-first experimental DAG, MCP server, and research CLI for long-running optimization work."
edition = "2024"
+keywords = ["mcp", "research", "optimization", "dag", "tooling"]
license = "MIT"
+readme = "README.md"
+repository = "https://git.swarm.moe/fidget_spinner.git"
rust-version = "1.94"
version = "0.1.0"
[workspace.dependencies]
+axum = "0.8"
camino = { version = "1", features = ["serde1"] }
clap = { version = "4.5", features = ["derive"] }
dirs = "6"
+linkify = "0.10"
+maud = { version = "0.27", features = ["axum"] }
rusqlite = { version = "0.37", features = ["bundled", "time"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1"
thiserror = "2"
time = { version = "0.3", features = ["formatting", "macros", "parsing", "serde"] }
+tokio = { version = "1", features = ["net", "rt-multi-thread"] }
uuid = { version = "1", features = ["serde", "v7"] }
[workspace.lints.rust]
diff --git a/README.md b/README.md
index 2651497..4540fcd 100644
--- a/README.md
+++ b/README.md
@@ -3,28 +3,30 @@
Fidget Spinner is a local-first, agent-first experimental DAG for autonomous
program optimization and research.
-The current MVP is built around four ideas:
+It is aimed at the ugly, practical problem of replacing sprawling experiment
+markdown in worktree-heavy optimization projects such as `libgrid` with a
+structured local system of record.
+
+The current shape is built around four ideas:
- the DAG is canonical truth
- frontier state is a derived projection
- project payload schemas are local and flexible
- core-path experiment closure is atomic
-The immediate target is not open-ended science in the abstract. It is the ugly,
-practical problem of replacing gigantic freeform experiment markdown in
-worktree-heavy optimization projects such as `libgrid`.
-
-## Current MVP
+## Current Scope
Implemented today:
- typed Rust core model
- per-project SQLite store under `.fidget_spinner/`
- project-local schema file
+- light-touch project field types: `string`, `numeric`, `boolean`, `timestamp`
- hidden and visible node annotations
- core-path and off-path node classes
-- CLI for bootstrap and repair
+- CLI for local project work
- hardened stdio MCP host via `mcp serve`
+- minimal web navigator via `ui serve`
- replay-aware disposable MCP worker runtime
- MCP health and telemetry tools
- bundled `fidget-spinner` base skill
@@ -33,7 +35,7 @@ Implemented today:
Not implemented yet:
- long-lived daemon
-- web UI
+- full web UI
- remote runners
- strong markdown migration
- cross-project indexing
@@ -64,28 +66,54 @@ cargo run -p fidget-spinner-cli -- frontier init \
Record low-ceremony off-path work:
```bash
+cargo run -p fidget-spinner-cli -- tag add \
+ --project . \
+ --name dogfood/mvp \
+ --description "Self-hosted MVP dogfood notes"
+```
+
+```bash
cargo run -p fidget-spinner-cli -- research add \
--project . \
--title "next feature slate" \
--body "Investigate pruning, richer projections, and libgrid schema presets."
```
+```bash
+cargo run -p fidget-spinner-cli -- note quick \
+ --project . \
+ --title "first tagged note" \
+ --body "Tag-aware note capture is live." \
+ --tag dogfood/mvp
+```
+
Serve the local MCP surface in unbound mode:
```bash
cargo run -p fidget-spinner-cli -- mcp serve
```
+Serve the minimal local navigator:
+
+```bash
+cargo run -p fidget-spinner-cli -- ui serve --project . --bind 127.0.0.1:8913
+```
+
Then bind the session from the client with:
```json
{"name":"project.bind","arguments":{"path":"<project-root-or-nested-path>"}}
```
+If the target root is an existing empty directory, `project.bind` now
+bootstraps `.fidget_spinner/` automatically instead of requiring a separate
+`init` step. Non-empty uninitialized directories still fail rather than being
+guessed into existence.
+
Install the bundled skills into Codex:
```bash
-./scripts/install-codex-skill.sh
+cargo run -p fidget-spinner-cli -- skill install
```
## Store Layout
@@ -101,7 +129,12 @@ Each initialized project gets:
```
`schema.json` is the model-facing contract for project-local payload fields and
-their validation tiers.
+their validation tiers. Fields may now optionally declare a light-touch
+`value_type` of `string`, `numeric`, `boolean`, or `timestamp`; mismatches are
+diagnostic warnings rather than ingest blockers.
+
+`.fidget_spinner/` is local state. In git-backed projects it usually belongs in
+`.gitignore` or `.git/info/exclude`.
## Model-Facing Surface
@@ -112,6 +145,8 @@ The current MCP tools are:
- `project.bind`
- `project.status`
- `project.schema`
+- `tag.add`
+- `tag.list`
- `frontier.list`
- `frontier.status`
- `frontier.init`
@@ -132,32 +167,35 @@ JSON-RPC session and delegates tool execution to an internal worker subprocess.
Safe replay is only allowed for explicitly read-only operations and resources.
Mutating tools are never auto-replayed after worker failure.
+Notes now require an explicit `tags` list. Tags are repo-local registry entries
+created with `tag.add`, each with a required human description. `note.quick`
+accepts `tags: []` when no existing tag applies, but the field itself is still
+mandatory so note classification is always conscious.
+
The intended flow is:
1. inspect `system.health`
2. `project.bind` to the target project root or any nested path inside it
-3. read the schema and frontier
-4. pull context from the DAG
-5. use cheap off-path writes liberally
-6. record a `change` before core-path work
-7. seal core-path work with one atomic `experiment.close`
+3. read `project.status`, `tag.list`, and `frontier.list`
+4. read `project.schema` only when payload rules are actually relevant
+5. pull context from the DAG
+6. use cheap off-path writes liberally
+7. record a `change` before core-path work
+8. seal core-path work with one atomic `experiment.close`
-## Dogfood Reality
+## Git-Backed Vs Plain Local Projects
-This repository is suitable for off-path dogfood even though it is not
-currently a git repo.
-
-That means:
+Off-path work does not require git. You can initialize a local project and use:
- `research add`
+- `tag add`
- `note quick`
- `node annotate`
- `mcp serve`
-all work here today.
-
Full core-path experiment closure needs a real git-backed project, such as the
-target `libgrid` worktree.
+target `libgrid` worktree, because checkpoints and champion capture are git
+backed.
## Workspace Layout
diff --git a/crates/fidget-spinner-cli/Cargo.toml b/crates/fidget-spinner-cli/Cargo.toml
index 51d3cd8..bf8ffb7 100644
--- a/crates/fidget-spinner-cli/Cargo.toml
+++ b/crates/fidget-spinner-cli/Cargo.toml
@@ -1,22 +1,30 @@
[package]
name = "fidget-spinner-cli"
-description = "Thin local entrypoint for Fidget Spinner"
+categories.workspace = true
+description = "CLI, MCP server, and local navigator for Fidget Spinner"
edition.workspace = true
+keywords.workspace = true
license.workspace = true
publish = false
+readme.workspace = true
+repository.workspace = true
rust-version.workspace = true
version.workspace = true
[dependencies]
+axum.workspace = true
camino.workspace = true
clap.workspace = true
dirs.workspace = true
fidget-spinner-core = { path = "../fidget-spinner-core" }
fidget-spinner-store-sqlite = { path = "../fidget-spinner-store-sqlite" }
-libmcp = { path = "../../../libmcp/crates/libmcp" }
+linkify.workspace = true
+libmcp = { git = "https://git.swarm.moe/libmcp.git", rev = "84e898d9ba699451d5d13fe384e7bbe220564bc1" }
+maud.workspace = true
serde.workspace = true
serde_json.workspace = true
time.workspace = true
+tokio.workspace = true
uuid.workspace = true
[lints]
diff --git a/crates/fidget-spinner-cli/src/main.rs b/crates/fidget-spinner-cli/src/main.rs
index 9b2b8ae..fe4cb5f 100644
--- a/crates/fidget-spinner-cli/src/main.rs
+++ b/crates/fidget-spinner-cli/src/main.rs
@@ -1,8 +1,10 @@
mod bundled_skill;
mod mcp;
+mod ui;
use std::collections::{BTreeMap, BTreeSet};
use std::fs;
+use std::net::SocketAddr;
use std::path::{Path, PathBuf};
use camino::{Utf8Path, Utf8PathBuf};
@@ -10,7 +12,7 @@ use clap::{Args, Parser, Subcommand, ValueEnum};
use fidget_spinner_core::{
AnnotationVisibility, CodeSnapshotRef, CommandRecipe, ExecutionBackend, FrontierContract,
FrontierNote, FrontierVerdict, GitCommitHash, MetricObservation, MetricSpec, MetricUnit,
- NodeAnnotation, NodeClass, NodePayload, NonEmptyText, OptimizationObjective,
+ NodeAnnotation, NodeClass, NodePayload, NonEmptyText, OptimizationObjective, TagName,
};
use fidget_spinner_store_sqlite::{
CloseExperimentRequest, CreateFrontierRequest, CreateNodeRequest, EdgeAttachment,
@@ -21,7 +23,11 @@ use serde_json::{Map, Value, json};
use uuid::Uuid;
#[derive(Parser)]
-#[command(author, version, about = "Fidget Spinner local project CLI")]
+#[command(
+ author,
+ version,
+ about = "Fidget Spinner CLI, MCP server, and local navigator"
+)]
struct Cli {
#[command(subcommand)]
command: Command,
@@ -29,29 +35,48 @@ struct Cli {
#[derive(Subcommand)]
enum Command {
+ /// Initialize a project-local `.fidget_spinner/` store.
Init(InitArgs),
+ /// Read the local project payload schema.
Schema {
#[command(subcommand)]
command: SchemaCommand,
},
+ /// Create and inspect frontiers.
Frontier {
#[command(subcommand)]
command: FrontierCommand,
},
+ /// Create, inspect, and mutate DAG nodes.
Node {
#[command(subcommand)]
command: NodeCommand,
},
+ /// Record terse off-path notes.
Note(NoteCommand),
+ /// Manage the repo-local tag registry.
+ Tag {
+ #[command(subcommand)]
+ command: TagCommand,
+ },
+ /// Record off-path research and enabling work.
Research(ResearchCommand),
+ /// Close a core-path experiment atomically.
Experiment {
#[command(subcommand)]
command: ExperimentCommand,
},
+ /// Serve the hardened stdio MCP endpoint.
Mcp {
#[command(subcommand)]
command: McpCommand,
},
+ /// Serve the minimal local web navigator.
+ Ui {
+ #[command(subcommand)]
+ command: UiCommand,
+ },
+ /// Inspect or install bundled Codex skills.
Skill {
#[command(subcommand)]
command: SkillCommand,
@@ -60,22 +85,28 @@ enum Command {
#[derive(Args)]
struct InitArgs {
+ /// Project root to initialize.
#[arg(long, default_value = ".")]
project: PathBuf,
+ /// Human-facing project name. Defaults to the directory name.
#[arg(long)]
name: Option<String>,
+ /// Payload schema namespace written into `.fidget_spinner/schema.json`.
#[arg(long, default_value = "local.project")]
namespace: String,
}
#[derive(Subcommand)]
enum SchemaCommand {
+ /// Show the current project schema as JSON.
Show(ProjectArg),
}
#[derive(Subcommand)]
enum FrontierCommand {
+ /// Create a frontier and root contract node.
Init(FrontierInitArgs),
+ /// Show one frontier projection or list frontiers when omitted.
Status(FrontierStatusArgs),
}
@@ -115,10 +146,15 @@ struct FrontierStatusArgs {
#[derive(Subcommand)]
enum NodeCommand {
+ /// Create a generic DAG node.
Add(NodeAddArgs),
+ /// List recent nodes.
List(NodeListArgs),
+ /// Show one node in full.
Show(NodeShowArgs),
+ /// Attach an annotation to a node.
Annotate(NodeAnnotateArgs),
+ /// Archive a node without deleting it.
Archive(NodeArchiveArgs),
}
@@ -138,6 +174,8 @@ struct NodeAddArgs {
payload_json: Option<String>,
#[arg(long = "payload-file")]
payload_file: Option<PathBuf>,
+ #[command(flatten)]
+ tag_selection: ExplicitTagSelectionArgs,
#[arg(long = "field")]
fields: Vec<String>,
#[arg(long = "annotation")]
@@ -154,12 +192,22 @@ struct NodeListArgs {
frontier: Option<String>,
#[arg(long, value_enum)]
class: Option<CliNodeClass>,
+ #[arg(long = "tag")]
+ tags: Vec<String>,
#[arg(long)]
include_archived: bool,
#[arg(long, default_value_t = 20)]
limit: u32,
}
+#[derive(Args, Default)]
+struct ExplicitTagSelectionArgs {
+ #[arg(long = "tag")]
+ tags: Vec<String>,
+ #[arg(long, conflicts_with = "tags")]
+ no_tags: bool,
+}
+
#[derive(Args)]
struct NodeShowArgs {
#[command(flatten)]
@@ -198,9 +246,18 @@ struct NoteCommand {
#[derive(Subcommand)]
enum NoteSubcommand {
+ /// Record a quick off-path note.
Quick(QuickNoteArgs),
}
+#[derive(Subcommand)]
+enum TagCommand {
+ /// Register a new repo-local tag.
+ Add(TagAddArgs),
+ /// List registered repo-local tags.
+ List(ProjectArg),
+}
+
#[derive(Args)]
struct ResearchCommand {
#[command(subcommand)]
@@ -209,6 +266,7 @@ struct ResearchCommand {
#[derive(Subcommand)]
enum ResearchSubcommand {
+ /// Record off-path research or enabling work.
Add(QuickResearchArgs),
}
@@ -222,11 +280,23 @@ struct QuickNoteArgs {
title: String,
#[arg(long)]
body: String,
+ #[command(flatten)]
+ tag_selection: ExplicitTagSelectionArgs,
#[arg(long = "parent")]
parents: Vec<String>,
}
#[derive(Args)]
+struct TagAddArgs {
+ #[command(flatten)]
+ project: ProjectArg,
+ #[arg(long)]
+ name: String,
+ #[arg(long)]
+ description: String,
+}
+
+#[derive(Args)]
struct QuickResearchArgs {
#[command(flatten)]
project: ProjectArg,
@@ -244,16 +314,24 @@ struct QuickResearchArgs {
#[derive(Subcommand)]
enum ExperimentCommand {
+ /// Close a core-path experiment with checkpoint, run, note, and verdict.
Close(ExperimentCloseArgs),
}
#[derive(Subcommand)]
enum McpCommand {
+ /// Serve the public stdio MCP host. If `--project` is omitted, the host starts unbound.
Serve(McpServeArgs),
#[command(hide = true)]
Worker(McpWorkerArgs),
}
+#[derive(Subcommand)]
+enum UiCommand {
+ /// Serve the local read-only navigator.
+ Serve(UiServeArgs),
+}
+
#[derive(Args)]
struct ExperimentCloseArgs {
#[command(flatten)]
@@ -304,33 +382,41 @@ struct ExperimentCloseArgs {
#[derive(Subcommand)]
enum SkillCommand {
+ /// List bundled skills.
List,
+ /// Install bundled skills into a Codex skill directory.
Install(SkillInstallArgs),
+ /// Print one bundled skill body.
Show(SkillShowArgs),
}
#[derive(Args)]
struct SkillInstallArgs {
+ /// Bundled skill name. Defaults to all bundled skills.
#[arg(long)]
name: Option<String>,
+ /// Destination root. Defaults to `~/.codex/skills`.
#[arg(long)]
destination: Option<PathBuf>,
}
#[derive(Args)]
struct SkillShowArgs {
+ /// Bundled skill name. Defaults to `fidget-spinner`.
#[arg(long)]
name: Option<String>,
}
#[derive(Args)]
struct ProjectArg {
+ /// Project root or any nested path inside a project containing `.fidget_spinner/`.
#[arg(long, default_value = ".")]
project: PathBuf,
}
#[derive(Args)]
struct McpServeArgs {
+ /// Optional initial project binding. When omitted, the MCP starts unbound.
#[arg(long)]
project: Option<PathBuf>,
}
@@ -341,6 +427,18 @@ struct McpWorkerArgs {
project: PathBuf,
}
+#[derive(Args)]
+struct UiServeArgs {
+ #[command(flatten)]
+ project: ProjectArg,
+ /// Bind address for the local navigator.
+ #[arg(long, default_value = "127.0.0.1:8913")]
+ bind: SocketAddr,
+ /// Maximum rows rendered in list views.
+ #[arg(long, default_value_t = 200)]
+ limit: u32,
+}
+
#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)]
enum CliNodeClass {
Contract,
@@ -416,6 +514,10 @@ fn run() -> Result<(), StoreError> {
Command::Note(command) => match command.command {
NoteSubcommand::Quick(args) => run_quick_note(args),
},
+ Command::Tag { command } => match command {
+ TagCommand::Add(args) => run_tag_add(args),
+ TagCommand::List(project) => run_tag_list(project),
+ },
Command::Research(command) => match command.command {
ResearchSubcommand::Add(args) => run_quick_research(args),
},
@@ -426,6 +528,9 @@ fn run() -> Result<(), StoreError> {
McpCommand::Serve(args) => mcp::serve(args.project),
McpCommand::Worker(args) => mcp::serve_worker(args.project),
},
+ Command::Ui { command } => match command {
+ UiCommand::Serve(args) => run_ui_serve(args),
+ },
Command::Skill { command } => match command {
SkillCommand::List => print_json(&bundled_skill::bundled_skill_summaries()),
SkillCommand::Install(args) => run_skill_install(args),
@@ -439,16 +544,17 @@ fn run() -> Result<(), StoreError> {
fn run_init(args: InitArgs) -> Result<(), StoreError> {
let project_root = utf8_path(args.project);
- let display_name = NonEmptyText::new(args.name.unwrap_or_else(|| {
- project_root
- .file_name()
- .map_or_else(|| "fidget-spinner-project".to_owned(), ToOwned::to_owned)
- }))?;
+ let display_name = args
+ .name
+ .map(NonEmptyText::new)
+ .transpose()?
+ .unwrap_or(default_display_name_for_root(&project_root)?);
let namespace = NonEmptyText::new(args.namespace)?;
let store = ProjectStore::init(&project_root, display_name, namespace)?;
println!("initialized {}", store.state_root());
println!("project: {}", store.config().display_name);
println!("schema: {}", store.state_root().join("schema.json"));
+ maybe_print_gitignore_hint(&project_root)?;
Ok(())
}
@@ -498,6 +604,7 @@ fn run_node_add(args: NodeAddArgs) -> Result<(), StoreError> {
.as_deref()
.map(parse_frontier_id)
.transpose()?;
+ let tags = optional_cli_tags(args.tag_selection, args.class == CliNodeClass::Note)?;
let payload = load_payload(
store.schema().schema_ref(),
args.payload_json,
@@ -514,6 +621,7 @@ fn run_node_add(args: NodeAddArgs) -> Result<(), StoreError> {
frontier_id,
title: NonEmptyText::new(args.title)?,
summary: args.summary.map(NonEmptyText::new).transpose()?,
+ tags,
payload,
annotations,
attachments: lineage_attachments(args.parents)?,
@@ -530,6 +638,7 @@ fn run_node_list(args: NodeListArgs) -> Result<(), StoreError> {
.map(parse_frontier_id)
.transpose()?,
class: args.class.map(Into::into),
+ tags: parse_tag_set(args.tags)?,
include_archived: args.include_archived,
limit: args.limit,
})?;
@@ -585,6 +694,7 @@ fn run_quick_note(args: QuickNoteArgs) -> Result<(), StoreError> {
.transpose()?,
title: NonEmptyText::new(args.title)?,
summary: None,
+ tags: Some(explicit_cli_tags(args.tag_selection)?),
payload,
annotations: Vec::new(),
attachments: lineage_attachments(args.parents)?,
@@ -592,6 +702,20 @@ fn run_quick_note(args: QuickNoteArgs) -> Result<(), StoreError> {
print_json(&node)
}
+fn run_tag_add(args: TagAddArgs) -> Result<(), StoreError> {
+ let mut store = open_store(&args.project.project)?;
+ let tag = store.add_tag(
+ TagName::new(args.name)?,
+ NonEmptyText::new(args.description)?,
+ )?;
+ print_json(&tag)
+}
+
+fn run_tag_list(args: ProjectArg) -> Result<(), StoreError> {
+ let store = open_store(&args.project)?;
+ print_json(&store.list_tags()?)
+}
+
fn run_quick_research(args: QuickResearchArgs) -> Result<(), StoreError> {
let mut store = open_store(&args.project.project)?;
let payload = NodePayload::with_schema(
@@ -607,6 +731,7 @@ fn run_quick_research(args: QuickResearchArgs) -> Result<(), StoreError> {
.transpose()?,
title: NonEmptyText::new(args.title)?,
summary: args.summary.map(NonEmptyText::new).transpose()?,
+ tags: None,
payload,
annotations: Vec::new(),
attachments: lineage_attachments(args.parents)?,
@@ -684,6 +809,10 @@ fn run_skill_install(args: SkillInstallArgs) -> Result<(), StoreError> {
Ok(())
}
/// CLI handler for `ui serve`: runs the HTML navigator server (blocks until
/// the server exits or fails).
fn run_ui_serve(args: UiServeArgs) -> Result<(), StoreError> {
    ui::serve(utf8_path(args.project.project), args.bind, args.limit)
}
+
fn resolve_bundled_skill(
requested_name: Option<&str>,
) -> Result<bundled_skill::BundledSkill, StoreError> {
@@ -712,10 +841,88 @@ fn open_store(path: &Path) -> Result<ProjectStore, StoreError> {
ProjectStore::open(utf8_path(path.to_path_buf()))
}
/// Opens the store at `path`, bootstrapping a fresh project when binding
/// points into an empty directory.
///
/// - An existing store is opened as-is.
/// - On `MissingProjectStore`, a bootstrap root is derived from `path`
///   (its parent when `path` is a file) and a project is initialized there
///   with name/namespace defaults derived from the directory name — but only
///   when that directory exists and is empty. A non-empty or nonexistent
///   directory re-raises the original `MissingProjectStore` for the
///   *requested* root, so we never init on top of unrelated files.
/// - Any other open error is propagated unchanged.
fn open_or_init_store_for_binding(path: &Path) -> Result<ProjectStore, StoreError> {
    let requested_root = utf8_path(path.to_path_buf());
    match ProjectStore::open(requested_root.clone()) {
        Ok(store) => Ok(store),
        Err(StoreError::MissingProjectStore(_)) => {
            let project_root = binding_bootstrap_root(&requested_root)?;
            if !is_empty_directory(&project_root)? {
                return Err(StoreError::MissingProjectStore(requested_root));
            }
            ProjectStore::init(
                &project_root,
                default_display_name_for_root(&project_root)?,
                default_namespace_for_root(&project_root)?,
            )
        }
        Err(error) => Err(error),
    }
}
+
/// Converts a `std::path` value into a `Utf8PathBuf`, replacing any invalid
/// UTF-8 sequences lossily via `to_string_lossy`.
fn utf8_path(path: impl Into<PathBuf>) -> Utf8PathBuf {
    Utf8PathBuf::from(path.into().to_string_lossy().into_owned())
}
/// Picks the directory a binding bootstrap should initialize into.
///
/// An existing *file* resolves to its parent directory (falling back to the
/// path itself when it has no parent); directories and nonexistent paths are
/// used unchanged. Only unexpected I/O errors propagate.
fn binding_bootstrap_root(path: &Utf8Path) -> Result<Utf8PathBuf, StoreError> {
    match fs::metadata(path.as_std_path()) {
        Ok(metadata) if metadata.is_file() => Ok(path
            .parent()
            .map_or_else(|| path.to_path_buf(), Utf8Path::to_path_buf)),
        Ok(_) => Ok(path.to_path_buf()),
        Err(error) if error.kind() == std::io::ErrorKind::NotFound => Ok(path.to_path_buf()),
        Err(error) => Err(StoreError::from(error)),
    }
}
+
+fn is_empty_directory(path: &Utf8Path) -> Result<bool, StoreError> {
+ match fs::metadata(path.as_std_path()) {
+ Ok(metadata) if metadata.is_dir() => {
+ let mut entries = fs::read_dir(path.as_std_path())?;
+ Ok(entries.next().transpose()?.is_none())
+ }
+ Ok(_) => Ok(false),
+ Err(error) if error.kind() == std::io::ErrorKind::NotFound => Ok(false),
+ Err(error) => Err(StoreError::from(error)),
+ }
+}
+
+fn default_display_name_for_root(project_root: &Utf8Path) -> Result<NonEmptyText, StoreError> {
+ NonEmptyText::new(
+ project_root
+ .file_name()
+ .map_or_else(|| "fidget-spinner-project".to_owned(), ToOwned::to_owned),
+ )
+ .map_err(StoreError::from)
+}
+
/// Derives the default namespace (`local.<slug>`) from the root directory
/// name; a path with no final component slugs as `"project"`.
fn default_namespace_for_root(project_root: &Utf8Path) -> Result<NonEmptyText, StoreError> {
    let slug = slugify_namespace_component(project_root.file_name().unwrap_or("project"));
    NonEmptyText::new(format!("local.{slug}")).map_err(StoreError::from)
}
+
/// Reduces arbitrary text to a namespace-safe slug: lowercased, with every
/// run of non-ASCII-alphanumeric characters collapsed into a single `_` and
/// leading/trailing separators removed. Falls back to `"project"` when
/// nothing usable remains.
fn slugify_namespace_component(raw: &str) -> String {
    // Lowercase first (Unicode-aware), then keep only ASCII alphanumeric
    // segments: splitting on every non-alphanumeric character and dropping
    // empty segments collapses separator runs and trims the ends in one go.
    let lowered: String = raw.chars().flat_map(char::to_lowercase).collect();
    let slug = lowered
        .split(|character: char| !character.is_ascii_alphanumeric())
        .filter(|segment| !segment.is_empty())
        .collect::<Vec<_>>()
        .join("_");
    if slug.is_empty() {
        "project".to_owned()
    } else {
        slug
    }
}
+
fn to_text_vec(values: Vec<String>) -> Result<Vec<NonEmptyText>, StoreError> {
values
.into_iter()
@@ -728,6 +935,35 @@ fn to_text_set(values: Vec<String>) -> Result<BTreeSet<NonEmptyText>, StoreError
to_text_vec(values).map(BTreeSet::from_iter)
}
+fn parse_tag_set(values: Vec<String>) -> Result<BTreeSet<TagName>, StoreError> {
+ values
+ .into_iter()
+ .map(TagName::new)
+ .collect::<Result<BTreeSet<_>, _>>()
+ .map_err(StoreError::from)
+}
+
/// Resolves tag flags for commands where tags are mandatory: `--no-tags`
/// yields an explicit empty set, otherwise at least one tag is required
/// (`NoteTagsRequired` when none were given).
fn explicit_cli_tags(selection: ExplicitTagSelectionArgs) -> Result<BTreeSet<TagName>, StoreError> {
    optional_cli_tags(selection, true)?.ok_or(StoreError::NoteTagsRequired)
}
+
+fn optional_cli_tags(
+ selection: ExplicitTagSelectionArgs,
+ required: bool,
+) -> Result<Option<BTreeSet<TagName>>, StoreError> {
+ if selection.no_tags {
+ return Ok(Some(BTreeSet::new()));
+ }
+ if selection.tags.is_empty() {
+ return if required {
+ Err(StoreError::NoteTagsRequired)
+ } else {
+ Ok(None)
+ };
+ }
+ Ok(Some(parse_tag_set(selection.tags)?))
+}
+
fn parse_env(values: Vec<String>) -> BTreeMap<String, String> {
values
.into_iter()
@@ -825,6 +1061,29 @@ fn run_git(project_root: &Utf8Path, args: &[&str]) -> Result<Option<String>, Sto
Ok(Some(text))
}
/// After `init`, suggests ignoring `.fidget_spinner/` when the project lives
/// inside a git work tree and the state directory is not already ignored.
///
/// `git check-ignore -q` exit codes drive the decision: 0 = already ignored
/// (stay quiet), 1 = not ignored (print the hint); any other status — or a
/// process killed by a signal (`code()` is `None`) — is treated as "unknown"
/// and silently skipped. Outside a git repository this is a no-op.
fn maybe_print_gitignore_hint(project_root: &Utf8Path) -> Result<(), StoreError> {
    if run_git(project_root, &["rev-parse", "--show-toplevel"])?.is_none() {
        return Ok(());
    }

    let status = std::process::Command::new("git")
        .arg("-C")
        .arg(project_root.as_str())
        .args(["check-ignore", "-q", ".fidget_spinner"])
        .status()?;

    match status.code() {
        Some(0) => Ok(()),
        Some(1) => {
            println!(
                "note: add `.fidget_spinner/` to `.gitignore` or `.git/info/exclude` if you do not want local state in `git status`"
            );
            Ok(())
        }
        _ => Ok(()),
    }
}
+
fn parse_metric_observation(raw: String) -> Result<MetricObservation, StoreError> {
let parts = raw.split(':').collect::<Vec<_>>();
if parts.len() != 4 {
diff --git a/crates/fidget-spinner-cli/src/ui.rs b/crates/fidget-spinner-cli/src/ui.rs
new file mode 100644
index 0000000..0cb9c05
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/ui.rs
@@ -0,0 +1,600 @@
+use std::collections::BTreeMap;
+use std::io;
+use std::net::SocketAddr;
+
+use axum::Router;
+use axum::extract::{Query, State};
+use axum::http::StatusCode;
+use axum::response::{Html, IntoResponse, Response};
+use axum::routing::get;
+use camino::Utf8PathBuf;
+use fidget_spinner_core::{DagNode, FieldValueType, NodeClass, ProjectSchema, TagName};
+use linkify::{LinkFinder, LinkKind};
+use maud::{DOCTYPE, Markup, PreEscaped, html};
+use serde::Deserialize;
+use serde_json::Value;
+use time::OffsetDateTime;
+use time::format_description::well_known::Rfc3339;
+
+use crate::{open_store, to_pretty_json};
+
/// Shared axum handler state: which project to read and how many nodes to
/// fetch per listing. The store itself is re-opened per request (see
/// `render_navigator`), so only the root path is kept here.
#[derive(Clone)]
struct NavigatorState {
    project_root: Utf8PathBuf,
    limit: u32,
}
+
/// Query string for `GET /`: an optional `?tag=` filter, validated as a
/// `TagName` before use.
#[derive(Debug, Default, Deserialize)]
struct NavigatorQuery {
    tag: Option<String>,
}
+
/// One feed row: a full node plus the display label of its frontier, when
/// the node belongs to one and the frontier is known.
struct NavigatorEntry {
    node: DagNode,
    frontier_label: Option<String>,
}
+
/// Sidebar facet: one registered tag plus how many of the recent nodes
/// carry it.
struct TagFacet {
    name: TagName,
    description: String,
    count: usize,
}
+
/// Blocks the calling thread serving the HTML navigator on `bind` until the
/// server stops or fails.
///
/// Builds a private multi-threaded tokio runtime with I/O drivers enabled
/// and runs a single-route axum app (`GET /` -> `navigator`). All failures
/// are funnelled into `StoreError` so the CLI reports one error type.
pub(crate) fn serve(
    project_root: Utf8PathBuf,
    bind: SocketAddr,
    limit: u32,
) -> Result<(), fidget_spinner_store_sqlite::StoreError> {
    let runtime = tokio::runtime::Builder::new_multi_thread()
        .enable_io()
        .build()
        .map_err(fidget_spinner_store_sqlite::StoreError::from)?;
    runtime.block_on(async move {
        let state = NavigatorState {
            project_root,
            limit,
        };
        let app = Router::new()
            .route("/", get(navigator))
            .with_state(state.clone());
        let listener = tokio::net::TcpListener::bind(bind)
            .await
            .map_err(fidget_spinner_store_sqlite::StoreError::from)?;
        println!("navigator: http://{bind}/");
        // axum's serve error is not an io::Error, so wrap its message.
        axum::serve(listener, app).await.map_err(|error| {
            fidget_spinner_store_sqlite::StoreError::Io(io::Error::other(error.to_string()))
        })
    })
}
+
/// `GET /` handler: renders the navigator page, turning any render failure
/// into a plain-text 500 response instead of dropping the connection.
async fn navigator(
    State(state): State<NavigatorState>,
    Query(query): Query<NavigatorQuery>,
) -> Response {
    match render_navigator(state, query) {
        Ok(markup) => Html(markup.into_string()).into_response(),
        Err(error) => (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("navigator render failed: {error}"),
        )
            .into_response(),
    }
}
+
/// Builds the complete navigator page for one request.
///
/// Opens the store fresh from `state.project_root` on every request, so the
/// page always reflects current on-disk state, then validates the optional
/// `?tag=` filter as a `TagName`. Two listings are loaded: the unfiltered
/// recent nodes (facet counts and the "all" link) and the tag-filtered nodes
/// that actually appear in the feed, each joined with its frontier label.
fn render_navigator(
    state: NavigatorState,
    query: NavigatorQuery,
) -> Result<Markup, fidget_spinner_store_sqlite::StoreError> {
    let store = open_store(state.project_root.as_std_path())?;
    let selected_tag = query.tag.map(TagName::new).transpose()?;
    let schema = store.schema().clone();
    // frontier id -> display label, for annotating feed entries.
    let frontiers = store
        .list_frontiers()?
        .into_iter()
        .map(|frontier| (frontier.id, frontier.label.to_string()))
        .collect::<BTreeMap<_, _>>();

    // Unfiltered slice: facet counts stay stable while a tag filter is active.
    let recent_nodes = load_recent_nodes(&store, None, state.limit)?;
    let visible_nodes = load_recent_nodes(&store, selected_tag.clone(), state.limit)?;
    let tag_facets = store
        .list_tags()?
        .into_iter()
        .map(|tag| TagFacet {
            count: recent_nodes
                .iter()
                .filter(|node| node.tags.contains(&tag.name))
                .count(),
            description: tag.description.to_string(),
            name: tag.name,
        })
        .collect::<Vec<_>>();
    let entries = visible_nodes
        .into_iter()
        .map(|node| NavigatorEntry {
            frontier_label: node
                .frontier_id
                .and_then(|frontier_id| frontiers.get(&frontier_id).cloned()),
            node,
        })
        .collect::<Vec<_>>();

    let title = selected_tag.as_ref().map_or_else(
        || "all recent nodes".to_owned(),
        |tag| format!("tag: {tag}"),
    );
    let project_name = store.config().display_name.to_string();

    Ok(html! {
        (DOCTYPE)
        html {
            head {
                meta charset="utf-8";
                meta name="viewport" content="width=device-width, initial-scale=1";
                title { "Fidget Spinner Navigator" }
                style { (PreEscaped(stylesheet().to_owned())) }
            }
            body {
                main class="shell" {
                    aside class="rail" {
                        h1 { "Navigator" }
                        p class="project" { (project_name) }
                        nav class="tag-list" {
                            a
                                href="/"
                                class={ "tag-link " (if selected_tag.is_none() { "selected" } else { "" }) } {
                                span class="tag-name" { "all" }
                                span class="tag-count" { (recent_nodes.len()) }
                            }
                            @for facet in &tag_facets {
                                a
                                    href={ "/?tag=" (facet.name.as_str()) }
                                    class={ "tag-link " (if selected_tag.as_ref() == Some(&facet.name) { "selected" } else { "" }) } {
                                    span class="tag-name" { (facet.name.as_str()) }
                                    span class="tag-count" { (facet.count) }
                                    span class="tag-description" { (facet.description.as_str()) }
                                }
                            }
                        }
                    }
                    section class="feed" {
                        header class="feed-header" {
                            h2 { (title) }
                            p class="feed-meta" {
                                (entries.len()) " shown"
                                " · "
                                (recent_nodes.len()) " recent"
                                " · "
                                (state.limit) " max"
                            }
                        }
                        @if entries.is_empty() {
                            article class="empty-state" {
                                h3 { "No matching nodes" }
                                p { "Try clearing the tag filter or recording new notes." }
                            }
                        } @else {
                            @for entry in &entries {
                                (render_entry(entry, &schema))
                            }
                        }
                    }
                }
            }
        }
    })
}
+
/// Lists up to `limit` node summaries (optionally restricted to one tag)
/// and hydrates each summary into a full `DagNode`.
///
/// NOTE(review): this issues one `get_node` per summary (N+1 reads) — fine
/// for the small limits the navigator uses, revisit if limits grow.
fn load_recent_nodes(
    store: &fidget_spinner_store_sqlite::ProjectStore,
    tag: Option<TagName>,
    limit: u32,
) -> Result<Vec<DagNode>, fidget_spinner_store_sqlite::StoreError> {
    let summaries = store.list_nodes(fidget_spinner_store_sqlite::ListNodesQuery {
        tags: tag.into_iter().collect(),
        limit,
        ..fidget_spinner_store_sqlite::ListNodesQuery::default()
    })?;
    summaries
        .into_iter()
        .map(|summary| {
            // A node listed a moment ago could vanish before get_node runs;
            // surface that as NodeNotFound rather than skipping it silently.
            store.get_node(summary.id)?.ok_or(
                fidget_spinner_store_sqlite::StoreError::NodeNotFound(summary.id),
            )
        })
        .collect()
}
+
/// Renders one feed card: class badge, self-linking title, metadata row
/// (timestamp, frontier label, tag links), optional summary, the `body`
/// payload field as rich text, all remaining payload fields in schema-aware
/// form, and any validation diagnostics.
fn render_entry(entry: &NavigatorEntry, schema: &ProjectSchema) -> Markup {
    // `body` gets its own rich-text section, so exclude it from the
    // key/value field list below.
    let body = entry.node.payload.field("body").and_then(Value::as_str);
    let mut keys = entry
        .node
        .payload
        .fields
        .keys()
        .filter(|name| name.as_str() != "body")
        .cloned()
        .collect::<Vec<_>>();
    keys.sort_unstable();

    html! {
        article class="entry" id={ "node-" (entry.node.id) } {
            header class="entry-header" {
                div class="entry-title-row" {
                    span class={ "class-badge class-" (entry.node.class.as_str()) } {
                        (entry.node.class.as_str())
                    }
                    h3 class="entry-title" {
                        a href={ "#node-" (entry.node.id) } { (entry.node.title.as_str()) }
                    }
                }
                div class="entry-meta" {
                    span { (render_timestamp(entry.node.updated_at)) }
                    @if let Some(label) = &entry.frontier_label {
                        span { "frontier: " (label.as_str()) }
                    }
                    @if !entry.node.tags.is_empty() {
                        span class="tag-strip" {
                            @for tag in &entry.node.tags {
                                a class="entry-tag" href={ "/?tag=" (tag.as_str()) } { (tag.as_str()) }
                            }
                        }
                    }
                }
            }
            @if let Some(summary) = &entry.node.summary {
                p class="entry-summary" { (summary.as_str()) }
            }
            @if let Some(body) = body {
                section class="entry-body" {
                    (render_string_value(body))
                }
            }
            @if !keys.is_empty() {
                dl class="field-list" {
                    @for key in &keys {
                        @if let Some(value) = entry.node.payload.field(key) {
                            (render_field(entry.node.class, schema, key, value))
                        }
                    }
                }
            }
            @if !entry.node.diagnostics.items.is_empty() {
                section class="diagnostics" {
                    h4 { "diagnostics" }
                    ul {
                        @for item in &entry.node.diagnostics.items {
                            li {
                                span class="diag-severity" { (format!("{:?}", item.severity).to_ascii_lowercase()) }
                                " "
                                (item.message.as_str())
                            }
                        }
                    }
                }
            }
        }
    }
}
+
/// Renders one payload field as a `<dt>`/`<dd>` pair.
///
/// The field's declared `value_type` (from the project schema, when the spec
/// exists for this node class) picks a typed rendering — rich text, numeric,
/// boolean, or timestamp — and each typed arm falls back to raw JSON when
/// the actual value does not match the declared type. Fields with no
/// declared type always render as JSON. Plottable fields get a "plot" badge.
fn render_field(class: NodeClass, schema: &ProjectSchema, key: &str, value: &Value) -> Markup {
    let value_type = schema
        .field_spec(class, key)
        .and_then(|field| field.value_type);
    let is_plottable = schema
        .field_spec(class, key)
        .is_some_and(|field| field.is_plottable());
    html! {
        dt {
            (key)
            @if let Some(value_type) = value_type {
                span class="field-type" { (value_type.as_str()) }
            }
            @if is_plottable {
                span class="field-type plottable" { "plot" }
            }
        }
        dd {
            @match value_type {
                Some(FieldValueType::String) => {
                    @if let Some(text) = value.as_str() {
                        (render_string_value(text))
                    } @else {
                        (render_json_value(value))
                    }
                }
                Some(FieldValueType::Numeric) => {
                    @if let Some(number) = value.as_f64() {
                        code class="numeric" { (number) }
                    } @else {
                        (render_json_value(value))
                    }
                }
                Some(FieldValueType::Boolean) => {
                    @if let Some(boolean) = value.as_bool() {
                        span class={ "boolean " (if boolean { "true" } else { "false" }) } {
                            (if boolean { "true" } else { "false" })
                        }
                    } @else {
                        (render_json_value(value))
                    }
                }
                Some(FieldValueType::Timestamp) => {
                    @if let Some(raw) = value.as_str() {
                        time datetime=(raw) { (render_timestamp_value(raw)) }
                    } @else {
                        (render_json_value(value))
                    }
                }
                None => (render_json_value(value)),
            }
        }
    }
}
+
/// Renders plain text as one `<p>` per line, auto-linking URL spans found by
/// `linkify`. Text goes through maud's normal interpolation (unlike the
/// `PreEscaped` stylesheet), so untrusted payload text is HTML-escaped.
fn render_string_value(text: &str) -> Markup {
    let finder = LinkFinder::new();
    html! {
        div class="rich-text" {
            @for line in text.lines() {
                p {
                    @for span in finder.spans(line) {
                        @match span.kind() {
                            // URL spans become anchors; everything else
                            // (including email spans) stays plain text.
                            Some(LinkKind::Url) => a href=(span.as_str()) { (span.as_str()) },
                            _ => (span.as_str()),
                        }
                    }
                }
            }
        }
    }
}
+
/// Renders any JSON value as a pretty-printed `<pre>` block, falling back to
/// compact `to_string` output if pretty-printing fails.
fn render_json_value(value: &Value) -> Markup {
    let text = to_pretty_json(value).unwrap_or_else(|_| value.to_string());
    html! {
        pre class="json-value" { (text) }
    }
}
+
/// Formats a timestamp as RFC 3339, falling back to the `Display` form if
/// formatting fails.
fn render_timestamp(timestamp: OffsetDateTime) -> String {
    timestamp
        .format(&Rfc3339)
        .unwrap_or_else(|_| timestamp.to_string())
}
+
/// Round-trips a raw RFC 3339 string through parse + format to normalize it;
/// unparseable input is shown verbatim.
fn render_timestamp_value(raw: &str) -> String {
    OffsetDateTime::parse(raw, &Rfc3339)
        .map(render_timestamp)
        .unwrap_or_else(|_| raw.to_owned())
}
+
/// Static stylesheet for the navigator page, injected via `PreEscaped` into
/// a `<style>` tag. Plain CSS driven by custom properties; the single
/// `@media` block collapses the two-column shell on narrow screens.
fn stylesheet() -> &'static str {
    r#"
    :root {
        color-scheme: light;
        --bg: #f6f3ec;
        --panel: #fffdf8;
        --line: #d8d1c4;
        --text: #22201a;
        --muted: #746e62;
        --accent: #2d5c4d;
        --accent-soft: #dbe8e2;
        --tag: #ece5d8;
        --warn: #8b5b24;
    }

    * { box-sizing: border-box; }

    body {
        margin: 0;
        background: var(--bg);
        color: var(--text);
        font: 15px/1.5 "Iosevka Web", "IBM Plex Mono", "SFMono-Regular", monospace;
    }

    a {
        color: var(--accent);
        text-decoration: none;
    }

    a:hover {
        text-decoration: underline;
    }

    .shell {
        display: grid;
        grid-template-columns: 18rem minmax(0, 1fr);
        min-height: 100vh;
    }

    .rail {
        border-right: 1px solid var(--line);
        padding: 1.25rem 1rem;
        position: sticky;
        top: 0;
        align-self: start;
        height: 100vh;
        overflow: auto;
        background: rgba(255, 253, 248, 0.85);
        backdrop-filter: blur(6px);
    }

    .project, .feed-meta, .entry-meta, .entry-summary, .tag-description {
        color: var(--muted);
    }

    .tag-list {
        display: grid;
        gap: 0.5rem;
    }

    .tag-link {
        display: grid;
        grid-template-columns: minmax(0, 1fr) auto;
        gap: 0.2rem 0.75rem;
        padding: 0.55rem 0.7rem;
        border: 1px solid var(--line);
        background: var(--panel);
    }

    .tag-link.selected {
        border-color: var(--accent);
        background: var(--accent-soft);
    }

    .tag-name {
        font-weight: 700;
        overflow-wrap: anywhere;
    }

    .tag-count {
        color: var(--muted);
    }

    .tag-description {
        grid-column: 1 / -1;
        font-size: 0.9rem;
    }

    .feed {
        padding: 1.5rem;
        display: grid;
        gap: 1rem;
    }

    .feed-header {
        padding-bottom: 0.5rem;
        border-bottom: 1px solid var(--line);
    }

    .entry, .empty-state {
        background: var(--panel);
        border: 1px solid var(--line);
        padding: 1rem 1.1rem;
    }

    .entry-header {
        display: grid;
        gap: 0.35rem;
        margin-bottom: 0.75rem;
    }

    .entry-title-row {
        display: flex;
        gap: 0.75rem;
        align-items: baseline;
    }

    .entry-title {
        margin: 0;
        font-size: 1.05rem;
    }

    .entry-meta {
        display: flex;
        flex-wrap: wrap;
        gap: 0.75rem;
        font-size: 0.9rem;
    }

    .class-badge, .field-type, .entry-tag {
        display: inline-block;
        padding: 0.08rem 0.4rem;
        border: 1px solid var(--line);
        background: var(--tag);
        font-size: 0.82rem;
    }

    .field-type.plottable {
        background: var(--accent-soft);
        border-color: var(--accent);
    }

    .tag-strip {
        display: inline-flex;
        flex-wrap: wrap;
        gap: 0.35rem;
    }

    .entry-body {
        margin-bottom: 0.9rem;
    }

    .rich-text p {
        margin: 0 0 0.55rem;
    }

    .rich-text p:last-child {
        margin-bottom: 0;
    }

    .field-list {
        display: grid;
        grid-template-columns: minmax(12rem, 18rem) minmax(0, 1fr);
        gap: 0.55rem 1rem;
        margin: 0;
    }

    .field-list dt {
        font-weight: 700;
        display: flex;
        gap: 0.4rem;
        align-items: center;
        overflow-wrap: anywhere;
    }

    .field-list dd {
        margin: 0;
    }

    .json-value {
        margin: 0;
        padding: 0.6rem 0.7rem;
        background: #f3eee4;
        overflow: auto;
    }

    .boolean.true { color: var(--accent); }
    .boolean.false { color: #8a2f2f; }
    .numeric { font-size: 1rem; }

    .diagnostics {
        margin-top: 1rem;
        padding-top: 0.8rem;
        border-top: 1px dashed var(--line);
    }

    .diagnostics h4 {
        margin: 0 0 0.4rem;
        font-size: 0.9rem;
        text-transform: lowercase;
    }

    .diagnostics ul {
        margin: 0;
        padding-left: 1.1rem;
    }

    .diag-severity {
        color: var(--warn);
        font-weight: 700;
    }

    @media (max-width: 900px) {
        .shell {
            grid-template-columns: 1fr;
        }

        .rail {
            position: static;
            height: auto;
            border-right: 0;
            border-bottom: 1px solid var(--line);
        }

        .field-list {
            grid-template-columns: 1fr;
        }
    }
    "#
}
diff --git a/crates/fidget-spinner-core/Cargo.toml b/crates/fidget-spinner-core/Cargo.toml
index b472b91..c147ee2 100644
--- a/crates/fidget-spinner-core/Cargo.toml
+++ b/crates/fidget-spinner-core/Cargo.toml
@@ -1,9 +1,13 @@
[package]
name = "fidget-spinner-core"
-description = "Core domain model for a local-first experimental DAG"
+categories.workspace = true
+description = "Core domain model for the Fidget Spinner experimental DAG"
edition.workspace = true
+keywords.workspace = true
license.workspace = true
publish = false
+readme.workspace = true
+repository.workspace = true
rust-version.workspace = true
version.workspace = true
diff --git a/crates/fidget-spinner-core/src/error.rs b/crates/fidget-spinner-core/src/error.rs
index 8e976c7..eb05ba7 100644
--- a/crates/fidget-spinner-core/src/error.rs
+++ b/crates/fidget-spinner-core/src/error.rs
@@ -4,6 +4,12 @@ use thiserror::Error;
pub enum CoreError {
#[error("text values must not be blank")]
EmptyText,
+ #[error("tag names must not be blank")]
+ EmptyTagName,
+ #[error(
+ "invalid tag name `{0}`; expected lowercase ascii alphanumerics separated by `-`, `_`, or `/`"
+ )]
+ InvalidTagName(String),
#[error("command recipes must contain at least one argv element")]
EmptyCommand,
}
diff --git a/crates/fidget-spinner-core/src/lib.rs b/crates/fidget-spinner-core/src/lib.rs
index f368268..b5e2b23 100644
--- a/crates/fidget-spinner-core/src/lib.rs
+++ b/crates/fidget-spinner-core/src/lib.rs
@@ -18,9 +18,10 @@ pub use crate::model::{
AdmissionState, AnnotationVisibility, ArtifactKind, ArtifactRef, CheckpointDisposition,
CheckpointRecord, CheckpointSnapshotRef, CodeSnapshotRef, CommandRecipe, CompletedExperiment,
DagEdge, DagNode, DiagnosticSeverity, EdgeKind, EvaluationProtocol, ExecutionBackend,
- ExperimentResult, FieldPresence, FieldRole, FrontierContract, FrontierNote, FrontierProjection,
- FrontierRecord, FrontierStatus, FrontierVerdict, GitCommitHash, InferencePolicy, JsonObject,
- MetricObservation, MetricSpec, MetricUnit, NodeAnnotation, NodeClass, NodeDiagnostics,
- NodePayload, NodeTrack, NonEmptyText, OptimizationObjective, PayloadSchemaRef,
- ProjectFieldSpec, ProjectSchema, RunRecord, RunStatus, ValidationDiagnostic,
+ ExperimentResult, FieldPresence, FieldRole, FieldValueType, FrontierContract, FrontierNote,
+ FrontierProjection, FrontierRecord, FrontierStatus, FrontierVerdict, GitCommitHash,
+ InferencePolicy, JsonObject, MetricObservation, MetricSpec, MetricUnit, NodeAnnotation,
+ NodeClass, NodeDiagnostics, NodePayload, NodeTrack, NonEmptyText, OptimizationObjective,
+ PayloadSchemaRef, ProjectFieldSpec, ProjectSchema, RunRecord, RunStatus, TagName, TagRecord,
+ ValidationDiagnostic,
};
diff --git a/crates/fidget-spinner-core/src/model.rs b/crates/fidget-spinner-core/src/model.rs
index f0d1818..2de3705 100644
--- a/crates/fidget-spinner-core/src/model.rs
+++ b/crates/fidget-spinner-core/src/model.rs
@@ -5,6 +5,7 @@ use camino::Utf8PathBuf;
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value};
use time::OffsetDateTime;
+use time::format_description::well_known::Rfc3339;
use crate::{
AgentSessionId, AnnotationId, ArtifactId, CheckpointId, CoreError, ExperimentId, FrontierId,
@@ -57,6 +58,60 @@ impl Display for GitCommitHash {
}
}
+#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Serialize, Deserialize)]
+#[serde(try_from = "String", into = "String")]
+pub struct TagName(String);
+
+impl TagName {
+ pub fn new(value: impl Into<String>) -> Result<Self, CoreError> {
+ let normalized = value.into().trim().to_ascii_lowercase();
+ if normalized.is_empty() {
+ return Err(CoreError::EmptyTagName);
+ }
+ let mut previous_was_separator = true;
+ for character in normalized.chars() {
+ if character.is_ascii_lowercase() || character.is_ascii_digit() {
+ previous_was_separator = false;
+ continue;
+ }
+ if matches!(character, '-' | '_' | '/') && !previous_was_separator {
+ previous_was_separator = true;
+ continue;
+ }
+ return Err(CoreError::InvalidTagName(normalized));
+ }
+ if previous_was_separator {
+ return Err(CoreError::InvalidTagName(normalized));
+ }
+ Ok(Self(normalized))
+ }
+
+ #[must_use]
+ pub fn as_str(&self) -> &str {
+ &self.0
+ }
+}
+
+impl TryFrom<String> for TagName {
+ type Error = CoreError;
+
+ fn try_from(value: String) -> Result<Self, Self::Error> {
+ Self::new(value)
+ }
+}
+
+impl From<TagName> for String {
+ fn from(value: TagName) -> Self {
+ value.0
+ }
+}
+
+impl Display for TagName {
+ fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result {
+ formatter.write_str(&self.0)
+ }
+}
+
pub type JsonObject = Map<String, Value>;
#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
@@ -143,6 +198,44 @@ pub enum InferencePolicy {
ModelMayInfer,
}
+#[derive(Clone, Copy, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
+#[serde(rename_all = "snake_case")]
+pub enum FieldValueType {
+ String,
+ Numeric,
+ Boolean,
+ Timestamp,
+}
+
+impl FieldValueType {
+ #[must_use]
+ pub const fn is_plottable(self) -> bool {
+ matches!(self, Self::Numeric | Self::Timestamp)
+ }
+
+ #[must_use]
+ pub fn accepts(self, value: &Value) -> bool {
+ match self {
+ Self::String => value.is_string(),
+ Self::Numeric => value.is_number(),
+ Self::Boolean => value.is_boolean(),
+ Self::Timestamp => value
+ .as_str()
+ .is_some_and(|raw| OffsetDateTime::parse(raw, &Rfc3339).is_ok()),
+ }
+ }
+
+ #[must_use]
+ pub const fn as_str(self) -> &'static str {
+ match self {
+ Self::String => "string",
+ Self::Numeric => "numeric",
+ Self::Boolean => "boolean",
+ Self::Timestamp => "timestamp",
+ }
+ }
+}
+
#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub enum FrontierStatus {
Exploring,
@@ -265,6 +358,13 @@ impl NodeAnnotation {
}
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct TagRecord {
    // Canonical, validated tag name (see `TagName::new`).
    pub name: TagName,
    // Human-readable purpose of the tag; never blank.
    pub description: NonEmptyText,
    // When the tag was registered in the store.
    pub created_at: OffsetDateTime,
}
+
+#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct ValidationDiagnostic {
pub severity: DiagnosticSeverity,
pub code: String,
@@ -296,6 +396,8 @@ pub struct ProjectFieldSpec {
pub severity: DiagnosticSeverity,
pub role: FieldRole,
pub inference_policy: InferencePolicy,
+ #[serde(default)]
+ pub value_type: Option<FieldValueType>,
}
impl ProjectFieldSpec {
@@ -303,6 +405,11 @@ impl ProjectFieldSpec {
pub fn applies_to(&self, class: NodeClass) -> bool {
self.node_classes.is_empty() || self.node_classes.contains(&class)
}
+
+ #[must_use]
+ pub fn is_plottable(&self) -> bool {
+ self.value_type.is_some_and(FieldValueType::is_plottable)
+ }
}
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
@@ -331,14 +438,37 @@ impl ProjectSchema {
}
#[must_use]
+ pub fn field_spec(&self, class: NodeClass, name: &str) -> Option<&ProjectFieldSpec> {
+ self.fields
+ .iter()
+ .find(|field| field.applies_to(class) && field.name.as_str() == name)
+ }
+
+ #[must_use]
pub fn validate_node(&self, class: NodeClass, payload: &NodePayload) -> NodeDiagnostics {
let items = self
.fields
.iter()
.filter(|field| field.applies_to(class))
.filter_map(|field| {
- let is_missing = payload.field(field.name.as_str()).is_none();
+ let value = payload.field(field.name.as_str());
+ let is_missing = value.is_none();
if !is_missing || field.presence == FieldPresence::Optional {
+ if let (Some(value), Some(value_type)) = (value, field.value_type)
+ && !value_type.accepts(value)
+ {
+ return Some(ValidationDiagnostic {
+ severity: field.severity,
+ code: format!("type.{}", field.name.as_str()),
+ message: validation_message(format!(
+ "project payload field `{}` expected {}, found {}",
+ field.name.as_str(),
+ value_type.as_str(),
+ json_value_kind(value)
+ )),
+ field_name: Some(field.name.as_str().to_owned()),
+ });
+ }
return None;
}
Some(ValidationDiagnostic {
@@ -366,6 +496,17 @@ fn validation_message(value: String) -> NonEmptyText {
}
}
+fn json_value_kind(value: &Value) -> &'static str {
+ match value {
+ Value::Null => "null",
+ Value::Bool(_) => "boolean",
+ Value::Number(_) => "numeric",
+ Value::String(_) => "string",
+ Value::Array(_) => "array",
+ Value::Object(_) => "object",
+ }
+}
+
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct DagNode {
pub id: NodeId,
@@ -375,6 +516,7 @@ pub struct DagNode {
pub archived: bool,
pub title: NonEmptyText,
pub summary: Option<NonEmptyText>,
+ pub tags: BTreeSet<TagName>,
pub payload: NodePayload,
pub annotations: Vec<NodeAnnotation>,
pub diagnostics: NodeDiagnostics,
@@ -402,6 +544,7 @@ impl DagNode {
archived: false,
title,
summary,
+ tags: BTreeSet::new(),
payload,
annotations: Vec::new(),
diagnostics,
@@ -628,8 +771,9 @@ mod tests {
use serde_json::json;
use super::{
- CommandRecipe, DagNode, DiagnosticSeverity, FieldPresence, FieldRole, InferencePolicy,
- JsonObject, NodeClass, NodePayload, NonEmptyText, ProjectFieldSpec, ProjectSchema,
+ CommandRecipe, DagNode, DiagnosticSeverity, FieldPresence, FieldRole, FieldValueType,
+ InferencePolicy, JsonObject, NodeClass, NodePayload, NonEmptyText, ProjectFieldSpec,
+ ProjectSchema,
};
use crate::CoreError;
@@ -661,6 +805,7 @@ mod tests {
severity: DiagnosticSeverity::Warning,
role: FieldRole::ProjectionGate,
inference_policy: InferencePolicy::ManualOnly,
+ value_type: None,
}],
};
let payload = NodePayload::with_schema(schema.schema_ref(), JsonObject::new());
@@ -673,6 +818,33 @@ mod tests {
}
#[test]
    fn schema_validation_warns_on_type_mismatch() -> Result<(), CoreError> {
        // One Analysis-only field declared Numeric...
        let schema = ProjectSchema {
            namespace: NonEmptyText::new("local.libgrid")?,
            version: 1,
            fields: vec![ProjectFieldSpec {
                name: NonEmptyText::new("improvement")?,
                node_classes: BTreeSet::from([NodeClass::Analysis]),
                presence: FieldPresence::Recommended,
                severity: DiagnosticSeverity::Warning,
                role: FieldRole::RenderOnly,
                inference_policy: InferencePolicy::ManualOnly,
                value_type: Some(FieldValueType::Numeric),
            }],
        };
        // ...fed a string payload value, violating the declared type.
        let payload = NodePayload::with_schema(
            schema.schema_ref(),
            JsonObject::from_iter([("improvement".to_owned(), json!("not a number"))]),
        );
        let diagnostics = schema.validate_node(NodeClass::Analysis, &payload);

        // Warning severity keeps the node admitted, but exactly one
        // `type.<field>` diagnostic must be recorded.
        assert_eq!(diagnostics.admission, super::AdmissionState::Admitted);
        assert_eq!(diagnostics.items.len(), 1);
        assert_eq!(diagnostics.items[0].code, "type.improvement");
        Ok(())
    }
+
+ #[test]
fn research_nodes_default_to_off_path() -> Result<(), CoreError> {
let payload = NodePayload {
schema: None,
diff --git a/crates/fidget-spinner-store-sqlite/Cargo.toml b/crates/fidget-spinner-store-sqlite/Cargo.toml
index 54e0784..00fd070 100644
--- a/crates/fidget-spinner-store-sqlite/Cargo.toml
+++ b/crates/fidget-spinner-store-sqlite/Cargo.toml
@@ -1,9 +1,13 @@
[package]
name = "fidget-spinner-store-sqlite"
-description = "Per-project SQLite store for the Fidget Spinner DAG spine"
+categories.workspace = true
+description = "SQLite-backed per-project store for Fidget Spinner DAG projects"
edition.workspace = true
+keywords.workspace = true
license.workspace = true
publish = false
+readme.workspace = true
+repository.workspace = true
rust-version.workspace = true
version.workspace = true
diff --git a/crates/fidget-spinner-store-sqlite/src/lib.rs b/crates/fidget-spinner-store-sqlite/src/lib.rs
index 7c129ab..da9fa42 100644
--- a/crates/fidget-spinner-store-sqlite/src/lib.rs
+++ b/crates/fidget-spinner-store-sqlite/src/lib.rs
@@ -1,3 +1,5 @@
+use std::collections::BTreeSet;
+use std::fmt::Write as _;
use std::fs;
use std::io;
use std::process::Command;
@@ -9,9 +11,10 @@ use fidget_spinner_core::{
ExecutionBackend, ExperimentResult, FrontierContract, FrontierNote, FrontierProjection,
FrontierRecord, FrontierStatus, FrontierVerdict, GitCommitHash, JsonObject, MetricObservation,
MetricSpec, MetricUnit, NodeAnnotation, NodeClass, NodeDiagnostics, NodePayload, NonEmptyText,
- OptimizationObjective, ProjectSchema, RunRecord, RunStatus,
+ OptimizationObjective, ProjectSchema, RunRecord, RunStatus, TagName, TagRecord,
};
-use rusqlite::{Connection, OptionalExtension, Transaction, params};
+use rusqlite::types::Value as SqlValue;
+use rusqlite::{Connection, OptionalExtension, Transaction, params, params_from_iter};
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use thiserror::Error;
@@ -54,6 +57,12 @@ pub enum StoreError {
MissingChampionCheckpoint {
frontier_id: fidget_spinner_core::FrontierId,
},
+ #[error("unknown tag `{0}`")]
+ UnknownTag(TagName),
+ #[error("tag `{0}` already exists")]
+ DuplicateTag(TagName),
+ #[error("note nodes require an explicit tag list; use an empty list if no tags apply")]
+ NoteTagsRequired,
#[error("git repository inspection failed for {0}")]
GitInspectionFailed(Utf8PathBuf),
}
@@ -82,6 +91,7 @@ pub struct CreateNodeRequest {
pub frontier_id: Option<fidget_spinner_core::FrontierId>,
pub title: NonEmptyText,
pub summary: Option<NonEmptyText>,
+ pub tags: Option<BTreeSet<TagName>>,
pub payload: NodePayload,
pub annotations: Vec<NodeAnnotation>,
pub attachments: Vec<EdgeAttachment>,
@@ -122,6 +132,7 @@ impl EdgeAttachment {
pub struct ListNodesQuery {
pub frontier_id: Option<fidget_spinner_core::FrontierId>,
pub class: Option<NodeClass>,
+ pub tags: BTreeSet<TagName>,
pub include_archived: bool,
pub limit: u32,
}
@@ -131,6 +142,7 @@ impl Default for ListNodesQuery {
Self {
frontier_id: None,
class: None,
+ tags: BTreeSet::new(),
include_archived: false,
limit: 20,
}
@@ -146,6 +158,7 @@ pub struct NodeSummary {
pub archived: bool,
pub title: NonEmptyText,
pub summary: Option<NonEmptyText>,
+ pub tags: BTreeSet<TagName>,
pub diagnostic_count: u64,
pub hidden_annotation_count: u64,
pub created_at: OffsetDateTime,
@@ -317,6 +330,47 @@ impl ProjectStore {
self.frontier_projection(frontier.id)
}
+ pub fn add_tag(
+ &mut self,
+ name: TagName,
+ description: NonEmptyText,
+ ) -> Result<TagRecord, StoreError> {
+ let record = TagRecord {
+ name,
+ description,
+ created_at: OffsetDateTime::now_utc(),
+ };
+ let tx = self.connection.transaction()?;
+ insert_tag(&tx, &record)?;
+ insert_event(
+ &tx,
+ "tag",
+ record.name.as_str(),
+ "tag.created",
+ json!({"description": record.description.as_str()}),
+ )?;
+ tx.commit()?;
+ Ok(record)
+ }
+
+ pub fn list_tags(&self) -> Result<Vec<TagRecord>, StoreError> {
+ let mut statement = self.connection.prepare(
+ "SELECT name, description, created_at
+ FROM tags
+ ORDER BY name ASC",
+ )?;
+ let mut rows = statement.query([])?;
+ let mut items = Vec::new();
+ while let Some(row) = rows.next()? {
+ items.push(TagRecord {
+ name: TagName::new(row.get::<_, String>(0)?)?,
+ description: NonEmptyText::new(row.get::<_, String>(1)?)?,
+ created_at: decode_timestamp(&row.get::<_, String>(2)?)?,
+ });
+ }
+ Ok(items)
+ }
+
pub fn add_node(&mut self, request: CreateNodeRequest) -> Result<DagNode, StoreError> {
let diagnostics = self.schema.validate_node(request.class, &request.payload);
let mut node = DagNode::new(
@@ -327,9 +381,16 @@ impl ProjectStore {
request.payload,
diagnostics,
);
+ node.tags = match (request.class, request.tags) {
+ (NodeClass::Note, Some(tags)) => tags,
+ (NodeClass::Note, None) => return Err(StoreError::NoteTagsRequired),
+ (_, Some(tags)) => tags,
+ (_, None) => BTreeSet::new(),
+ };
node.annotations = request.annotations;
let tx = self.connection.transaction()?;
+ ensure_known_tags(&tx, &node.tags)?;
insert_node(&tx, &node)?;
for attachment in &request.attachments {
insert_edge(&tx, &attachment.materialize(node.id))?;
@@ -419,6 +480,7 @@ impl ProjectStore {
.query_row(params![node_id.to_string()], read_node_row)
.optional()?;
node.map(|mut item| {
+ item.tags = self.load_tags(item.id)?;
item.annotations = self.load_annotations(item.id)?;
Ok(item)
})
@@ -428,8 +490,7 @@ impl ProjectStore {
pub fn list_nodes(&self, query: ListNodesQuery) -> Result<Vec<NodeSummary>, StoreError> {
let frontier_id = query.frontier_id.map(|id| id.to_string());
let class = query.class.map(|item| item.as_str().to_owned());
- let limit = i64::from(query.limit);
- let mut statement = self.connection.prepare(
+ let mut sql = String::from(
"SELECT
n.id,
n.class,
@@ -449,21 +510,42 @@ impl ProjectStore {
FROM nodes AS n
WHERE (?1 IS NULL OR n.frontier_id = ?1)
AND (?2 IS NULL OR n.class = ?2)
- AND (?3 = 1 OR n.archived = 0)
+ AND (?3 = 1 OR n.archived = 0)",
+ );
+ let mut parameters = vec![
+ frontier_id.map_or(SqlValue::Null, SqlValue::Text),
+ class.map_or(SqlValue::Null, SqlValue::Text),
+ SqlValue::Integer(i64::from(query.include_archived)),
+ ];
+ for (index, tag) in query.tags.iter().enumerate() {
+ let placeholder = parameters.len() + 1;
+ let _ = write!(
+ sql,
+ "
+ AND EXISTS (
+ SELECT 1
+ FROM node_tags AS nt{index}
+ WHERE nt{index}.node_id = n.id AND nt{index}.tag_name = ?{placeholder}
+ )"
+ );
+ parameters.push(SqlValue::Text(tag.as_str().to_owned()));
+ }
+ let limit_placeholder = parameters.len() + 1;
+ let _ = write!(
+ sql,
+ "
ORDER BY n.updated_at DESC
- LIMIT ?4",
- )?;
- let mut rows = statement.query(params![
- frontier_id,
- class,
- i64::from(query.include_archived),
- limit
- ])?;
+ LIMIT ?{limit_placeholder}"
+ );
+ parameters.push(SqlValue::Integer(i64::from(query.limit)));
+ let mut statement = self.connection.prepare(&sql)?;
+ let mut rows = statement.query(params_from_iter(parameters.iter()))?;
let mut items = Vec::new();
while let Some(row) = rows.next()? {
let diagnostics = decode_json::<NodeDiagnostics>(&row.get::<_, String>(7)?)?;
+ let node_id = parse_node_id(&row.get::<_, String>(0)?)?;
items.push(NodeSummary {
- id: parse_node_id(&row.get::<_, String>(0)?)?,
+ id: node_id,
class: parse_node_class(&row.get::<_, String>(1)?)?,
track: parse_node_track(&row.get::<_, String>(2)?)?,
frontier_id: row
@@ -476,6 +558,7 @@ impl ProjectStore {
.get::<_, Option<String>>(6)?
.map(NonEmptyText::new)
.transpose()?,
+ tags: self.load_tags(node_id)?,
diagnostic_count: diagnostics.items.len() as u64,
hidden_annotation_count: row.get::<_, i64>(10)? as u64,
created_at: decode_timestamp(&row.get::<_, String>(8)?)?,
@@ -505,7 +588,7 @@ impl ProjectStore {
) -> Result<FrontierProjection, StoreError> {
let frontier = self.load_frontier(frontier_id)?;
let mut champion_checkpoint_id = None;
- let mut candidate_checkpoint_ids = std::collections::BTreeSet::new();
+ let mut candidate_checkpoint_ids = BTreeSet::new();
let mut statement = self.connection.prepare(
"SELECT id, disposition
@@ -768,6 +851,24 @@ impl ProjectStore {
Ok(items)
}
+ fn load_tags(
+ &self,
+ node_id: fidget_spinner_core::NodeId,
+ ) -> Result<BTreeSet<TagName>, StoreError> {
+ let mut statement = self.connection.prepare(
+ "SELECT tag_name
+ FROM node_tags
+ WHERE node_id = ?1
+ ORDER BY tag_name ASC",
+ )?;
+ let mut rows = statement.query(params![node_id.to_string()])?;
+ let mut items = BTreeSet::new();
+ while let Some(row) = rows.next()? {
+ let _ = items.insert(TagName::new(row.get::<_, String>(0)?)?);
+ }
+ Ok(items)
+ }
+
fn load_frontier(
&self,
frontier_id: fidget_spinner_core::FrontierId,
@@ -817,6 +918,18 @@ fn migrate(connection: &Connection) -> Result<(), StoreError> {
created_at TEXT NOT NULL
);
+ CREATE TABLE IF NOT EXISTS tags (
+ name TEXT PRIMARY KEY,
+ description TEXT NOT NULL,
+ created_at TEXT NOT NULL
+ );
+
+ CREATE TABLE IF NOT EXISTS node_tags (
+ node_id TEXT NOT NULL REFERENCES nodes(id) ON DELETE CASCADE,
+ tag_name TEXT NOT NULL REFERENCES tags(name) ON DELETE RESTRICT,
+ PRIMARY KEY (node_id, tag_name)
+ );
+
CREATE TABLE IF NOT EXISTS node_edges (
source_id TEXT NOT NULL REFERENCES nodes(id) ON DELETE CASCADE,
target_id TEXT NOT NULL REFERENCES nodes(id) ON DELETE CASCADE,
@@ -953,6 +1066,32 @@ fn insert_node(tx: &Transaction<'_>, node: &DagNode) -> Result<(), StoreError> {
for annotation in &node.annotations {
insert_annotation(tx, node.id, annotation)?;
}
+ for tag in &node.tags {
+ insert_node_tag(tx, node.id, tag)?;
+ }
+ Ok(())
+}
+
+fn insert_tag(tx: &Transaction<'_>, tag: &TagRecord) -> Result<(), StoreError> {
+ let existing = tx
+ .query_row(
+ "SELECT 1 FROM tags WHERE name = ?1",
+ params![tag.name.as_str()],
+ |row| row.get::<_, i64>(0),
+ )
+ .optional()?;
+ if existing.is_some() {
+ return Err(StoreError::DuplicateTag(tag.name.clone()));
+ }
+ let _ = tx.execute(
+ "INSERT INTO tags (name, description, created_at)
+ VALUES (?1, ?2, ?3)",
+ params![
+ tag.name.as_str(),
+ tag.description.as_str(),
+ encode_timestamp(tag.created_at)?,
+ ],
+ )?;
Ok(())
}
@@ -976,6 +1115,32 @@ fn insert_annotation(
Ok(())
}
+fn insert_node_tag(
+ tx: &Transaction<'_>,
+ node_id: fidget_spinner_core::NodeId,
+ tag: &TagName,
+) -> Result<(), StoreError> {
+ let _ = tx.execute(
+ "INSERT INTO node_tags (node_id, tag_name)
+ VALUES (?1, ?2)",
+ params![node_id.to_string(), tag.as_str()],
+ )?;
+ Ok(())
+}
+
+fn ensure_known_tags(tx: &Transaction<'_>, tags: &BTreeSet<TagName>) -> Result<(), StoreError> {
+ let mut statement = tx.prepare("SELECT 1 FROM tags WHERE name = ?1")?;
+ for tag in tags {
+ let exists = statement
+ .query_row(params![tag.as_str()], |row| row.get::<_, i64>(0))
+ .optional()?;
+ if exists.is_none() {
+ return Err(StoreError::UnknownTag(tag.clone()));
+ }
+ }
+ Ok(())
+}
+
fn insert_edge(tx: &Transaction<'_>, edge: &DagEdge) -> Result<(), StoreError> {
let _ = tx.execute(
"INSERT OR IGNORE INTO node_edges (source_id, target_id, kind)
@@ -1248,6 +1413,7 @@ fn read_node_row(row: &rusqlite::Row<'_>) -> Result<DagNode, rusqlite::Error> {
.map(NonEmptyText::new)
.transpose()
.map_err(core_to_sql_conversion_error)?,
+ tags: BTreeSet::new(),
payload,
annotations: Vec::new(),
diagnostics,
@@ -1626,6 +1792,8 @@ fn encode_frontier_verdict(verdict: FrontierVerdict) -> &'static str {
#[cfg(test)]
mod tests {
+ use std::collections::BTreeSet;
+
use serde_json::json;
use super::{
@@ -1633,7 +1801,7 @@ mod tests {
};
use fidget_spinner_core::{
CheckpointSnapshotRef, EvaluationProtocol, FrontierContract, MetricSpec, MetricUnit,
- NodeAnnotation, NodeClass, NodePayload, NonEmptyText, OptimizationObjective,
+ NodeAnnotation, NodeClass, NodePayload, NonEmptyText, OptimizationObjective, TagName,
};
fn temp_project_root(label: &str) -> camino::Utf8PathBuf {
@@ -1672,6 +1840,7 @@ mod tests {
frontier_id: None,
title: NonEmptyText::new("feature sketch")?,
summary: Some(NonEmptyText::new("research note")?),
+ tags: None,
payload: NodePayload::with_schema(
store.schema().schema_ref(),
super::json_object(json!({"body": "freeform"}))?,
@@ -1708,15 +1877,13 @@ mod tests {
contract: FrontierContract {
objective: NonEmptyText::new("improve wall time")?,
evaluation: EvaluationProtocol {
- benchmark_suites: std::collections::BTreeSet::from([NonEmptyText::new(
- "smoke",
- )?]),
+ benchmark_suites: BTreeSet::from([NonEmptyText::new("smoke")?]),
primary_metric: MetricSpec {
metric_key: NonEmptyText::new("wall_clock_s")?,
unit: MetricUnit::Seconds,
objective: OptimizationObjective::Minimize,
},
- supporting_metrics: std::collections::BTreeSet::new(),
+ supporting_metrics: BTreeSet::new(),
},
promotion_criteria: vec![NonEmptyText::new("strict speedup")?],
},
@@ -1748,6 +1915,7 @@ mod tests {
frontier_id: None,
title: NonEmptyText::new("quick note")?,
summary: None,
+ tags: Some(BTreeSet::new()),
payload: NodePayload::with_schema(
store.schema().schema_ref(),
super::json_object(json!({"body": "hello"}))?,
@@ -1783,15 +1951,13 @@ mod tests {
contract: FrontierContract {
objective: NonEmptyText::new("optimize")?,
evaluation: EvaluationProtocol {
- benchmark_suites: std::collections::BTreeSet::from([NonEmptyText::new(
- "smoke",
- )?]),
+ benchmark_suites: BTreeSet::from([NonEmptyText::new("smoke")?]),
primary_metric: MetricSpec {
metric_key: NonEmptyText::new("wall_clock_s")?,
unit: MetricUnit::Seconds,
objective: OptimizationObjective::Minimize,
},
- supporting_metrics: std::collections::BTreeSet::new(),
+ supporting_metrics: BTreeSet::new(),
},
promotion_criteria: vec![NonEmptyText::new("faster")?],
},
@@ -1807,4 +1973,75 @@ mod tests {
assert_eq!(nodes[0].class, NodeClass::Contract);
Ok(())
}
+
+ #[test]
+ fn notes_require_explicit_tags_even_when_empty() -> Result<(), super::StoreError> {
+ let root = temp_project_root("note-tags-required");
+ let mut store = ProjectStore::init(
+ &root,
+ NonEmptyText::new("test project")?,
+ NonEmptyText::new("local.test")?,
+ )?;
+
+ let result = store.add_node(CreateNodeRequest {
+ class: NodeClass::Note,
+ frontier_id: None,
+ title: NonEmptyText::new("quick note")?,
+ summary: None,
+ tags: None,
+ payload: NodePayload::with_schema(
+ store.schema().schema_ref(),
+ super::json_object(json!({"body": "hello"}))?,
+ ),
+ annotations: Vec::new(),
+ attachments: Vec::new(),
+ });
+
+ assert!(matches!(result, Err(super::StoreError::NoteTagsRequired)));
+ Ok(())
+ }
+
+ #[test]
+ fn tags_round_trip_and_filter_node_list() -> Result<(), super::StoreError> {
+ let root = temp_project_root("tag-roundtrip");
+ let mut store = ProjectStore::init(
+ &root,
+ NonEmptyText::new("test project")?,
+ NonEmptyText::new("local.test")?,
+ )?;
+ let cuts = store.add_tag(
+ TagName::new("cuts/core")?,
+ NonEmptyText::new("Core cutset work")?,
+ )?;
+ let heuristics = store.add_tag(
+ TagName::new("heuristic")?,
+ NonEmptyText::new("Heuristic tuning")?,
+ )?;
+ let note = store.add_node(CreateNodeRequest {
+ class: NodeClass::Note,
+ frontier_id: None,
+ title: NonEmptyText::new("tagged note")?,
+ summary: None,
+ tags: Some(BTreeSet::from([cuts.name.clone(), heuristics.name.clone()])),
+ payload: NodePayload::with_schema(
+ store.schema().schema_ref(),
+ super::json_object(json!({"body": "tagged"}))?,
+ ),
+ annotations: Vec::new(),
+ attachments: Vec::new(),
+ })?;
+
+ let loaded = store
+ .get_node(note.id)?
+ .ok_or(super::StoreError::NodeNotFound(note.id))?;
+ assert_eq!(loaded.tags.len(), 2);
+
+ let filtered = store.list_nodes(ListNodesQuery {
+ tags: BTreeSet::from([cuts.name]),
+ ..ListNodesQuery::default()
+ })?;
+ assert_eq!(filtered.len(), 1);
+ assert_eq!(filtered[0].tags.len(), 2);
+ Ok(())
+ }
}
diff --git a/docs/architecture.md b/docs/architecture.md
index acab8fe..5db53fb 100644
--- a/docs/architecture.md
+++ b/docs/architecture.md
@@ -95,6 +95,16 @@ project schema in `.fidget_spinner/schema.json`.
This is where domain-specific richness lives.
+Project field specs may optionally declare a light-touch `value_type` of:
+
+- `string`
+- `numeric`
+- `boolean`
+- `timestamp`
+
+These are intentionally soft hints for validation and rendering, not rigid
+engine-schema commitments.
+
### 3. Annotation sidecar
Annotations are stored separately from payload and are default-hidden unless
@@ -122,6 +132,7 @@ Project field expectations are warning-heavy:
- missing recommended fields emit diagnostics
- missing projection-gated fields remain storable
+- mistyped typed fields emit diagnostics
- ingest usually succeeds
### Operational eligibility
@@ -308,7 +319,7 @@ This projection is derived from canonical state and intentionally rebuildable.
These are intentionally cheap:
-- `note.quick`
+- `note.quick`, but only with explicit tags from the repo-local registry
- `research.record`
- generic `node.create` for escape-hatch use
- `node.annotate`
@@ -356,6 +367,32 @@ worker subprocess.
- return typed success or typed fault records
- remain disposable without losing canonical state
+## Minimal Navigator
+
+The CLI also exposes a minimal localhost navigator through `ui serve`.
+
+Current shape:
+
+- left rail of repo-local tags
+- single linear node feed in reverse chronological order
+- full entry rendering in the main pane
+- lightweight hyperlinking for text fields
+- typed field badges for `string`, `numeric`, `boolean`, and `timestamp`
+
+This is intentionally not a full DAG canvas. It is a text-first operator window
+over the canonical store.
+
+## Binding Bootstrap
+
+`project.bind` may bootstrap a project store when the requested target root is
+an existing empty directory.
+
+That is intentionally narrow:
+
+- empty root: initialize and bind
+- non-empty uninitialized root: fail
+- existing store anywhere above the requested path: bind to that discovered root
+
### Fault model
Faults are typed by:
@@ -375,11 +412,11 @@ The tool catalog explicitly marks each operation as one of:
Current policy:
-- reads such as `project.status`, `project.schema`, `frontier.list`,
+- reads such as `project.status`, `project.schema`, `tag.list`, `frontier.list`,
`frontier.status`, `node.list`, `node.read`, `skill.list`, `skill.show`, and
resource reads
are safe to replay once after a retryable worker fault
-- mutating tools such as `frontier.init`, `node.create`, `change.record`,
+- mutating tools such as `tag.add`, `frontier.init`, `node.create`, `change.record`,
`node.annotate`, `node.archive`, `note.quick`, `research.record`, and
`experiment.close` are never auto-replayed
@@ -399,6 +436,8 @@ Implemented tools:
- `project.bind`
- `project.status`
- `project.schema`
+- `tag.add`
+- `tag.list`
- `frontier.list`
- `frontier.status`
- `frontier.init`
diff --git a/docs/product-spec.md b/docs/product-spec.md
index 8ab6210..89d392c 100644
--- a/docs/product-spec.md
+++ b/docs/product-spec.md
@@ -230,8 +230,10 @@ done.
- local SQLite backing store
- local blob directory
- typed Rust core model
+- optional light-touch project field types: `string`, `numeric`, `boolean`, `timestamp`
- thin CLI for bootstrap and repair
- hardened stdio MCP host exposed from the CLI
+- minimal read-only web navigator with tag filtering and linear node rendering
- disposable MCP worker execution runtime
- bundled `fidget-spinner` base skill
- bundled `frontier-loop` skill
@@ -259,6 +261,8 @@ The initial tools should be:
- `project.bind`
- `project.status`
- `project.schema`
+- `tag.add`
+- `tag.list`
- `frontier.list`
- `frontier.status`
- `frontier.init`
@@ -289,9 +293,9 @@ The bundled skills should instruct agents to:
1. inspect `system.health` first
2. bind the MCP session to the target project before project-local reads or writes
-3. read project schema and frontier state
+3. read project schema, tag registry, and frontier state
4. pull context from the DAG instead of giant prose dumps
-5. use `note.quick` and `research.record` freely off path
+5. use `note.quick` and `research.record` freely off path, but always pass an explicit tag list for notes
6. use `change.record` before worktree thrash becomes ambiguous
7. use `experiment.close` to atomically seal core-path work
8. archive detritus instead of deleting it
@@ -306,6 +310,8 @@ The MVP is successful when:
- an agent can inspect frontier state through MCP
- an agent can inspect MCP health and telemetry through MCP
- an agent can record off-path research without bureaucratic pain
+- the project schema can softly declare whether payload fields are strings, numbers, booleans, or timestamps
+- an operator can inspect recent nodes through a minimal localhost web navigator filtered by tag
- a git-backed project can close a real core-path experiment atomically
- retryable worker faults do not duplicate side effects
- stale nodes can be archived instead of polluting normal enumeration