diff options
| author | main <main@swarm.moe> | 2026-03-20 16:00:30 -0400 |
|---|---|---|
| committer | main <main@swarm.moe> | 2026-03-20 16:00:30 -0400 |
| commit | 9d63844f3a28fde70b19500422f17379e99e588a (patch) | |
| tree | 163cfbd65a8d3528346561410ef39eb1183a16f2 /crates/fidget-spinner-cli/src/main.rs | |
| parent | 22fe3d2ce7478450a1d7443c4ecbd85fd4c46716 (diff) | |
| download | fidget_spinner-9d63844f3a28fde70b19500422f17379e99e588a.zip | |
Refound Spinner as an austere frontier ledger
Diffstat (limited to 'crates/fidget-spinner-cli/src/main.rs')
| -rw-r--r-- | crates/fidget-spinner-cli/src/main.rs | 2014 |
1 file changed, 812 insertions, 1202 deletions
diff --git a/crates/fidget-spinner-cli/src/main.rs b/crates/fidget-spinner-cli/src/main.rs index f56e751..9de2515 100644 --- a/crates/fidget-spinner-cli/src/main.rs +++ b/crates/fidget-spinner-cli/src/main.rs @@ -4,27 +4,28 @@ mod ui; use std::collections::{BTreeMap, BTreeSet}; use std::fs; +use std::io; use std::net::SocketAddr; use std::path::{Path, PathBuf}; use camino::{Utf8Path, Utf8PathBuf}; use clap::{Args, Parser, Subcommand, ValueEnum}; use fidget_spinner_core::{ - AnnotationVisibility, CommandRecipe, DiagnosticSeverity, ExecutionBackend, FieldPresence, - FieldRole, FieldValueType, FrontierContract, FrontierNote, FrontierVerdict, InferencePolicy, - MetricSpec, MetricUnit, MetricValue, NodeAnnotation, NodeClass, NodePayload, NonEmptyText, - OptimizationObjective, ProjectFieldSpec, TagName, + ArtifactKind, CommandRecipe, ExecutionBackend, ExperimentAnalysis, ExperimentStatus, + FieldValueType, FrontierVerdict, MetricUnit, MetricVisibility, NonEmptyText, + OptimizationObjective, RunDimensionValue, Slug, TagName, }; use fidget_spinner_store_sqlite::{ - CloseExperimentRequest, CreateFrontierRequest, CreateNodeRequest, DefineMetricRequest, - DefineRunDimensionRequest, EdgeAttachment, EdgeAttachmentDirection, ExperimentAnalysisDraft, - ListNodesQuery, MetricBestQuery, MetricFieldSource, MetricKeyQuery, MetricRankOrder, - OpenExperimentRequest, ProjectStore, RemoveSchemaFieldRequest, STORE_DIR_NAME, StoreError, - UpsertSchemaFieldRequest, + AttachmentSelector, CloseExperimentRequest, CreateArtifactRequest, CreateFrontierRequest, + CreateHypothesisRequest, DefineMetricRequest, DefineRunDimensionRequest, + ExperimentOutcomePatch, FrontierRoadmapItemDraft, ListArtifactsQuery, ListExperimentsQuery, + ListHypothesesQuery, MetricBestQuery, MetricKeysQuery, MetricRankOrder, MetricScope, + OpenExperimentRequest, ProjectStore, STORE_DIR_NAME, StoreError, TextPatch, + UpdateArtifactRequest, UpdateExperimentRequest, UpdateFrontierBriefRequest, + UpdateHypothesisRequest, 
VertexSelector, }; use serde::Serialize; -use serde_json::{Map, Value, json}; -use uuid::Uuid; +use serde_json::Value; #[derive(Parser)] #[command( @@ -41,53 +42,52 @@ struct Cli { enum Command { /// Initialize a project-local `.fidget_spinner/` store. Init(InitArgs), - /// Read the local project payload schema. - Schema { + /// Inspect project metadata and coarse counts. + Project { #[command(subcommand)] - command: SchemaCommand, + command: ProjectCommand, }, - /// Create and inspect frontiers. + /// Manage the repo-local tag registry. + Tag { + #[command(subcommand)] + command: TagCommand, + }, + /// Create and inspect frontier scopes. Frontier { #[command(subcommand)] command: FrontierCommand, }, - /// Create, inspect, and mutate DAG nodes. - Node { + /// Record and inspect hypotheses. + Hypothesis { #[command(subcommand)] - command: NodeCommand, + command: HypothesisCommand, }, - /// Record terse off-path notes. - Note(NoteCommand), - /// Record core-path hypotheses before experimental work begins. - Hypothesis(HypothesisCommand), - /// Manage the repo-local tag registry. - Tag { + /// Open, inspect, update, and close experiments. + Experiment { #[command(subcommand)] - command: TagCommand, + command: ExperimentCommand, }, - /// Record imported sources and documentary context. - Source(SourceCommand), - /// Inspect rankable metrics across closed experiments. + /// Register external references and attach them to the ledger. + Artifact { + #[command(subcommand)] + command: ArtifactCommand, + }, + /// Manage project-level metric definitions and rankings. Metric { #[command(subcommand)] command: MetricCommand, }, - /// Define and inspect run dimensions used to slice experiment metrics. + /// Define the typed dimension vocabulary used to slice experiments. Dimension { #[command(subcommand)] command: DimensionCommand, }, - /// Close a core-path experiment atomically. 
- Experiment { - #[command(subcommand)] - command: ExperimentCommand, - }, /// Serve the hardened stdio MCP endpoint. Mcp { #[command(subcommand)] command: McpCommand, }, - /// Serve the minimal local web navigator. + /// Serve the local navigator. Ui { #[command(subcommand)] command: UiCommand, @@ -101,295 +101,148 @@ enum Command { #[derive(Args)] struct InitArgs { - /// Project root to initialize. #[arg(long, default_value = ".")] project: PathBuf, - /// Human-facing project name. Defaults to the directory name. #[arg(long)] name: Option<String>, - /// Payload schema namespace written into `.fidget_spinner/schema.json`. - #[arg(long, default_value = "local.project")] - namespace: String, } #[derive(Subcommand)] -enum SchemaCommand { - /// Show the current project schema as JSON. - Show(ProjectArg), - /// Add or replace one project schema field definition. - UpsertField(SchemaFieldUpsertArgs), - /// Remove one project schema field definition. - RemoveField(SchemaFieldRemoveArgs), +enum ProjectCommand { + Status(ProjectArg), } #[derive(Subcommand)] -enum FrontierCommand { - /// Create a frontier and root contract node. - Init(FrontierInitArgs), - /// Show one frontier projection or list frontiers when omitted. 
- Status(FrontierStatusArgs), -} - -#[derive(Args)] -struct FrontierInitArgs { - #[command(flatten)] - project: ProjectArg, - #[arg(long)] - label: String, - #[arg(long)] - objective: String, - #[arg(long, default_value = "frontier contract")] - contract_title: String, - #[arg(long)] - contract_summary: Option<String>, - #[arg(long = "benchmark-suite")] - benchmark_suites: Vec<String>, - #[arg(long = "promotion-criterion")] - promotion_criteria: Vec<String>, - #[arg(long = "primary-metric-key")] - primary_metric_key: String, - #[arg(long = "primary-metric-unit", value_enum)] - primary_metric_unit: CliMetricUnit, - #[arg(long = "primary-metric-objective", value_enum)] - primary_metric_objective: CliOptimizationObjective, -} - -#[derive(Args)] -struct FrontierStatusArgs { - #[command(flatten)] - project: ProjectArg, - #[arg(long)] - frontier: Option<String>, +enum TagCommand { + Add(TagAddArgs), + List(ProjectArg), } #[derive(Subcommand)] -enum NodeCommand { - /// Create a generic DAG node. - Add(NodeAddArgs), - /// List recent nodes. - List(NodeListArgs), - /// Show one node in full. - Show(NodeShowArgs), - /// Attach an annotation to a node. - Annotate(NodeAnnotateArgs), - /// Archive a node without deleting it. - Archive(NodeArchiveArgs), -} - -#[derive(Args)] -struct NodeAddArgs { - #[command(flatten)] - project: ProjectArg, - #[arg(long, value_enum)] - class: CliNodeClass, - #[arg(long)] - frontier: Option<String>, - #[arg(long)] - title: String, - #[arg(long)] - /// Required for `note` and `source` nodes. - summary: Option<String>, - #[arg(long = "payload-json")] - /// JSON object payload. `note` and `source` nodes require a non-empty `body` string. 
- payload_json: Option<String>, - #[arg(long = "payload-file")] - payload_file: Option<PathBuf>, - #[command(flatten)] - tag_selection: ExplicitTagSelectionArgs, - #[arg(long = "field")] - fields: Vec<String>, - #[arg(long = "annotation")] - annotations: Vec<String>, - #[arg(long = "parent")] - parents: Vec<String>, -} - -#[derive(Args)] -struct NodeListArgs { - #[command(flatten)] - project: ProjectArg, - #[arg(long)] - frontier: Option<String>, - #[arg(long, value_enum)] - class: Option<CliNodeClass>, - #[arg(long = "tag")] - tags: Vec<String>, - #[arg(long)] - include_archived: bool, - #[arg(long, default_value_t = 20)] - limit: u32, -} - -#[derive(Args, Default)] -struct ExplicitTagSelectionArgs { - #[arg(long = "tag")] - tags: Vec<String>, - #[arg(long, conflicts_with = "tags")] - no_tags: bool, -} - -#[derive(Args)] -struct NodeShowArgs { - #[command(flatten)] - project: ProjectArg, - #[arg(long)] - node: String, -} - -#[derive(Args)] -struct NodeAnnotateArgs { - #[command(flatten)] - project: ProjectArg, - #[arg(long)] - node: String, - #[arg(long)] - body: String, - #[arg(long)] - label: Option<String>, - #[arg(long)] - visible: bool, -} - -#[derive(Args)] -struct NodeArchiveArgs { - #[command(flatten)] - project: ProjectArg, - #[arg(long)] - node: String, +enum FrontierCommand { + Create(FrontierCreateArgs), + List(ProjectArg), + Read(FrontierSelectorArgs), + Open(FrontierSelectorArgs), + UpdateBrief(FrontierBriefUpdateArgs), + History(FrontierSelectorArgs), } -#[derive(Args)] -struct NoteCommand { - #[command(subcommand)] - command: NoteSubcommand, +#[derive(Subcommand)] +enum HypothesisCommand { + Record(HypothesisRecordArgs), + List(HypothesisListArgs), + Read(HypothesisSelectorArgs), + Update(HypothesisUpdateArgs), + History(HypothesisSelectorArgs), } -#[derive(Args)] -struct HypothesisCommand { - #[command(subcommand)] - command: HypothesisSubcommand, +#[derive(Subcommand)] +enum ExperimentCommand { + Open(ExperimentOpenArgs), + 
List(ExperimentListArgs), + Read(ExperimentSelectorArgs), + Update(ExperimentUpdateArgs), + Close(ExperimentCloseArgs), + History(ExperimentSelectorArgs), } #[derive(Subcommand)] -enum NoteSubcommand { - /// Record a quick off-path note. - Quick(QuickNoteArgs), +enum ArtifactCommand { + Record(ArtifactRecordArgs), + List(ArtifactListArgs), + Read(ArtifactSelectorArgs), + Update(ArtifactUpdateArgs), + History(ArtifactSelectorArgs), } #[derive(Subcommand)] -enum HypothesisSubcommand { - /// Record a core-path hypothesis with low ceremony. - Add(QuickHypothesisArgs), +enum MetricCommand { + Define(MetricDefineArgs), + Keys(MetricKeysArgs), + Best(MetricBestArgs), } #[derive(Subcommand)] -enum TagCommand { - /// Register a new repo-local tag. - Add(TagAddArgs), - /// List registered repo-local tags. +enum DimensionCommand { + Define(DimensionDefineArgs), List(ProjectArg), } -#[derive(Args)] -struct SourceCommand { - #[command(subcommand)] - command: SourceSubcommand, +#[derive(Subcommand)] +enum McpCommand { + Serve(McpServeArgs), + Worker(McpWorkerArgs), } #[derive(Subcommand)] -enum SourceSubcommand { - /// Record imported source material or documentary context. - Add(QuickSourceArgs), +enum UiCommand { + Serve(UiServeArgs), } #[derive(Subcommand)] -enum MetricCommand { - /// Register a project-level metric definition. - Define(MetricDefineArgs), - /// List rankable numeric keys observed in completed experiments. - Keys(MetricKeysArgs), - /// Rank completed experiments by one numeric key. - Best(MetricBestArgs), - /// Re-run the idempotent legacy metric-plane normalization. - Migrate(ProjectArg), +enum SkillCommand { + List, + Install(SkillInstallArgs), + Show(SkillShowArgs), } -#[derive(Subcommand)] -enum DimensionCommand { - /// Register a project-level run dimension definition. - Define(DimensionDefineArgs), - /// List run dimensions and sample values observed in completed runs. 
- List(ProjectArg), +#[derive(Args, Clone)] +struct ProjectArg { + #[arg(long, default_value = ".")] + project: PathBuf, } #[derive(Args)] -struct MetricDefineArgs { +struct TagAddArgs { #[command(flatten)] project: ProjectArg, - /// Metric key used in experiment closure and ranking. #[arg(long)] - key: String, - /// Canonical unit for this metric key. - #[arg(long, value_enum)] - unit: CliMetricUnit, - /// Optimization direction for this metric key. - #[arg(long, value_enum)] - objective: CliOptimizationObjective, - /// Optional human description shown in metric listings. + name: String, #[arg(long)] - description: Option<String>, + description: String, } #[derive(Args)] -struct MetricKeysArgs { +struct FrontierCreateArgs { #[command(flatten)] project: ProjectArg, - /// Restrict results to one frontier. #[arg(long)] - frontier: Option<String>, - /// Restrict results to one metric source. - #[arg(long, value_enum)] - source: Option<CliMetricSource>, - /// Exact run-dimension filter in the form `key=value`. - #[arg(long = "dimension")] - dimensions: Vec<String>, + label: String, + #[arg(long)] + objective: String, + #[arg(long)] + slug: Option<String>, } #[derive(Args)] -struct DimensionDefineArgs { +struct FrontierSelectorArgs { #[command(flatten)] project: ProjectArg, - /// Run-dimension key used to slice experiments. #[arg(long)] - key: String, - /// Canonical value type for this run dimension. - #[arg(long = "type", value_enum)] - value_type: CliFieldValueType, - /// Optional human description shown in dimension listings. 
- #[arg(long)] - description: Option<String>, + frontier: String, } #[derive(Args)] -struct QuickNoteArgs { +struct FrontierBriefUpdateArgs { #[command(flatten)] project: ProjectArg, #[arg(long)] - frontier: Option<String>, + frontier: String, #[arg(long)] - title: String, + expected_revision: Option<u64>, #[arg(long)] - summary: String, + situation: Option<String>, #[arg(long)] - body: String, - #[command(flatten)] - tag_selection: ExplicitTagSelectionArgs, - #[arg(long = "parent")] - parents: Vec<String>, + clear_situation: bool, + #[arg(long = "unknown")] + unknowns: Vec<String>, + #[arg(long = "roadmap")] + roadmap: Vec<String>, } #[derive(Args)] -struct QuickHypothesisArgs { +struct HypothesisRecordArgs { #[command(flatten)] project: ProjectArg, #[arg(long)] @@ -400,255 +253,330 @@ struct QuickHypothesisArgs { summary: String, #[arg(long)] body: String, + #[arg(long)] + slug: Option<String>, + #[arg(long = "tag")] + tags: Vec<String>, #[arg(long = "parent")] parents: Vec<String>, } #[derive(Args)] -struct TagAddArgs { +struct HypothesisListArgs { #[command(flatten)] project: ProjectArg, #[arg(long)] - name: String, + frontier: Option<String>, + #[arg(long = "tag")] + tags: Vec<String>, #[arg(long)] - description: String, + include_archived: bool, + #[arg(long)] + limit: Option<u32>, } #[derive(Args)] -struct QuickSourceArgs { +struct HypothesisSelectorArgs { #[command(flatten)] project: ProjectArg, #[arg(long)] - frontier: Option<String>, + hypothesis: String, +} + +#[derive(Args)] +struct HypothesisUpdateArgs { + #[command(flatten)] + project: ProjectArg, #[arg(long)] - title: String, + hypothesis: String, #[arg(long)] - summary: String, + expected_revision: Option<u64>, #[arg(long)] - body: String, + title: Option<String>, + #[arg(long)] + summary: Option<String>, + #[arg(long)] + body: Option<String>, + #[arg(long = "tag")] + tags: Vec<String>, + #[arg(long = "replace-tags")] + replace_tags: bool, + #[arg(long = "parent")] + parents: Vec<String>, + 
#[arg(long = "replace-parents")] + replace_parents: bool, + #[arg(long, value_enum)] + state: Option<CliArchivePatch>, +} + +#[derive(Args)] +struct ExperimentOpenArgs { #[command(flatten)] - tag_selection: ExplicitTagSelectionArgs, + project: ProjectArg, + #[arg(long)] + hypothesis: String, + #[arg(long)] + title: String, + #[arg(long)] + summary: Option<String>, + #[arg(long)] + slug: Option<String>, + #[arg(long = "tag")] + tags: Vec<String>, #[arg(long = "parent")] parents: Vec<String>, } #[derive(Args)] -struct SchemaFieldUpsertArgs { +struct ExperimentListArgs { #[command(flatten)] project: ProjectArg, #[arg(long)] - name: String, - #[arg(long = "class", value_enum)] - classes: Vec<CliNodeClass>, - #[arg(long, value_enum)] - presence: CliFieldPresence, - #[arg(long, value_enum)] - severity: CliDiagnosticSeverity, + frontier: Option<String>, + #[arg(long)] + hypothesis: Option<String>, #[arg(long, value_enum)] - role: CliFieldRole, - #[arg(long = "inference", value_enum)] - inference_policy: CliInferencePolicy, - #[arg(long = "type", value_enum)] - value_type: Option<CliFieldValueType>, + status: Option<CliExperimentStatus>, + #[arg(long = "tag")] + tags: Vec<String>, + #[arg(long)] + include_archived: bool, + #[arg(long)] + limit: Option<u32>, } #[derive(Args)] -struct SchemaFieldRemoveArgs { +struct ExperimentSelectorArgs { #[command(flatten)] project: ProjectArg, #[arg(long)] - name: String, - #[arg(long = "class", value_enum)] - classes: Vec<CliNodeClass>, + experiment: String, } #[derive(Args)] -struct MetricBestArgs { +struct ExperimentUpdateArgs { #[command(flatten)] project: ProjectArg, - /// Metric key to rank on. #[arg(long)] - key: String, - /// Restrict results to one frontier. + experiment: String, #[arg(long)] - frontier: Option<String>, - /// Restrict results to one metric source. - #[arg(long, value_enum)] - source: Option<CliMetricSource>, - /// Explicit ordering for sources whose objective cannot be inferred. 
+ expected_revision: Option<u64>, + #[arg(long)] + title: Option<String>, + #[arg(long)] + summary: Option<String>, + #[arg(long)] + clear_summary: bool, + #[arg(long = "tag")] + tags: Vec<String>, + #[arg(long = "replace-tags")] + replace_tags: bool, + #[arg(long = "parent")] + parents: Vec<String>, + #[arg(long = "replace-parents")] + replace_parents: bool, #[arg(long, value_enum)] - order: Option<CliMetricOrder>, - /// Exact run-dimension filter in the form `key=value`. - #[arg(long = "dimension")] - dimensions: Vec<String>, - /// Maximum number of ranked experiments to return. - #[arg(long, default_value_t = 10)] - limit: u32, -} - -#[derive(Subcommand)] -enum ExperimentCommand { - /// Open a stateful experiment against one hypothesis. - Open(ExperimentOpenArgs), - /// List open experiments, optionally narrowed to one frontier. - List(ExperimentListArgs), - /// Close a core-path experiment with run data, note, and verdict. - Close(Box<ExperimentCloseArgs>), -} - -#[derive(Subcommand)] -enum McpCommand { - /// Serve the public stdio MCP host. If `--project` is omitted, the host starts unbound. - Serve(McpServeArgs), - #[command(hide = true)] - Worker(McpWorkerArgs), -} - -#[derive(Subcommand)] -enum UiCommand { - /// Serve the local read-only navigator. - Serve(UiServeArgs), + state: Option<CliArchivePatch>, + #[arg(long = "outcome-json")] + outcome_json: Option<String>, + #[arg(long = "outcome-file")] + outcome_file: Option<PathBuf>, } #[derive(Args)] struct ExperimentCloseArgs { #[command(flatten)] project: ProjectArg, - #[arg(long = "experiment")] - experiment_id: String, - #[arg(long = "run-title")] - run_title: String, - #[arg(long = "run-summary")] - run_summary: Option<String>, - /// Repeat for each run dimension as `key=value`. 
- #[arg(long = "dimension")] - dimensions: Vec<String>, - #[arg(long = "backend", value_enum, default_value_t = CliExecutionBackend::Worktree)] + #[arg(long)] + experiment: String, + #[arg(long)] + expected_revision: Option<u64>, + #[arg(long, value_enum)] backend: CliExecutionBackend, - #[arg(long = "cwd")] - working_directory: Option<PathBuf>, - /// Repeat for each argv token passed to the recorded command. #[arg(long = "argv")] argv: Vec<String>, - /// Repeat for each environment override as `KEY=VALUE`. + #[arg(long)] + working_directory: Option<PathBuf>, #[arg(long = "env")] env: Vec<String>, - /// Primary metric in the form `key=value`; key must be preregistered. + #[arg(long = "dimension")] + dimensions: Vec<String>, #[arg(long = "primary-metric")] primary_metric: String, - /// Supporting metric in the form `key=value`; repeat as needed. #[arg(long = "metric")] - metrics: Vec<String>, - #[arg(long)] - note: String, - #[arg(long = "next-hypothesis")] - next_hypotheses: Vec<String>, - #[arg(long = "verdict", value_enum)] + supporting_metrics: Vec<String>, + #[arg(long, value_enum)] verdict: CliFrontierVerdict, - #[arg(long = "analysis-title")] - analysis_title: Option<String>, - #[arg(long = "analysis-summary")] + #[arg(long)] + rationale: String, + #[arg(long)] analysis_summary: Option<String>, - #[arg(long = "analysis-body")] + #[arg(long)] analysis_body: Option<String>, - #[arg(long = "decision-title")] - decision_title: String, - #[arg(long = "decision-rationale")] - decision_rationale: String, } #[derive(Args)] -struct ExperimentOpenArgs { +struct ArtifactRecordArgs { #[command(flatten)] project: ProjectArg, #[arg(long)] - frontier: String, - #[arg(long = "hypothesis-node")] - hypothesis_node: String, + kind: CliArtifactKind, #[arg(long)] - title: String, + label: String, #[arg(long)] summary: Option<String>, + #[arg(long)] + locator: String, + #[arg(long)] + media_type: Option<String>, + #[arg(long)] + slug: Option<String>, + #[arg(long = "attach")] + 
attachments: Vec<String>, } #[derive(Args)] -struct ExperimentListArgs { +struct ArtifactListArgs { #[command(flatten)] project: ProjectArg, #[arg(long)] frontier: Option<String>, + #[arg(long)] + kind: Option<CliArtifactKind>, + #[arg(long)] + attached_to: Option<String>, + #[arg(long)] + limit: Option<u32>, } -#[derive(Subcommand)] -enum SkillCommand { - /// List bundled skills. - List, - /// Install bundled skills into a Codex skill directory. - Install(SkillInstallArgs), - /// Print one bundled skill body. - Show(SkillShowArgs), +#[derive(Args)] +struct ArtifactSelectorArgs { + #[command(flatten)] + project: ProjectArg, + #[arg(long)] + artifact: String, } #[derive(Args)] -struct SkillInstallArgs { - /// Bundled skill name. Defaults to all bundled skills. +struct ArtifactUpdateArgs { + #[command(flatten)] + project: ProjectArg, #[arg(long)] - name: Option<String>, - /// Destination root. Defaults to `~/.codex/skills`. + artifact: String, #[arg(long)] - destination: Option<PathBuf>, + expected_revision: Option<u64>, + #[arg(long)] + kind: Option<CliArtifactKind>, + #[arg(long)] + label: Option<String>, + #[arg(long)] + summary: Option<String>, + #[arg(long)] + clear_summary: bool, + #[arg(long)] + locator: Option<String>, + #[arg(long)] + media_type: Option<String>, + #[arg(long)] + clear_media_type: bool, + #[arg(long = "attach")] + attachments: Vec<String>, + #[arg(long = "replace-attachments")] + replace_attachments: bool, } #[derive(Args)] -struct SkillShowArgs { - /// Bundled skill name. Defaults to `fidget-spinner`. 
+struct MetricDefineArgs { + #[command(flatten)] + project: ProjectArg, #[arg(long)] - name: Option<String>, + key: String, + #[arg(long, value_enum)] + unit: CliMetricUnit, + #[arg(long, value_enum)] + objective: CliOptimizationObjective, + #[arg(long, value_enum, default_value_t = CliMetricVisibility::Canonical)] + visibility: CliMetricVisibility, + #[arg(long)] + description: Option<String>, } #[derive(Args)] -struct ProjectArg { - /// Project root or any nested path inside a project containing `.fidget_spinner/`. - #[arg(long, default_value = ".")] - project: PathBuf, +struct MetricKeysArgs { + #[command(flatten)] + project: ProjectArg, + #[arg(long)] + frontier: Option<String>, + #[arg(long, value_enum, default_value_t = CliMetricScope::Live)] + scope: CliMetricScope, +} + +#[derive(Args)] +struct MetricBestArgs { + #[command(flatten)] + project: ProjectArg, + #[arg(long)] + frontier: Option<String>, + #[arg(long)] + hypothesis: Option<String>, + #[arg(long)] + key: String, + #[arg(long = "dimension")] + dimensions: Vec<String>, + #[arg(long)] + include_rejected: bool, + #[arg(long)] + limit: Option<u32>, + #[arg(long, value_enum)] + order: Option<CliMetricRankOrder>, +} + +#[derive(Args)] +struct DimensionDefineArgs { + #[command(flatten)] + project: ProjectArg, + #[arg(long)] + key: String, + #[arg(long, value_enum)] + value_type: CliFieldValueType, + #[arg(long)] + description: Option<String>, } #[derive(Args)] struct McpServeArgs { - /// Optional initial project binding. When omitted, the MCP starts unbound. #[arg(long)] project: Option<PathBuf>, } #[derive(Args)] struct McpWorkerArgs { - #[arg(long)] + #[arg(long, default_value = ".")] project: PathBuf, } #[derive(Args)] struct UiServeArgs { - /// Path to serve. Accepts a project root, `.fidget_spinner/`, descendants inside it, - /// or a parent directory containing one unique descendant project store. 
- #[arg(long = "path", alias = "project", default_value = ".")] + #[arg(long, default_value = ".")] path: PathBuf, - /// Bind address for the local navigator. #[arg(long, default_value = "127.0.0.1:8913")] bind: SocketAddr, - /// Maximum rows rendered in list views. - #[arg(long, default_value_t = 200)] - limit: u32, + #[arg(long)] + limit: Option<u32>, } -#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)] -enum CliNodeClass { - Contract, - Hypothesis, - Run, - Analysis, - Decision, - Source, - Note, +#[derive(Args)] +struct SkillInstallArgs { + #[arg(long)] + name: Option<String>, + #[arg(long)] + destination: Option<PathBuf>, +} + +#[derive(Args)] +struct SkillShowArgs { + #[arg(long)] + name: Option<String>, } #[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)] @@ -668,23 +596,22 @@ enum CliOptimizationObjective { } #[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)] -enum CliExecutionBackend { - Local, - Worktree, - Ssh, +enum CliMetricVisibility { + Canonical, + Minor, + Hidden, + Archived, } #[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)] -enum CliMetricSource { - RunMetric, - HypothesisPayload, - RunPayload, - AnalysisPayload, - DecisionPayload, +enum CliMetricScope { + Live, + Visible, + All, } #[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)] -enum CliMetricOrder { +enum CliMetricRankOrder { Asc, Desc, } @@ -698,31 +625,23 @@ enum CliFieldValueType { } #[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)] -enum CliDiagnosticSeverity { - Error, - Warning, - Info, -} - -#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)] -enum CliFieldPresence { - Required, - Recommended, - Optional, +enum CliArtifactKind { + Document, + Link, + Log, + Table, + Plot, + Dump, + Binary, + Other, } #[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)] -enum CliFieldRole { - Index, - ProjectionGate, - RenderOnly, - Opaque, -} - -#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)] -enum CliInferencePolicy { - ManualOnly, - ModelMayInfer, 
+enum CliExecutionBackend { + Manual, + LocalProcess, + WorktreeProcess, + SshProcess, } #[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)] @@ -733,63 +652,89 @@ enum CliFrontierVerdict { Rejected, } -fn main() { - if let Err(error) = run() { - eprintln!("error: {error}"); - std::process::exit(1); - } +#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)] +enum CliExperimentStatus { + Open, + Closed, } -fn run() -> Result<(), StoreError> { +#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)] +enum CliArchivePatch { + Archive, + Restore, +} + +fn main() -> Result<(), StoreError> { let cli = Cli::parse(); match cli.command { Command::Init(args) => run_init(args), - Command::Schema { command } => match command { - SchemaCommand::Show(project) => { - let store = open_store(&project.project)?; - print_json(store.schema()) - } - SchemaCommand::UpsertField(args) => run_schema_field_upsert(args), - SchemaCommand::RemoveField(args) => run_schema_field_remove(args), + Command::Project { command } => match command { + ProjectCommand::Status(args) => print_json(&open_store(&args.project)?.status()?), }, - Command::Frontier { command } => match command { - FrontierCommand::Init(args) => run_frontier_init(args), - FrontierCommand::Status(args) => run_frontier_status(args), - }, - Command::Node { command } => match command { - NodeCommand::Add(args) => run_node_add(args), - NodeCommand::List(args) => run_node_list(args), - NodeCommand::Show(args) => run_node_show(args), - NodeCommand::Annotate(args) => run_node_annotate(args), - NodeCommand::Archive(args) => run_node_archive(args), + Command::Tag { command } => match command { + TagCommand::Add(args) => run_tag_add(args), + TagCommand::List(args) => print_json(&open_store(&args.project)?.list_tags()?), }, - Command::Note(command) => match command.command { - NoteSubcommand::Quick(args) => run_quick_note(args), + Command::Frontier { command } => match command { + FrontierCommand::Create(args) => 
run_frontier_create(args), + FrontierCommand::List(args) => { + print_json(&open_store(&args.project)?.list_frontiers()?) + } + FrontierCommand::Read(args) => { + print_json(&open_store(&args.project.project)?.read_frontier(&args.frontier)?) + } + FrontierCommand::Open(args) => { + print_json(&open_store(&args.project.project)?.frontier_open(&args.frontier)?) + } + FrontierCommand::UpdateBrief(args) => run_frontier_brief_update(args), + FrontierCommand::History(args) => { + print_json(&open_store(&args.project.project)?.frontier_history(&args.frontier)?) + } }, - Command::Hypothesis(command) => match command.command { - HypothesisSubcommand::Add(args) => run_quick_hypothesis(args), + Command::Hypothesis { command } => match command { + HypothesisCommand::Record(args) => run_hypothesis_record(args), + HypothesisCommand::List(args) => run_hypothesis_list(args), + HypothesisCommand::Read(args) => { + print_json(&open_store(&args.project.project)?.read_hypothesis(&args.hypothesis)?) + } + HypothesisCommand::Update(args) => run_hypothesis_update(args), + HypothesisCommand::History(args) => print_json( + &open_store(&args.project.project)?.hypothesis_history(&args.hypothesis)?, + ), }, - Command::Tag { command } => match command { - TagCommand::Add(args) => run_tag_add(args), - TagCommand::List(project) => run_tag_list(project), + Command::Experiment { command } => match command { + ExperimentCommand::Open(args) => run_experiment_open(args), + ExperimentCommand::List(args) => run_experiment_list(args), + ExperimentCommand::Read(args) => { + print_json(&open_store(&args.project.project)?.read_experiment(&args.experiment)?) 
+ } + ExperimentCommand::Update(args) => run_experiment_update(args), + ExperimentCommand::Close(args) => run_experiment_close(args), + ExperimentCommand::History(args) => print_json( + &open_store(&args.project.project)?.experiment_history(&args.experiment)?, + ), }, - Command::Source(command) => match command.command { - SourceSubcommand::Add(args) => run_quick_source(args), + Command::Artifact { command } => match command { + ArtifactCommand::Record(args) => run_artifact_record(args), + ArtifactCommand::List(args) => run_artifact_list(args), + ArtifactCommand::Read(args) => { + print_json(&open_store(&args.project.project)?.read_artifact(&args.artifact)?) + } + ArtifactCommand::Update(args) => run_artifact_update(args), + ArtifactCommand::History(args) => { + print_json(&open_store(&args.project.project)?.artifact_history(&args.artifact)?) + } }, Command::Metric { command } => match command { MetricCommand::Define(args) => run_metric_define(args), MetricCommand::Keys(args) => run_metric_keys(args), MetricCommand::Best(args) => run_metric_best(args), - MetricCommand::Migrate(project) => run_metric_migrate(project), }, Command::Dimension { command } => match command { DimensionCommand::Define(args) => run_dimension_define(args), - DimensionCommand::List(project) => run_dimension_list(project), - }, - Command::Experiment { command } => match command { - ExperimentCommand::Open(args) => run_experiment_open(args), - ExperimentCommand::List(args) => run_experiment_list(args), - ExperimentCommand::Close(args) => run_experiment_close(*args), + DimensionCommand::List(args) => { + print_json(&open_store(&args.project)?.list_run_dimensions()?) 
+ } }, Command::Mcp { command } => match command { McpCommand::Serve(args) => mcp::serve(args.project), @@ -811,385 +756,278 @@ fn run() -> Result<(), StoreError> { fn run_init(args: InitArgs) -> Result<(), StoreError> { let project_root = utf8_path(args.project); - let display_name = args - .name - .map(NonEmptyText::new) - .transpose()? - .unwrap_or(default_display_name_for_root(&project_root)?); - let namespace = NonEmptyText::new(args.namespace)?; - let store = ProjectStore::init(&project_root, display_name, namespace)?; - println!("initialized {}", store.state_root()); - println!("project: {}", store.config().display_name); - println!("schema: {}", store.state_root().join("schema.json")); - maybe_print_gitignore_hint(&project_root)?; - Ok(()) + let store = ProjectStore::init( + &project_root, + args.name + .map(NonEmptyText::new) + .transpose()? + .unwrap_or(default_display_name_for_root(&project_root)?), + )?; + print_json(&store.status()?) } -fn run_frontier_init(args: FrontierInitArgs) -> Result<(), StoreError> { +fn run_tag_add(args: TagAddArgs) -> Result<(), StoreError> { let mut store = open_store(&args.project.project)?; - let projection = store.create_frontier(CreateFrontierRequest { - label: NonEmptyText::new(args.label)?, - contract_title: NonEmptyText::new(args.contract_title)?, - contract_summary: args.contract_summary.map(NonEmptyText::new).transpose()?, - contract: FrontierContract { - objective: NonEmptyText::new(args.objective)?, - evaluation: fidget_spinner_core::EvaluationProtocol { - benchmark_suites: to_text_set(args.benchmark_suites)?, - primary_metric: MetricSpec { - metric_key: NonEmptyText::new(args.primary_metric_key)?, - unit: args.primary_metric_unit.into(), - objective: args.primary_metric_objective.into(), - }, - supporting_metrics: BTreeSet::new(), - }, - promotion_criteria: to_text_vec(args.promotion_criteria)?, - }, - })?; - print_json(&projection) + print_json(&store.register_tag( + TagName::new(args.name)?, + 
NonEmptyText::new(args.description)?, + )?) } -fn run_frontier_status(args: FrontierStatusArgs) -> Result<(), StoreError> { - let store = open_store(&args.project.project)?; - if let Some(frontier) = args.frontier { - let projection = store.frontier_projection(parse_frontier_id(&frontier)?)?; - return print_json(&projection); - } - let frontiers = store.list_frontiers()?; - if frontiers.len() == 1 { - return print_json(&store.frontier_projection(frontiers[0].id)?); - } - print_json(&frontiers) +fn run_frontier_create(args: FrontierCreateArgs) -> Result<(), StoreError> { + let mut store = open_store(&args.project.project)?; + print_json(&store.create_frontier(CreateFrontierRequest { + label: NonEmptyText::new(args.label)?, + objective: NonEmptyText::new(args.objective)?, + slug: args.slug.map(Slug::new).transpose()?, + })?) } -fn run_schema_field_upsert(args: SchemaFieldUpsertArgs) -> Result<(), StoreError> { - let mut store = open_store(&args.project.project)?; - let field = store.upsert_schema_field(UpsertSchemaFieldRequest { - name: NonEmptyText::new(args.name)?, - node_classes: parse_node_class_set(args.classes), - presence: args.presence.into(), - severity: args.severity.into(), - role: args.role.into(), - inference_policy: args.inference_policy.into(), - value_type: args.value_type.map(Into::into), - })?; - print_json(&json!({ - "schema": store.schema().schema_ref(), - "field": schema_field_json(&field), - })) -} - -fn run_schema_field_remove(args: SchemaFieldRemoveArgs) -> Result<(), StoreError> { +fn run_frontier_brief_update(args: FrontierBriefUpdateArgs) -> Result<(), StoreError> { let mut store = open_store(&args.project.project)?; - let removed_count = store.remove_schema_field(RemoveSchemaFieldRequest { - name: NonEmptyText::new(args.name)?, - node_classes: (!args.classes.is_empty()).then(|| parse_node_class_set(args.classes)), - })?; - print_json(&json!({ - "schema": store.schema().schema_ref(), - "removed_count": removed_count, - })) + let roadmap = 
if args.roadmap.is_empty() { + None + } else { + Some( + args.roadmap + .into_iter() + .map(parse_roadmap_item) + .collect::<Result<Vec<_>, _>>()?, + ) + }; + let unknowns = if args.unknowns.is_empty() { + None + } else { + Some(to_non_empty_texts(args.unknowns)?) + }; + print_json(&store.update_frontier_brief(UpdateFrontierBriefRequest { + frontier: args.frontier, + expected_revision: args.expected_revision, + situation: cli_text_patch(args.situation, args.clear_situation)?, + roadmap, + unknowns, + })?) } -fn run_node_add(args: NodeAddArgs) -> Result<(), StoreError> { +fn run_hypothesis_record(args: HypothesisRecordArgs) -> Result<(), StoreError> { let mut store = open_store(&args.project.project)?; - let class: NodeClass = args.class.into(); - let frontier_id = args - .frontier - .as_deref() - .map(parse_frontier_id) - .transpose()?; - let tags = optional_cli_tags(args.tag_selection, class == NodeClass::Note)?; - let payload = load_payload( - store.schema().schema_ref(), - args.payload_json, - args.payload_file, - args.fields, - )?; - validate_cli_prose_payload(class, args.summary.as_deref(), &payload)?; - let annotations = args - .annotations - .into_iter() - .map(|body| Ok(NodeAnnotation::hidden(NonEmptyText::new(body)?))) - .collect::<Result<Vec<_>, StoreError>>()?; - let node = store.add_node(CreateNodeRequest { - class, - frontier_id, + print_json(&store.create_hypothesis(CreateHypothesisRequest { + frontier: args.frontier, + slug: args.slug.map(Slug::new).transpose()?, title: NonEmptyText::new(args.title)?, - summary: args.summary.map(NonEmptyText::new).transpose()?, - tags, - payload, - annotations, - attachments: lineage_attachments(args.parents)?, - })?; - print_json(&node) + summary: NonEmptyText::new(args.summary)?, + body: NonEmptyText::new(args.body)?, + tags: parse_tag_set(args.tags)?, + parents: parse_vertex_selectors(args.parents)?, + })?) 
} -fn run_node_list(args: NodeListArgs) -> Result<(), StoreError> { +fn run_hypothesis_list(args: HypothesisListArgs) -> Result<(), StoreError> { let store = open_store(&args.project.project)?; - let items = store.list_nodes(ListNodesQuery { - frontier_id: args - .frontier - .as_deref() - .map(parse_frontier_id) - .transpose()?, - class: args.class.map(Into::into), + print_json(&store.list_hypotheses(ListHypothesesQuery { + frontier: args.frontier, tags: parse_tag_set(args.tags)?, include_archived: args.include_archived, limit: args.limit, - })?; - print_json(&items) -} - -fn run_node_show(args: NodeShowArgs) -> Result<(), StoreError> { - let store = open_store(&args.project.project)?; - let node_id = parse_node_id(&args.node)?; - let node = store - .get_node(node_id)? - .ok_or(StoreError::NodeNotFound(node_id))?; - print_json(&node) + })?) } -fn run_node_annotate(args: NodeAnnotateArgs) -> Result<(), StoreError> { +fn run_hypothesis_update(args: HypothesisUpdateArgs) -> Result<(), StoreError> { let mut store = open_store(&args.project.project)?; - let annotation = NodeAnnotation { - id: fidget_spinner_core::AnnotationId::fresh(), - visibility: if args.visible { - AnnotationVisibility::Visible - } else { - AnnotationVisibility::HiddenByDefault - }, - label: args.label.map(NonEmptyText::new).transpose()?, - body: NonEmptyText::new(args.body)?, - created_at: time::OffsetDateTime::now_utc(), + let tags = if args.replace_tags { + Some(parse_tag_set(args.tags)?) + } else { + None }; - store.annotate_node(parse_node_id(&args.node)?, annotation)?; - println!("annotated {}", args.node); - Ok(()) + let parents = if args.replace_parents { + Some(parse_vertex_selectors(args.parents)?) 
+ } else { + None + }; + print_json(&store.update_hypothesis(UpdateHypothesisRequest { + hypothesis: args.hypothesis, + expected_revision: args.expected_revision, + title: args.title.map(NonEmptyText::new).transpose()?, + summary: args.summary.map(NonEmptyText::new).transpose()?, + body: args.body.map(NonEmptyText::new).transpose()?, + tags, + parents, + archived: archive_patch(args.state), + })?) } -fn run_node_archive(args: NodeArchiveArgs) -> Result<(), StoreError> { +fn run_experiment_open(args: ExperimentOpenArgs) -> Result<(), StoreError> { let mut store = open_store(&args.project.project)?; - store.archive_node(parse_node_id(&args.node)?)?; - println!("archived {}", args.node); - Ok(()) + print_json(&store.open_experiment(OpenExperimentRequest { + hypothesis: args.hypothesis, + slug: args.slug.map(Slug::new).transpose()?, + title: NonEmptyText::new(args.title)?, + summary: args.summary.map(NonEmptyText::new).transpose()?, + tags: parse_tag_set(args.tags)?, + parents: parse_vertex_selectors(args.parents)?, + })?) } -fn run_quick_note(args: QuickNoteArgs) -> Result<(), StoreError> { +fn run_experiment_list(args: ExperimentListArgs) -> Result<(), StoreError> { + let store = open_store(&args.project.project)?; + print_json(&store.list_experiments(ListExperimentsQuery { + frontier: args.frontier, + hypothesis: args.hypothesis, + tags: parse_tag_set(args.tags)?, + include_archived: args.include_archived, + status: args.status.map(Into::into), + limit: args.limit, + })?) 
+} + +fn run_experiment_update(args: ExperimentUpdateArgs) -> Result<(), StoreError> { let mut store = open_store(&args.project.project)?; - let payload = NodePayload::with_schema( - store.schema().schema_ref(), - json_object(json!({ "body": args.body }))?, - ); - let node = store.add_node(CreateNodeRequest { - class: NodeClass::Note, - frontier_id: args - .frontier - .as_deref() - .map(parse_frontier_id) - .transpose()?, - title: NonEmptyText::new(args.title)?, - summary: Some(NonEmptyText::new(args.summary)?), - tags: Some(explicit_cli_tags(args.tag_selection)?), - payload, - annotations: Vec::new(), - attachments: lineage_attachments(args.parents)?, - })?; - print_json(&node) + let outcome = + load_optional_json::<ExperimentOutcomePatch>(args.outcome_json, args.outcome_file)?; + print_json(&store.update_experiment(UpdateExperimentRequest { + experiment: args.experiment, + expected_revision: args.expected_revision, + title: args.title.map(NonEmptyText::new).transpose()?, + summary: cli_text_patch(args.summary, args.clear_summary)?, + tags: if args.replace_tags { + Some(parse_tag_set(args.tags)?) + } else { + None + }, + parents: if args.replace_parents { + Some(parse_vertex_selectors(args.parents)?) + } else { + None + }, + archived: archive_patch(args.state), + outcome, + })?) 
} -fn run_quick_hypothesis(args: QuickHypothesisArgs) -> Result<(), StoreError> { +fn run_experiment_close(args: ExperimentCloseArgs) -> Result<(), StoreError> { let mut store = open_store(&args.project.project)?; - let payload = NodePayload::with_schema( - store.schema().schema_ref(), - json_object(json!({ "body": args.body }))?, - ); - let node = store.add_node(CreateNodeRequest { - class: NodeClass::Hypothesis, - frontier_id: Some(parse_frontier_id(&args.frontier)?), - title: NonEmptyText::new(args.title)?, - summary: Some(NonEmptyText::new(args.summary)?), - tags: None, - payload, - annotations: Vec::new(), - attachments: lineage_attachments(args.parents)?, - })?; - print_json(&node) + let analysis = match (args.analysis_summary, args.analysis_body) { + (Some(summary), Some(body)) => Some(ExperimentAnalysis { + summary: NonEmptyText::new(summary)?, + body: NonEmptyText::new(body)?, + }), + (None, None) => None, + _ => { + return Err(invalid_input( + "analysis requires both --analysis-summary and --analysis-body", + )); + } + }; + print_json( + &store.close_experiment(CloseExperimentRequest { + experiment: args.experiment, + expected_revision: args.expected_revision, + backend: args.backend.into(), + command: CommandRecipe::new( + args.working_directory.map(utf8_path), + to_non_empty_texts(args.argv)?, + parse_env(args.env), + )?, + dimensions: parse_dimension_assignments(args.dimensions)?, + primary_metric: parse_metric_value_assignment(&args.primary_metric)?, + supporting_metrics: args + .supporting_metrics + .into_iter() + .map(|raw| parse_metric_value_assignment(&raw)) + .collect::<Result<Vec<_>, _>>()?, + verdict: args.verdict.into(), + rationale: NonEmptyText::new(args.rationale)?, + analysis, + })?, + ) } -fn run_tag_add(args: TagAddArgs) -> Result<(), StoreError> { +fn run_artifact_record(args: ArtifactRecordArgs) -> Result<(), StoreError> { let mut store = open_store(&args.project.project)?; - let tag = store.add_tag( - TagName::new(args.name)?, - 
NonEmptyText::new(args.description)?, - )?; - print_json(&tag) + print_json(&store.create_artifact(CreateArtifactRequest { + slug: args.slug.map(Slug::new).transpose()?, + kind: args.kind.into(), + label: NonEmptyText::new(args.label)?, + summary: args.summary.map(NonEmptyText::new).transpose()?, + locator: NonEmptyText::new(args.locator)?, + media_type: args.media_type.map(NonEmptyText::new).transpose()?, + attachments: parse_attachment_selectors(args.attachments)?, + })?) } -fn run_tag_list(args: ProjectArg) -> Result<(), StoreError> { - let store = open_store(&args.project)?; - print_json(&store.list_tags()?) +fn run_artifact_list(args: ArtifactListArgs) -> Result<(), StoreError> { + let store = open_store(&args.project.project)?; + print_json( + &store.list_artifacts(ListArtifactsQuery { + frontier: args.frontier, + kind: args.kind.map(Into::into), + attached_to: args + .attached_to + .as_deref() + .map(parse_attachment_selector) + .transpose()?, + limit: args.limit, + })?, + ) } -fn run_quick_source(args: QuickSourceArgs) -> Result<(), StoreError> { +fn run_artifact_update(args: ArtifactUpdateArgs) -> Result<(), StoreError> { let mut store = open_store(&args.project.project)?; - let payload = NodePayload::with_schema( - store.schema().schema_ref(), - json_object(json!({ "body": args.body }))?, - ); - let node = store.add_node(CreateNodeRequest { - class: NodeClass::Source, - frontier_id: args - .frontier - .as_deref() - .map(parse_frontier_id) - .transpose()?, - title: NonEmptyText::new(args.title)?, - summary: Some(NonEmptyText::new(args.summary)?), - tags: optional_cli_tags(args.tag_selection, false)?, - payload, - annotations: Vec::new(), - attachments: lineage_attachments(args.parents)?, - })?; - print_json(&node) + print_json(&store.update_artifact(UpdateArtifactRequest { + artifact: args.artifact, + expected_revision: args.expected_revision, + kind: args.kind.map(Into::into), + label: args.label.map(NonEmptyText::new).transpose()?, + summary: 
cli_text_patch(args.summary, args.clear_summary)?, + locator: args.locator.map(NonEmptyText::new).transpose()?, + media_type: cli_text_patch(args.media_type, args.clear_media_type)?, + attachments: if args.replace_attachments { + Some(parse_attachment_selectors(args.attachments)?) + } else { + None + }, + })?) } fn run_metric_define(args: MetricDefineArgs) -> Result<(), StoreError> { let mut store = open_store(&args.project.project)?; - let record = store.define_metric(DefineMetricRequest { + print_json(&store.define_metric(DefineMetricRequest { key: NonEmptyText::new(args.key)?, unit: args.unit.into(), objective: args.objective.into(), + visibility: args.visibility.into(), description: args.description.map(NonEmptyText::new).transpose()?, - })?; - print_json(&record) + })?) } fn run_metric_keys(args: MetricKeysArgs) -> Result<(), StoreError> { let store = open_store(&args.project.project)?; - print_json( - &store.list_metric_keys_filtered(MetricKeyQuery { - frontier_id: args - .frontier - .as_deref() - .map(parse_frontier_id) - .transpose()?, - source: args.source.map(Into::into), - dimensions: coerce_cli_dimension_filters(&store, args.dimensions)?, - })?, - ) + print_json(&store.metric_keys(MetricKeysQuery { + frontier: args.frontier, + scope: args.scope.into(), + })?) 
} fn run_metric_best(args: MetricBestArgs) -> Result<(), StoreError> { let store = open_store(&args.project.project)?; - let entries = store.best_metrics(MetricBestQuery { + print_json(&store.metric_best(MetricBestQuery { + frontier: args.frontier, + hypothesis: args.hypothesis, key: NonEmptyText::new(args.key)?, - frontier_id: args - .frontier - .as_deref() - .map(parse_frontier_id) - .transpose()?, - source: args.source.map(Into::into), - dimensions: coerce_cli_dimension_filters(&store, args.dimensions)?, - order: args.order.map(Into::into), + dimensions: parse_dimension_assignments(args.dimensions)?, + include_rejected: args.include_rejected, limit: args.limit, - })?; - print_json(&entries) -} - -fn run_metric_migrate(args: ProjectArg) -> Result<(), StoreError> { - let mut store = open_store(&args.project)?; - print_json(&store.migrate_metric_plane()?) + order: args.order.map(Into::into), + })?) } fn run_dimension_define(args: DimensionDefineArgs) -> Result<(), StoreError> { let mut store = open_store(&args.project.project)?; - let record = store.define_run_dimension(DefineRunDimensionRequest { + print_json(&store.define_run_dimension(DefineRunDimensionRequest { key: NonEmptyText::new(args.key)?, value_type: args.value_type.into(), description: args.description.map(NonEmptyText::new).transpose()?, - })?; - print_json(&record) -} - -fn run_dimension_list(args: ProjectArg) -> Result<(), StoreError> { - let store = open_store(&args.project)?; - print_json(&store.list_run_dimensions()?) 
-} - -fn run_experiment_open(args: ExperimentOpenArgs) -> Result<(), StoreError> { - let mut store = open_store(&args.project.project)?; - let summary = args.summary.map(NonEmptyText::new).transpose()?; - let experiment = store.open_experiment(OpenExperimentRequest { - frontier_id: parse_frontier_id(&args.frontier)?, - hypothesis_node_id: parse_node_id(&args.hypothesis_node)?, - title: NonEmptyText::new(args.title)?, - summary, - })?; - print_json(&experiment) -} - -fn run_experiment_list(args: ExperimentListArgs) -> Result<(), StoreError> { - let store = open_store(&args.project.project)?; - let frontier_id = args - .frontier - .as_deref() - .map(parse_frontier_id) - .transpose()?; - print_json(&store.list_open_experiments(frontier_id)?) -} - -fn run_experiment_close(args: ExperimentCloseArgs) -> Result<(), StoreError> { - let mut store = open_store(&args.project.project)?; - let command = CommandRecipe::new( - args.working_directory - .map(utf8_path) - .unwrap_or_else(|| store.project_root().to_path_buf()), - to_text_vec(args.argv)?, - parse_env(args.env), - )?; - let analysis = match ( - args.analysis_title, - args.analysis_summary, - args.analysis_body, - ) { - (Some(title), Some(summary), Some(body)) => Some(ExperimentAnalysisDraft { - title: NonEmptyText::new(title)?, - summary: NonEmptyText::new(summary)?, - body: NonEmptyText::new(body)?, - }), - (None, None, None) => None, - _ => { - return Err(StoreError::Json(serde_json::Error::io( - std::io::Error::new( - std::io::ErrorKind::InvalidInput, - "analysis-title, analysis-summary, and analysis-body must be provided together", - ), - ))); - } - }; - let receipt = store.close_experiment(CloseExperimentRequest { - experiment_id: parse_experiment_id(&args.experiment_id)?, - run_title: NonEmptyText::new(args.run_title)?, - run_summary: args.run_summary.map(NonEmptyText::new).transpose()?, - backend: args.backend.into(), - dimensions: coerce_cli_dimension_filters(&store, args.dimensions)?, - command, - 
primary_metric: parse_metric_value(args.primary_metric)?, - supporting_metrics: args - .metrics - .into_iter() - .map(parse_metric_value) - .collect::<Result<Vec<_>, _>>()?, - note: FrontierNote { - summary: NonEmptyText::new(args.note)?, - next_hypotheses: to_text_vec(args.next_hypotheses)?, - }, - verdict: args.verdict.into(), - analysis, - decision_title: NonEmptyText::new(args.decision_title)?, - decision_rationale: NonEmptyText::new(args.decision_rationale)?, - })?; - print_json(&receipt) + })?) } fn run_skill_install(args: SkillInstallArgs) -> Result<(), StoreError> { @@ -1240,11 +1078,11 @@ fn install_skill(skill: bundled_skill::BundledSkill, destination: &Path) -> Resu Ok(()) } -fn open_store(path: &Path) -> Result<ProjectStore, StoreError> { +pub(crate) fn open_store(path: &Path) -> Result<ProjectStore, StoreError> { ProjectStore::open(utf8_path(path.to_path_buf())) } -fn resolve_ui_project_root(path: &Utf8Path) -> Result<Utf8PathBuf, StoreError> { +pub(crate) fn resolve_ui_project_root(path: &Utf8Path) -> Result<Utf8PathBuf, StoreError> { if let Some(project_root) = fidget_spinner_store_sqlite::discover_project_root(path) { return Ok(project_root); } @@ -1266,7 +1104,7 @@ fn resolve_ui_project_root(path: &Utf8Path) -> Result<Utf8PathBuf, StoreError> { } } -fn open_or_init_store_for_binding(path: &Path) -> Result<ProjectStore, StoreError> { +pub(crate) fn open_or_init_store_for_binding(path: &Path) -> Result<ProjectStore, StoreError> { let requested_root = utf8_path(path.to_path_buf()); match ProjectStore::open(requested_root.clone()) { Ok(store) => Ok(store), @@ -1275,17 +1113,13 @@ fn open_or_init_store_for_binding(path: &Path) -> Result<ProjectStore, StoreErro if !is_empty_directory(&project_root)? 
{ return Err(StoreError::MissingProjectStore(requested_root)); } - ProjectStore::init( - &project_root, - default_display_name_for_root(&project_root)?, - default_namespace_for_root(&project_root)?, - ) + ProjectStore::init(&project_root, default_display_name_for_root(&project_root)?) } Err(error) => Err(error), } } -fn utf8_path(path: impl Into<PathBuf>) -> Utf8PathBuf { +pub(crate) fn utf8_path(path: impl Into<PathBuf>) -> Utf8PathBuf { Utf8PathBuf::from(path.into().to_string_lossy().into_owned()) } @@ -1295,7 +1129,7 @@ fn binding_bootstrap_root(path: &Utf8Path) -> Result<Utf8PathBuf, StoreError> { .parent() .map_or_else(|| path.to_path_buf(), Utf8Path::to_path_buf)), Ok(_) => Ok(path.to_path_buf()), - Err(error) if error.kind() == std::io::ErrorKind::NotFound => Ok(path.to_path_buf()), + Err(error) if error.kind() == io::ErrorKind::NotFound => Ok(path.to_path_buf()), Err(error) => Err(StoreError::from(error)), } } @@ -1307,7 +1141,7 @@ fn is_empty_directory(path: &Utf8Path) -> Result<bool, StoreError> { Ok(entries.next().transpose()?.is_none()) } Ok(_) => Ok(false), - Err(error) if error.kind() == std::io::ErrorKind::NotFound => Ok(false), + Err(error) if error.kind() == io::ErrorKind::NotFound => Ok(false), Err(error) => Err(StoreError::from(error)), } } @@ -1325,7 +1159,7 @@ fn collect_descendant_project_roots( ) -> Result<(), StoreError> { let metadata = match fs::metadata(path.as_std_path()) { Ok(metadata) => metadata, - Err(error) if error.kind() == std::io::ErrorKind::NotFound => return Ok(()), + Err(error) if error.kind() == io::ErrorKind::NotFound => return Ok(()), Err(error) => return Err(StoreError::from(error)), }; if metadata.is_file() { @@ -1362,45 +1196,6 @@ fn default_display_name_for_root(project_root: &Utf8Path) -> Result<NonEmptyText .map_err(StoreError::from) } -fn default_namespace_for_root(project_root: &Utf8Path) -> Result<NonEmptyText, StoreError> { - let slug = slugify_namespace_component(project_root.file_name().unwrap_or("project")); - 
NonEmptyText::new(format!("local.{slug}")).map_err(StoreError::from) -} - -fn slugify_namespace_component(raw: &str) -> String { - let mut slug = String::new(); - let mut previous_was_separator = false; - for character in raw.chars().flat_map(char::to_lowercase) { - if character.is_ascii_alphanumeric() { - slug.push(character); - previous_was_separator = false; - continue; - } - if !previous_was_separator { - slug.push('_'); - previous_was_separator = true; - } - } - let slug = slug.trim_matches('_').to_owned(); - if slug.is_empty() { - "project".to_owned() - } else { - slug - } -} - -fn to_text_vec(values: Vec<String>) -> Result<Vec<NonEmptyText>, StoreError> { - values - .into_iter() - .map(NonEmptyText::new) - .collect::<Result<Vec<_>, _>>() - .map_err(StoreError::from) -} - -fn to_text_set(values: Vec<String>) -> Result<BTreeSet<NonEmptyText>, StoreError> { - to_text_vec(values).map(BTreeSet::from_iter) -} - fn parse_tag_set(values: Vec<String>) -> Result<BTreeSet<TagName>, StoreError> { values .into_iter() @@ -1409,290 +1204,198 @@ fn parse_tag_set(values: Vec<String>) -> Result<BTreeSet<TagName>, StoreError> { .map_err(StoreError::from) } -fn explicit_cli_tags(selection: ExplicitTagSelectionArgs) -> Result<BTreeSet<TagName>, StoreError> { - optional_cli_tags(selection, true)?.ok_or(StoreError::NoteTagsRequired) -} - -fn optional_cli_tags( - selection: ExplicitTagSelectionArgs, - required: bool, -) -> Result<Option<BTreeSet<TagName>>, StoreError> { - if selection.no_tags { - return Ok(Some(BTreeSet::new())); - } - if selection.tags.is_empty() { - return if required { - Err(StoreError::NoteTagsRequired) - } else { - Ok(None) - }; - } - Ok(Some(parse_tag_set(selection.tags)?)) -} - -fn parse_env(values: Vec<String>) -> BTreeMap<String, String> { +pub(crate) fn parse_vertex_selectors( + values: Vec<String>, +) -> Result<Vec<VertexSelector>, StoreError> { values .into_iter() - .filter_map(|entry| { - let (key, value) = entry.split_once('=')?; - 
Some((key.to_owned(), value.to_owned())) + .map(|raw| { + let (kind, selector) = raw + .split_once(':') + .ok_or_else(|| invalid_input("expected parent selector in the form `hypothesis:<selector>` or `experiment:<selector>`"))?; + match kind { + "hypothesis" => Ok(VertexSelector::Hypothesis(selector.to_owned())), + "experiment" => Ok(VertexSelector::Experiment(selector.to_owned())), + _ => Err(invalid_input(format!("unknown parent kind `{kind}`"))), + } }) .collect() } -fn lineage_attachments(parents: Vec<String>) -> Result<Vec<EdgeAttachment>, StoreError> { - parents +pub(crate) fn parse_attachment_selectors( + values: Vec<String>, +) -> Result<Vec<AttachmentSelector>, StoreError> { + values .into_iter() - .map(|parent| { - Ok(EdgeAttachment { - node_id: parse_node_id(&parent)?, - kind: fidget_spinner_core::EdgeKind::Lineage, - direction: EdgeAttachmentDirection::ExistingToNew, - }) - }) + .map(|raw| parse_attachment_selector(&raw)) .collect() } -fn load_payload( - schema: fidget_spinner_core::PayloadSchemaRef, - payload_json: Option<String>, - payload_file: Option<PathBuf>, - fields: Vec<String>, -) -> Result<NodePayload, StoreError> { - let mut map = Map::new(); - if let Some(text) = payload_json { - map.extend(json_object(serde_json::from_str::<Value>(&text)?)?); - } - if let Some(path) = payload_file { - let text = fs::read_to_string(path)?; - map.extend(json_object(serde_json::from_str::<Value>(&text)?)?); +pub(crate) fn parse_attachment_selector(raw: &str) -> Result<AttachmentSelector, StoreError> { + let (kind, selector) = raw + .split_once(':') + .ok_or_else(|| invalid_input("expected attachment selector in the form `frontier:<selector>`, `hypothesis:<selector>`, or `experiment:<selector>`"))?; + match kind { + "frontier" => Ok(AttachmentSelector::Frontier(selector.to_owned())), + "hypothesis" => Ok(AttachmentSelector::Hypothesis(selector.to_owned())), + "experiment" => Ok(AttachmentSelector::Experiment(selector.to_owned())), + _ => 
Err(invalid_input(format!("unknown attachment kind `{kind}`"))), } - for field in fields { - let Some((key, raw_value)) = field.split_once('=') else { - continue; - }; - let value = serde_json::from_str::<Value>(raw_value).unwrap_or_else(|_| json!(raw_value)); - let _ = map.insert(key.to_owned(), value); - } - Ok(NodePayload::with_schema(schema, map)) } -fn validate_cli_prose_payload( - class: NodeClass, - summary: Option<&str>, - payload: &NodePayload, -) -> Result<(), StoreError> { - if !matches!(class, NodeClass::Note | NodeClass::Source) { - return Ok(()); - } - if summary.is_none() { - return Err(StoreError::ProseSummaryRequired(class)); - } - match payload.field("body") { - Some(Value::String(body)) if !body.trim().is_empty() => Ok(()), - _ => Err(StoreError::ProseBodyRequired(class)), - } +fn parse_roadmap_item(raw: String) -> Result<FrontierRoadmapItemDraft, StoreError> { + let mut parts = raw.splitn(3, ':'); + let rank = parts + .next() + .ok_or_else(|| invalid_input("roadmap items must look like `rank:hypothesis[:summary]`"))? + .parse::<u32>() + .map_err(|error| invalid_input(format!("invalid roadmap rank: {error}")))?; + let hypothesis = parts + .next() + .ok_or_else(|| invalid_input("roadmap items must include a hypothesis selector"))? 
+ .to_owned(); + let summary = parts + .next() + .map(NonEmptyText::new) + .transpose() + .map_err(StoreError::from)?; + Ok(FrontierRoadmapItemDraft { + rank, + hypothesis, + summary, + }) } -fn json_object(value: Value) -> Result<Map<String, Value>, StoreError> { - match value { - Value::Object(map) => Ok(map), - other => Err(invalid_input(format!( - "expected JSON object, got {other:?}" - ))), - } +pub(crate) fn parse_env(values: Vec<String>) -> BTreeMap<String, String> { + values + .into_iter() + .filter_map(|entry| { + let (key, value) = entry.split_once('=')?; + Some((key.to_owned(), value.to_owned())) + }) + .collect() } -fn schema_field_json(field: &ProjectFieldSpec) -> Value { - json!({ - "name": field.name, - "node_classes": field.node_classes.iter().map(ToString::to_string).collect::<Vec<_>>(), - "presence": field.presence.as_str(), - "severity": field.severity.as_str(), - "role": field.role.as_str(), - "inference_policy": field.inference_policy.as_str(), - "value_type": field.value_type.map(FieldValueType::as_str), +fn parse_metric_value_assignment( + raw: &str, +) -> Result<fidget_spinner_core::MetricValue, StoreError> { + let (key, value) = raw + .split_once('=') + .ok_or_else(|| invalid_input("expected metric assignment in the form `key=value`"))?; + let value = value + .parse::<f64>() + .map_err(|error| invalid_input(format!("invalid metric value `{value}`: {error}")))?; + Ok(fidget_spinner_core::MetricValue { + key: NonEmptyText::new(key.to_owned())?, + value, }) } -fn parse_node_class_set(classes: Vec<CliNodeClass>) -> BTreeSet<NodeClass> { - classes.into_iter().map(Into::into).collect() -} - -fn run_git(project_root: &Utf8Path, args: &[&str]) -> Result<Option<String>, StoreError> { - let output = std::process::Command::new("git") - .arg("-C") - .arg(project_root.as_str()) - .args(args) - .output()?; - if !output.status.success() { - return Ok(None); - } - let text = String::from_utf8_lossy(&output.stdout).trim().to_owned(); - if text.is_empty() { 
- return Ok(None); - } - Ok(Some(text)) +pub(crate) fn parse_dimension_assignments( + values: Vec<String>, +) -> Result<BTreeMap<NonEmptyText, RunDimensionValue>, StoreError> { + values + .into_iter() + .map(|entry| { + let (key, raw_value) = entry.split_once('=').ok_or_else(|| { + invalid_input("expected dimension assignment in the form `key=value`") + })?; + let json_value = serde_json::from_str::<Value>(raw_value) + .unwrap_or_else(|_| Value::String(raw_value.to_owned())); + Ok(( + NonEmptyText::new(key.to_owned())?, + json_to_dimension_value(json_value)?, + )) + }) + .collect() } -fn maybe_print_gitignore_hint(project_root: &Utf8Path) -> Result<(), StoreError> { - if run_git(project_root, &["rev-parse", "--show-toplevel"])?.is_none() { - return Ok(()); - } - - let status = std::process::Command::new("git") - .arg("-C") - .arg(project_root.as_str()) - .args(["check-ignore", "-q", ".fidget_spinner"]) - .status()?; - - match status.code() { - Some(0) => Ok(()), - Some(1) => { - println!( - "note: add `.fidget_spinner/` to `.gitignore` or `.git/info/exclude` if you do not want local state in `git status`" - ); - Ok(()) +fn json_to_dimension_value(value: Value) -> Result<RunDimensionValue, StoreError> { + match value { + Value::String(raw) => { + if time::OffsetDateTime::parse(&raw, &time::format_description::well_known::Rfc3339) + .is_ok() + { + Ok(RunDimensionValue::Timestamp(NonEmptyText::new(raw)?)) + } else { + Ok(RunDimensionValue::String(NonEmptyText::new(raw)?)) + } } - _ => Ok(()), + Value::Number(number) => number + .as_f64() + .map(RunDimensionValue::Numeric) + .ok_or_else(|| invalid_input("numeric dimension values must fit into f64")), + Value::Bool(value) => Ok(RunDimensionValue::Boolean(value)), + _ => Err(invalid_input( + "dimension values must be string, number, boolean, or RFC3339 timestamp", + )), } } -fn parse_metric_value(raw: String) -> Result<MetricValue, StoreError> { - let Some((key, value)) = raw.split_once('=') else { - return 
Err(invalid_input("metrics must look like key=value")); - }; - Ok(MetricValue { - key: NonEmptyText::new(key)?, - value: value - .parse::<f64>() - .map_err(|error| invalid_input(format!("invalid metric value: {error}")))?, - }) -} - -fn coerce_cli_dimension_filters( - store: &ProjectStore, - raw_dimensions: Vec<String>, -) -> Result<BTreeMap<NonEmptyText, fidget_spinner_core::RunDimensionValue>, StoreError> { - let definitions = store - .list_run_dimensions()? - .into_iter() - .map(|summary| (summary.key.to_string(), summary.value_type)) - .collect::<BTreeMap<_, _>>(); - let raw_dimensions = parse_dimension_assignments(raw_dimensions)? - .into_iter() - .map(|(key, raw_value)| { - let Some(value_type) = definitions.get(&key) else { - return Err(invalid_input(format!( - "unknown run dimension `{key}`; register it first" - ))); - }; - Ok((key, parse_cli_dimension_value(*value_type, &raw_value)?)) - }) - .collect::<Result<BTreeMap<_, _>, StoreError>>()?; - store.coerce_run_dimensions(raw_dimensions) -} - -fn parse_dimension_assignments( - raw_dimensions: Vec<String>, -) -> Result<BTreeMap<String, String>, StoreError> { - raw_dimensions +fn to_non_empty_texts(values: Vec<String>) -> Result<Vec<NonEmptyText>, StoreError> { + values .into_iter() - .map(|raw| { - let Some((key, value)) = raw.split_once('=') else { - return Err(invalid_input("dimensions must look like key=value")); - }; - Ok((key.to_owned(), value.to_owned())) - }) - .collect() + .map(NonEmptyText::new) + .collect::<Result<Vec<_>, _>>() + .map_err(StoreError::from) } -fn parse_cli_dimension_value(value_type: FieldValueType, raw: &str) -> Result<Value, StoreError> { - match value_type { - FieldValueType::String | FieldValueType::Timestamp => Ok(Value::String(raw.to_owned())), - FieldValueType::Numeric => Ok(json!(raw.parse::<f64>().map_err(|error| { - invalid_input(format!("invalid numeric dimension value: {error}")) - })?)), - FieldValueType::Boolean => match raw { - "true" => Ok(Value::Bool(true)), - 
"false" => Ok(Value::Bool(false)), - other => Err(invalid_input(format!( - "invalid boolean dimension value `{other}`" - ))), - }, +fn load_optional_json<T: for<'de> serde::Deserialize<'de>>( + inline: Option<String>, + file: Option<PathBuf>, +) -> Result<Option<T>, StoreError> { + match (inline, file) { + (Some(raw), None) => serde_json::from_str(&raw) + .map(Some) + .map_err(StoreError::from), + (None, Some(path)) => serde_json::from_slice(&fs::read(path)?) + .map(Some) + .map_err(StoreError::from), + (None, None) => Ok(None), + (Some(_), Some(_)) => Err(invalid_input( + "use only one of --outcome-json or --outcome-file", + )), } } -fn parse_metric_unit(raw: &str) -> Result<MetricUnit, StoreError> { - match raw { - "seconds" => Ok(MetricUnit::Seconds), - "bytes" => Ok(MetricUnit::Bytes), - "count" => Ok(MetricUnit::Count), - "ratio" => Ok(MetricUnit::Ratio), - "custom" => Ok(MetricUnit::Custom), - other => Err(invalid_input(format!("unknown metric unit `{other}`"))), +const fn archive_patch(state: Option<CliArchivePatch>) -> Option<bool> { + match state { + None => None, + Some(CliArchivePatch::Archive) => Some(true), + Some(CliArchivePatch::Restore) => Some(false), } } -fn parse_optimization_objective(raw: &str) -> Result<OptimizationObjective, StoreError> { - match raw { - "minimize" => Ok(OptimizationObjective::Minimize), - "maximize" => Ok(OptimizationObjective::Maximize), - "target" => Ok(OptimizationObjective::Target), - other => Err(invalid_input(format!( - "unknown optimization objective `{other}`" - ))), +fn cli_text_patch( + value: Option<String>, + clear: bool, +) -> Result<Option<TextPatch<NonEmptyText>>, StoreError> { + if clear { + if value.is_some() { + return Err(invalid_input("cannot set and clear the same field")); + } + return Ok(Some(TextPatch::Clear)); } + value + .map(NonEmptyText::new) + .transpose() + .map(|value| value.map(TextPatch::Set)) + .map_err(StoreError::from) } -fn parse_node_id(raw: &str) -> Result<fidget_spinner_core::NodeId, 
StoreError> { - Ok(fidget_spinner_core::NodeId::from_uuid(Uuid::parse_str( - raw, - )?)) -} - -fn parse_frontier_id(raw: &str) -> Result<fidget_spinner_core::FrontierId, StoreError> { - Ok(fidget_spinner_core::FrontierId::from_uuid(Uuid::parse_str( - raw, - )?)) +fn invalid_input(message: impl Into<String>) -> StoreError { + StoreError::InvalidInput(message.into()) } -fn parse_experiment_id(raw: &str) -> Result<fidget_spinner_core::ExperimentId, StoreError> { - Ok(fidget_spinner_core::ExperimentId::from_uuid( - Uuid::parse_str(raw)?, - )) +pub(crate) fn to_pretty_json(value: &impl Serialize) -> Result<String, StoreError> { + serde_json::to_string_pretty(value).map_err(StoreError::from) } -fn print_json<T: Serialize>(value: &T) -> Result<(), StoreError> { +fn print_json(value: &impl Serialize) -> Result<(), StoreError> { println!("{}", to_pretty_json(value)?); Ok(()) } -fn to_pretty_json<T: Serialize>(value: &T) -> Result<String, StoreError> { - serde_json::to_string_pretty(value).map_err(StoreError::from) -} - -fn invalid_input(message: impl Into<String>) -> StoreError { - StoreError::Json(serde_json::Error::io(std::io::Error::new( - std::io::ErrorKind::InvalidInput, - message.into(), - ))) -} - -impl From<CliNodeClass> for NodeClass { - fn from(value: CliNodeClass) -> Self { - match value { - CliNodeClass::Contract => Self::Contract, - CliNodeClass::Hypothesis => Self::Hypothesis, - CliNodeClass::Run => Self::Run, - CliNodeClass::Analysis => Self::Analysis, - CliNodeClass::Decision => Self::Decision, - CliNodeClass::Source => Self::Source, - CliNodeClass::Note => Self::Note, - } - } -} - impl From<CliMetricUnit> for MetricUnit { fn from(value: CliMetricUnit) -> Self { match value { @@ -1715,33 +1418,32 @@ impl From<CliOptimizationObjective> for OptimizationObjective { } } -impl From<CliExecutionBackend> for ExecutionBackend { - fn from(value: CliExecutionBackend) -> Self { +impl From<CliMetricVisibility> for MetricVisibility { + fn from(value: CliMetricVisibility) 
-> Self { match value { - CliExecutionBackend::Local => Self::LocalProcess, - CliExecutionBackend::Worktree => Self::WorktreeProcess, - CliExecutionBackend::Ssh => Self::SshProcess, + CliMetricVisibility::Canonical => Self::Canonical, + CliMetricVisibility::Minor => Self::Minor, + CliMetricVisibility::Hidden => Self::Hidden, + CliMetricVisibility::Archived => Self::Archived, } } } -impl From<CliMetricSource> for MetricFieldSource { - fn from(value: CliMetricSource) -> Self { +impl From<CliMetricScope> for MetricScope { + fn from(value: CliMetricScope) -> Self { match value { - CliMetricSource::RunMetric => Self::RunMetric, - CliMetricSource::HypothesisPayload => Self::HypothesisPayload, - CliMetricSource::RunPayload => Self::RunPayload, - CliMetricSource::AnalysisPayload => Self::AnalysisPayload, - CliMetricSource::DecisionPayload => Self::DecisionPayload, + CliMetricScope::Live => Self::Live, + CliMetricScope::Visible => Self::Visible, + CliMetricScope::All => Self::All, } } } -impl From<CliMetricOrder> for MetricRankOrder { - fn from(value: CliMetricOrder) -> Self { +impl From<CliMetricRankOrder> for MetricRankOrder { + fn from(value: CliMetricRankOrder) -> Self { match value { - CliMetricOrder::Asc => Self::Asc, - CliMetricOrder::Desc => Self::Desc, + CliMetricRankOrder::Asc => Self::Asc, + CliMetricRankOrder::Desc => Self::Desc, } } } @@ -1757,42 +1459,28 @@ impl From<CliFieldValueType> for FieldValueType { } } -impl From<CliDiagnosticSeverity> for DiagnosticSeverity { - fn from(value: CliDiagnosticSeverity) -> Self { - match value { - CliDiagnosticSeverity::Error => Self::Error, - CliDiagnosticSeverity::Warning => Self::Warning, - CliDiagnosticSeverity::Info => Self::Info, - } - } -} - -impl From<CliFieldPresence> for FieldPresence { - fn from(value: CliFieldPresence) -> Self { - match value { - CliFieldPresence::Required => Self::Required, - CliFieldPresence::Recommended => Self::Recommended, - CliFieldPresence::Optional => Self::Optional, - } - } -} - -impl 
From<CliFieldRole> for FieldRole { - fn from(value: CliFieldRole) -> Self { +impl From<CliArtifactKind> for ArtifactKind { + fn from(value: CliArtifactKind) -> Self { match value { - CliFieldRole::Index => Self::Index, - CliFieldRole::ProjectionGate => Self::ProjectionGate, - CliFieldRole::RenderOnly => Self::RenderOnly, - CliFieldRole::Opaque => Self::Opaque, + CliArtifactKind::Document => Self::Document, + CliArtifactKind::Link => Self::Link, + CliArtifactKind::Log => Self::Log, + CliArtifactKind::Table => Self::Table, + CliArtifactKind::Plot => Self::Plot, + CliArtifactKind::Dump => Self::Dump, + CliArtifactKind::Binary => Self::Binary, + CliArtifactKind::Other => Self::Other, } } } -impl From<CliInferencePolicy> for InferencePolicy { - fn from(value: CliInferencePolicy) -> Self { +impl From<CliExecutionBackend> for ExecutionBackend { + fn from(value: CliExecutionBackend) -> Self { match value { - CliInferencePolicy::ManualOnly => Self::ManualOnly, - CliInferencePolicy::ModelMayInfer => Self::ModelMayInfer, + CliExecutionBackend::Manual => Self::Manual, + CliExecutionBackend::LocalProcess => Self::LocalProcess, + CliExecutionBackend::WorktreeProcess => Self::WorktreeProcess, + CliExecutionBackend::SshProcess => Self::SshProcess, } } } @@ -1808,89 +1496,11 @@ impl From<CliFrontierVerdict> for FrontierVerdict { } } -#[cfg(test)] -mod tests { - use super::resolve_ui_project_root; - use std::fs; - - use camino::Utf8PathBuf; - use fidget_spinner_core::NonEmptyText; - use fidget_spinner_store_sqlite::{ - PROJECT_CONFIG_NAME, ProjectStore, STORE_DIR_NAME, StoreError, - }; - - fn temp_project_root(label: &str) -> Utf8PathBuf { - let mut path = std::env::temp_dir(); - path.push(format!( - "fidget_spinner_cli_test_{}_{}", - label, - uuid::Uuid::now_v7() - )); - Utf8PathBuf::from(path.to_string_lossy().into_owned()) - } - - #[test] - fn ui_resolver_accepts_state_root_and_descendants() -> Result<(), StoreError> { - let project_root = 
temp_project_root("ui_resolve_state_root"); - let _store = ProjectStore::init( - &project_root, - NonEmptyText::new("ui dogfood")?, - NonEmptyText::new("local.ui")?, - )?; - let state_root = project_root.join(STORE_DIR_NAME); - let config_path = state_root.join(PROJECT_CONFIG_NAME); - - assert_eq!(resolve_ui_project_root(&state_root)?, project_root); - assert_eq!(resolve_ui_project_root(&config_path)?, project_root); - Ok(()) - } - - #[test] - fn ui_resolver_accepts_unique_descendant_store_from_parent() -> Result<(), StoreError> { - let parent_root = temp_project_root("ui_resolve_parent"); - let nested_project = parent_root.join("nested/libgrid"); - fs::create_dir_all(nested_project.as_std_path())?; - let _store = ProjectStore::init( - &nested_project, - NonEmptyText::new("nested ui dogfood")?, - NonEmptyText::new("local.nested.ui")?, - )?; - - assert_eq!(resolve_ui_project_root(&parent_root)?, nested_project); - Ok(()) - } - - #[test] - fn ui_resolver_rejects_ambiguous_descendant_stores() -> Result<(), StoreError> { - let parent_root = temp_project_root("ui_resolve_ambiguous"); - let alpha_project = parent_root.join("alpha"); - let beta_project = parent_root.join("beta"); - fs::create_dir_all(alpha_project.as_std_path())?; - fs::create_dir_all(beta_project.as_std_path())?; - let _alpha = ProjectStore::init( - &alpha_project, - NonEmptyText::new("alpha")?, - NonEmptyText::new("local.alpha")?, - )?; - let _beta = ProjectStore::init( - &beta_project, - NonEmptyText::new("beta")?, - NonEmptyText::new("local.beta")?, - )?; - - let error = match resolve_ui_project_root(&parent_root) { - Ok(project_root) => { - return Err(StoreError::Io(std::io::Error::other(format!( - "expected ambiguous descendant discovery failure, got {project_root}" - )))); - } - Err(error) => error, - }; - assert!( - error - .to_string() - .contains("multiple descendant project stores") - ); - Ok(()) +impl From<CliExperimentStatus> for ExperimentStatus { + fn from(value: CliExperimentStatus) -> 
Self { + match value { + CliExperimentStatus::Open => Self::Open, + CliExperimentStatus::Closed => Self::Closed, + } } } |