swarm repositories / source
aboutsummaryrefslogtreecommitdiff
path: root/crates/fidget-spinner-cli/src
diff options
context:
space:
mode:
Diffstat (limited to 'crates/fidget-spinner-cli/src')
-rw-r--r--crates/fidget-spinner-cli/src/main.rs2
-rw-r--r--crates/fidget-spinner-cli/src/mcp/host/runtime.rs19
-rw-r--r--crates/fidget-spinner-cli/src/mcp/mod.rs1
-rw-r--r--crates/fidget-spinner-cli/src/mcp/output.rs68
-rw-r--r--crates/fidget-spinner-cli/src/mcp/projection.rs1145
-rw-r--r--crates/fidget-spinner-cli/src/mcp/service.rs748
-rw-r--r--crates/fidget-spinner-cli/src/ui.rs188
7 files changed, 1975 insertions, 196 deletions
diff --git a/crates/fidget-spinner-cli/src/main.rs b/crates/fidget-spinner-cli/src/main.rs
index 9de2515..2c026d1 100644
--- a/crates/fidget-spinner-cli/src/main.rs
+++ b/crates/fidget-spinner-cli/src/main.rs
@@ -24,6 +24,8 @@ use fidget_spinner_store_sqlite::{
UpdateArtifactRequest, UpdateExperimentRequest, UpdateFrontierBriefRequest,
UpdateHypothesisRequest, VertexSelector,
};
+#[cfg(test)]
+use libmcp_testkit as _;
use serde::Serialize;
use serde_json::Value;
diff --git a/crates/fidget-spinner-cli/src/mcp/host/runtime.rs b/crates/fidget-spinner-cli/src/mcp/host/runtime.rs
index f2f10b7..bacf1c8 100644
--- a/crates/fidget-spinner-cli/src/mcp/host/runtime.rs
+++ b/crates/fidget-spinner-cli/src/mcp/host/runtime.rs
@@ -21,7 +21,9 @@ use crate::mcp::catalog::{
DispatchTarget, list_resources, resource_spec, tool_definitions, tool_spec,
};
use crate::mcp::fault::{FaultKind, FaultRecord, FaultStage};
-use crate::mcp::output::{ToolOutput, detailed_tool_output, split_presentation, tool_success};
+use crate::mcp::output::{
+ ToolOutput, fallback_detailed_tool_output, split_presentation, tool_success,
+};
use crate::mcp::protocol::{
CRASH_ONCE_ENV, FORCE_ROLLOUT_ENV, HOST_STATE_ENV, HostRequestId, HostStateSeed,
PROTOCOL_VERSION, ProjectBindingSeed, SERVER_NAME, WorkerOperation, WorkerSpawnConfig,
@@ -749,7 +751,7 @@ fn project_bind_output(status: &ProjectBindStatus) -> Result<ToolOutput, FaultRe
if status.requested_path != status.project_root {
let _ = concise.insert("requested_path".to_owned(), json!(status.requested_path));
}
- detailed_tool_output(
+ fallback_detailed_tool_output(
&Value::Object(concise),
status,
[
@@ -766,6 +768,7 @@ fn project_bind_output(status: &ProjectBindStatus) -> Result<ToolOutput, FaultRe
]
.join("\n"),
None,
+ libmcp::SurfaceKind::Mutation,
FaultStage::Host,
"tools/call:project.bind",
)
@@ -787,18 +790,19 @@ fn skill_list_output() -> Result<ToolOutput, FaultRecord> {
.iter()
.map(|skill| format!("{}: {}", skill.name, skill.description)),
);
- detailed_tool_output(
+ fallback_detailed_tool_output(
&concise,
&json!({ "skills": skills }),
lines.join("\n"),
None,
+ libmcp::SurfaceKind::List,
FaultStage::Host,
"tools/call:skill.list",
)
}
fn skill_show_output(skill: crate::bundled_skill::BundledSkill) -> Result<ToolOutput, FaultRecord> {
- detailed_tool_output(
+ fallback_detailed_tool_output(
&json!({
"name": skill.name,
"resource_uri": skill.resource_uri,
@@ -812,6 +816,7 @@ fn skill_show_output(skill: crate::bundled_skill::BundledSkill) -> Result<ToolOu
}),
skill.body,
None,
+ libmcp::SurfaceKind::Read,
FaultStage::Host,
"tools/call:skill.show",
)
@@ -874,11 +879,12 @@ fn system_health_output(health: &HealthSnapshot) -> Result<ToolOutput, FaultReco
""
}
));
- detailed_tool_output(
+ fallback_detailed_tool_output(
&Value::Object(concise),
health,
lines.join("\n"),
None,
+ libmcp::SurfaceKind::Ops,
FaultStage::Host,
"tools/call:system.health",
)
@@ -976,11 +982,12 @@ fn system_telemetry_output(telemetry: &ServerTelemetry) -> Result<ToolOutput, Fa
if let Some(fault) = telemetry.last_fault.as_ref() {
lines.push(format!("last fault: {} {}", fault.operation, fault.message));
}
- detailed_tool_output(
+ fallback_detailed_tool_output(
&Value::Object(concise),
telemetry,
lines.join("\n"),
None,
+ libmcp::SurfaceKind::Ops,
FaultStage::Host,
"tools/call:system.telemetry",
)
diff --git a/crates/fidget-spinner-cli/src/mcp/mod.rs b/crates/fidget-spinner-cli/src/mcp/mod.rs
index d219e96..435c20a 100644
--- a/crates/fidget-spinner-cli/src/mcp/mod.rs
+++ b/crates/fidget-spinner-cli/src/mcp/mod.rs
@@ -2,6 +2,7 @@ mod catalog;
mod fault;
mod host;
mod output;
+mod projection;
mod protocol;
mod service;
mod telemetry;
diff --git a/crates/fidget-spinner-cli/src/mcp/output.rs b/crates/fidget-spinner-cli/src/mcp/output.rs
index 01b7bc1..2e11e20 100644
--- a/crates/fidget-spinner-cli/src/mcp/output.rs
+++ b/crates/fidget-spinner-cli/src/mcp/output.rs
@@ -1,14 +1,12 @@
use libmcp::{
- DetailLevel, JsonPorcelainConfig, RenderMode, render_json_porcelain,
- with_presentation_properties,
+ DetailLevel, FallbackJsonProjection, JsonPorcelainConfig, ProjectionError, RenderMode,
+ SurfaceKind, ToolProjection, render_json_porcelain, with_presentation_properties,
};
use serde::Serialize;
use serde_json::{Value, json};
use crate::mcp::fault::{FaultKind, FaultRecord, FaultStage};
-const CONCISE_PORCELAIN_MAX_LINES: usize = 12;
-const CONCISE_PORCELAIN_MAX_INLINE_CHARS: usize = 160;
const FULL_PORCELAIN_MAX_LINES: usize = 40;
const FULL_PORCELAIN_MAX_INLINE_CHARS: usize = 512;
@@ -99,43 +97,54 @@ pub(crate) fn split_presentation(
Ok((Presentation { render, detail }, Value::Object(object)))
}
-pub(crate) fn tool_output(
- value: &impl Serialize,
+pub(crate) fn projected_tool_output(
+ projection: &impl ToolProjection,
+ concise_text: impl Into<String>,
+ full_text: Option<String>,
stage: FaultStage,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- let structured = serde_json::to_value(value).map_err(|error| {
- FaultRecord::new(FaultKind::Internal, stage, operation, error.to_string())
- })?;
- let concise_text = render_json_porcelain(&structured, concise_porcelain_config());
+ let concise = projection
+ .concise_projection()
+ .map_err(|error| projection_fault(error, stage, operation))?;
+ let full = projection
+ .full_projection()
+ .map_err(|error| projection_fault(error, stage, operation))?;
Ok(ToolOutput::from_values(
- structured.clone(),
- structured,
+ concise,
+ full,
concise_text,
- None,
+ full_text,
))
}
-pub(crate) fn detailed_tool_output(
+pub(crate) fn fallback_tool_output(
+ concise: &impl Serialize,
+ full: &impl Serialize,
+ kind: SurfaceKind,
+ stage: FaultStage,
+ operation: &str,
+) -> Result<ToolOutput, FaultRecord> {
+ let projection = FallbackJsonProjection::new(concise, full, kind)
+ .map_err(|error| projection_fault(error, stage, operation))?;
+ let concise_text = projection
+ .porcelain_projection(DetailLevel::Concise)
+ .map_err(|error| projection_fault(error, stage, operation))?;
+ projected_tool_output(&projection, concise_text, None, stage, operation)
+}
+
+pub(crate) fn fallback_detailed_tool_output(
concise: &impl Serialize,
full: &impl Serialize,
concise_text: impl Into<String>,
full_text: Option<String>,
+ kind: SurfaceKind,
stage: FaultStage,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- let concise = serde_json::to_value(concise).map_err(|error| {
- FaultRecord::new(FaultKind::Internal, stage, operation, error.to_string())
- })?;
- let full = serde_json::to_value(full).map_err(|error| {
- FaultRecord::new(FaultKind::Internal, stage, operation, error.to_string())
- })?;
- Ok(ToolOutput::from_values(
- concise,
- full,
- concise_text,
- full_text,
- ))
+ let projection = FallbackJsonProjection::new(concise, full, kind)
+ .map_err(|error| projection_fault(error, stage, operation))?;
+ projected_tool_output(&projection, concise_text, full_text, stage, operation)
}
pub(crate) fn tool_success(
@@ -165,11 +174,8 @@ pub(crate) fn with_common_presentation(schema: Value) -> Value {
with_presentation_properties(schema)
}
-const fn concise_porcelain_config() -> JsonPorcelainConfig {
- JsonPorcelainConfig {
- max_lines: CONCISE_PORCELAIN_MAX_LINES,
- max_inline_chars: CONCISE_PORCELAIN_MAX_INLINE_CHARS,
- }
+fn projection_fault(error: ProjectionError, stage: FaultStage, operation: &str) -> FaultRecord {
+ FaultRecord::new(FaultKind::Internal, stage, operation, error.to_string())
}
const fn full_porcelain_config() -> JsonPorcelainConfig {
diff --git a/crates/fidget-spinner-cli/src/mcp/projection.rs b/crates/fidget-spinner-cli/src/mcp/projection.rs
new file mode 100644
index 0000000..ca89af0
--- /dev/null
+++ b/crates/fidget-spinner-cli/src/mcp/projection.rs
@@ -0,0 +1,1145 @@
+use std::collections::BTreeMap;
+
+use fidget_spinner_core::{
+ AttachmentTargetRef, CommandRecipe, ExperimentAnalysis, ExperimentOutcome, FrontierBrief,
+ FrontierRecord, MetricValue, NonEmptyText, RunDimensionValue,
+};
+use fidget_spinner_store_sqlite::{
+ ArtifactDetail, ArtifactSummary, ExperimentDetail, ExperimentSummary, FrontierOpenProjection,
+ FrontierSummary, HypothesisCurrentState, HypothesisDetail, MetricBestEntry, MetricKeySummary,
+ MetricObservationSummary, ProjectStore, StoreError, VertexSummary,
+};
+use libmcp::{
+ ProjectionError, SelectorProjection, StructuredProjection, SurfaceKind, SurfacePolicy,
+};
+use serde::Serialize;
+use serde_json::Value;
+
+use crate::mcp::fault::{FaultKind, FaultRecord, FaultStage};
+
+#[derive(Clone, Serialize, libmcp::SelectorProjection)]
+pub(crate) struct HypothesisSelector {
+ pub(crate) slug: String,
+ pub(crate) title: String,
+}
+
+#[derive(Clone, Serialize, libmcp::SelectorProjection)]
+pub(crate) struct ExperimentSelector {
+ pub(crate) slug: String,
+ pub(crate) title: String,
+}
+
+#[derive(Clone, Serialize, libmcp::SelectorProjection)]
+pub(crate) struct FrontierSelector {
+ pub(crate) slug: String,
+ #[libmcp(title)]
+ pub(crate) label: String,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct FrontierSummaryProjection {
+ pub(crate) slug: String,
+ pub(crate) label: String,
+ pub(crate) objective: String,
+ pub(crate) status: String,
+ pub(crate) active_hypothesis_count: u64,
+ pub(crate) open_experiment_count: u64,
+ pub(crate) updated_at: String,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct FrontierBriefProjection {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) situation: Option<String>,
+ pub(crate) roadmap: Vec<RoadmapItemProjection>,
+ pub(crate) unknowns: Vec<String>,
+ pub(crate) revision: u64,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) updated_at: Option<String>,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct RoadmapItemProjection {
+ pub(crate) rank: u32,
+ pub(crate) hypothesis: Option<HypothesisRoadmapProjection>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) summary: Option<String>,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct HypothesisRoadmapProjection {
+ pub(crate) slug: String,
+ pub(crate) title: String,
+ pub(crate) summary: String,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct FrontierRecordProjection {
+ pub(crate) slug: String,
+ pub(crate) label: String,
+ pub(crate) objective: String,
+ pub(crate) status: String,
+ pub(crate) revision: u64,
+ pub(crate) created_at: String,
+ pub(crate) updated_at: String,
+ pub(crate) brief: FrontierBriefProjection,
+}
+
+#[derive(Clone, Serialize, libmcp::ToolProjection)]
+#[libmcp(kind = "read")]
+pub(crate) struct FrontierReadOutput {
+ pub(crate) record: FrontierRecordProjection,
+}
+
+#[derive(Clone, Serialize, libmcp::ToolProjection)]
+#[libmcp(kind = "list")]
+pub(crate) struct FrontierListOutput {
+ pub(crate) count: usize,
+ pub(crate) frontiers: Vec<FrontierSummaryProjection>,
+}
+
+#[derive(Clone, Serialize, libmcp::ToolProjection)]
+#[libmcp(kind = "overview")]
+pub(crate) struct FrontierOpenOutput {
+ pub(crate) frontier: FrontierOpenFrontierProjection,
+ pub(crate) active_tags: Vec<String>,
+ pub(crate) active_metric_keys: Vec<MetricKeySummaryProjection>,
+ pub(crate) active_hypotheses: Vec<HypothesisCurrentStateProjection>,
+ pub(crate) open_experiments: Vec<ExperimentSummaryProjection>,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct FrontierOpenFrontierProjection {
+ pub(crate) slug: String,
+ pub(crate) label: String,
+ pub(crate) objective: String,
+ pub(crate) status: String,
+ pub(crate) brief: FrontierBriefProjection,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct HypothesisSummaryProjection {
+ pub(crate) slug: String,
+ pub(crate) archived: bool,
+ pub(crate) title: String,
+ pub(crate) summary: String,
+ pub(crate) tags: Vec<String>,
+ pub(crate) open_experiment_count: u64,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) latest_verdict: Option<String>,
+ pub(crate) updated_at: String,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct HypothesisRecordProjection {
+ pub(crate) slug: String,
+ pub(crate) archived: bool,
+ pub(crate) title: String,
+ pub(crate) summary: String,
+ pub(crate) body: String,
+ pub(crate) tags: Vec<String>,
+ pub(crate) revision: u64,
+ pub(crate) created_at: String,
+ pub(crate) updated_at: String,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct HypothesisReadRecordProjection {
+ pub(crate) slug: String,
+ pub(crate) archived: bool,
+ pub(crate) title: String,
+ pub(crate) summary: String,
+ pub(crate) tags: Vec<String>,
+ pub(crate) revision: u64,
+ pub(crate) updated_at: String,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct FrontierLinkProjection {
+ pub(crate) slug: String,
+ pub(crate) label: String,
+ pub(crate) status: String,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct HypothesisDetailConcise {
+ pub(crate) record: HypothesisReadRecordProjection,
+ pub(crate) frontier: FrontierLinkProjection,
+ pub(crate) parents: usize,
+ pub(crate) children: usize,
+ pub(crate) open_experiments: Vec<ExperimentSummaryProjection>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) latest_closed_experiment: Option<ExperimentSummaryProjection>,
+ pub(crate) artifact_count: usize,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct HypothesisDetailFull {
+ pub(crate) record: HypothesisRecordProjection,
+ pub(crate) frontier: FrontierLinkProjection,
+ pub(crate) parents: Vec<VertexSummaryProjection>,
+ pub(crate) children: Vec<VertexSummaryProjection>,
+ pub(crate) open_experiments: Vec<ExperimentSummaryProjection>,
+ pub(crate) closed_experiments: Vec<ExperimentSummaryProjection>,
+ pub(crate) artifacts: Vec<ArtifactSummaryProjection>,
+}
+
+pub(crate) struct HypothesisDetailOutput {
+ concise: HypothesisDetailConcise,
+ full: HypothesisDetailFull,
+}
+
+impl StructuredProjection for HypothesisDetailOutput {
+ fn concise_projection(&self) -> Result<Value, ProjectionError> {
+ Ok(serde_json::to_value(&self.concise)?)
+ }
+
+ fn full_projection(&self) -> Result<Value, ProjectionError> {
+ Ok(serde_json::to_value(&self.full)?)
+ }
+}
+
+impl SurfacePolicy for HypothesisDetailOutput {
+ const KIND: SurfaceKind = SurfaceKind::Read;
+}
+
+#[derive(Clone, Serialize, libmcp::ToolProjection)]
+#[libmcp(kind = "mutation")]
+pub(crate) struct HypothesisRecordOutput {
+ pub(crate) record: HypothesisRecordProjection,
+}
+
+#[derive(Clone, Serialize, libmcp::ToolProjection)]
+#[libmcp(kind = "list")]
+pub(crate) struct HypothesisListOutput {
+ pub(crate) count: usize,
+ pub(crate) hypotheses: Vec<HypothesisSummaryProjection>,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct ExperimentSummaryProjection {
+ pub(crate) slug: String,
+ pub(crate) archived: bool,
+ pub(crate) title: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) summary: Option<String>,
+ pub(crate) tags: Vec<String>,
+ pub(crate) status: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) verdict: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) primary_metric: Option<MetricObservationSummaryProjection>,
+ pub(crate) updated_at: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) closed_at: Option<String>,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct ExperimentRecordProjection {
+ pub(crate) slug: String,
+ pub(crate) archived: bool,
+ pub(crate) title: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) summary: Option<String>,
+ pub(crate) tags: Vec<String>,
+ pub(crate) status: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) outcome: Option<ExperimentOutcomeProjection>,
+ pub(crate) revision: u64,
+ pub(crate) created_at: String,
+ pub(crate) updated_at: String,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct ExperimentReadRecordProjection {
+ pub(crate) slug: String,
+ pub(crate) archived: bool,
+ pub(crate) title: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) summary: Option<String>,
+ pub(crate) tags: Vec<String>,
+ pub(crate) status: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) verdict: Option<String>,
+ pub(crate) revision: u64,
+ pub(crate) updated_at: String,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct ExperimentDetailConcise {
+ pub(crate) record: ExperimentReadRecordProjection,
+ pub(crate) frontier: FrontierLinkProjection,
+ pub(crate) owning_hypothesis: HypothesisSummaryProjection,
+ pub(crate) parents: usize,
+ pub(crate) children: usize,
+ pub(crate) artifact_count: usize,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) outcome: Option<ExperimentOutcomeProjection>,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct ExperimentDetailFull {
+ pub(crate) record: ExperimentRecordProjection,
+ pub(crate) frontier: FrontierLinkProjection,
+ pub(crate) owning_hypothesis: HypothesisSummaryProjection,
+ pub(crate) parents: Vec<VertexSummaryProjection>,
+ pub(crate) children: Vec<VertexSummaryProjection>,
+ pub(crate) artifacts: Vec<ArtifactSummaryProjection>,
+}
+
+pub(crate) struct ExperimentDetailOutput {
+ concise: ExperimentDetailConcise,
+ full: ExperimentDetailFull,
+}
+
+impl StructuredProjection for ExperimentDetailOutput {
+ fn concise_projection(&self) -> Result<Value, ProjectionError> {
+ Ok(serde_json::to_value(&self.concise)?)
+ }
+
+ fn full_projection(&self) -> Result<Value, ProjectionError> {
+ Ok(serde_json::to_value(&self.full)?)
+ }
+}
+
+impl SurfacePolicy for ExperimentDetailOutput {
+ const KIND: SurfaceKind = SurfaceKind::Read;
+}
+
+#[derive(Clone, Serialize, libmcp::ToolProjection)]
+#[libmcp(kind = "mutation")]
+pub(crate) struct ExperimentRecordOutput {
+ pub(crate) record: ExperimentRecordProjection,
+}
+
+#[derive(Clone, Serialize, libmcp::ToolProjection)]
+#[libmcp(kind = "list")]
+pub(crate) struct ExperimentListOutput {
+ pub(crate) count: usize,
+ pub(crate) experiments: Vec<ExperimentSummaryProjection>,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct ArtifactSummaryProjection {
+ pub(crate) slug: String,
+ pub(crate) kind: String,
+ pub(crate) label: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) summary: Option<String>,
+ pub(crate) locator: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) media_type: Option<String>,
+ pub(crate) updated_at: String,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct ArtifactRecordProjection {
+ pub(crate) slug: String,
+ pub(crate) kind: String,
+ pub(crate) label: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) summary: Option<String>,
+ pub(crate) locator: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) media_type: Option<String>,
+ pub(crate) revision: u64,
+ pub(crate) created_at: String,
+ pub(crate) updated_at: String,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct ArtifactReadRecordProjection {
+ pub(crate) slug: String,
+ pub(crate) kind: String,
+ pub(crate) label: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) summary: Option<String>,
+ pub(crate) locator: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) media_type: Option<String>,
+ pub(crate) revision: u64,
+ pub(crate) updated_at: String,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct ArtifactDetailConcise {
+ pub(crate) record: ArtifactReadRecordProjection,
+ pub(crate) attachment_count: usize,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct ArtifactDetailFull {
+ pub(crate) record: ArtifactRecordProjection,
+ pub(crate) attachments: Vec<AttachmentTargetProjection>,
+}
+
+pub(crate) struct ArtifactDetailOutput {
+ concise: ArtifactDetailConcise,
+ full: ArtifactDetailFull,
+}
+
+impl StructuredProjection for ArtifactDetailOutput {
+ fn concise_projection(&self) -> Result<Value, ProjectionError> {
+ Ok(serde_json::to_value(&self.concise)?)
+ }
+
+ fn full_projection(&self) -> Result<Value, ProjectionError> {
+ Ok(serde_json::to_value(&self.full)?)
+ }
+}
+
+impl SurfacePolicy for ArtifactDetailOutput {
+ const KIND: SurfaceKind = SurfaceKind::Read;
+ const REFERENCE_ONLY: bool = true;
+}
+
+#[derive(Clone, Serialize, libmcp::ToolProjection)]
+#[libmcp(kind = "mutation", reference_only)]
+pub(crate) struct ArtifactRecordOutput {
+ pub(crate) record: ArtifactRecordProjection,
+}
+
+#[derive(Clone, Serialize, libmcp::ToolProjection)]
+#[libmcp(kind = "list", reference_only)]
+pub(crate) struct ArtifactListOutput {
+ pub(crate) count: usize,
+ pub(crate) artifacts: Vec<ArtifactSummaryProjection>,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct HypothesisCurrentStateProjection {
+ pub(crate) hypothesis: HypothesisSummaryProjection,
+ pub(crate) open_experiments: Vec<ExperimentSummaryProjection>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) latest_closed_experiment: Option<ExperimentSummaryProjection>,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct MetricKeySummaryProjection {
+ pub(crate) key: String,
+ pub(crate) unit: String,
+ pub(crate) objective: String,
+ pub(crate) visibility: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) description: Option<String>,
+ pub(crate) reference_count: u64,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct MetricBestEntryProjection {
+ pub(crate) experiment: ExperimentSummaryProjection,
+ pub(crate) hypothesis: HypothesisSummaryProjection,
+ pub(crate) value: f64,
+ pub(crate) dimensions: BTreeMap<String, Value>,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct MetricObservationSummaryProjection {
+ pub(crate) key: String,
+ pub(crate) value: f64,
+ pub(crate) unit: String,
+ pub(crate) objective: String,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct ExperimentOutcomeProjection {
+ pub(crate) backend: String,
+ pub(crate) command: CommandRecipeProjection,
+ pub(crate) dimensions: BTreeMap<String, Value>,
+ pub(crate) primary_metric: MetricValueProjection,
+ pub(crate) supporting_metrics: Vec<MetricValueProjection>,
+ pub(crate) verdict: String,
+ pub(crate) rationale: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) analysis: Option<ExperimentAnalysisProjection>,
+ pub(crate) closed_at: String,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct ExperimentAnalysisProjection {
+ pub(crate) summary: String,
+ pub(crate) body: String,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct MetricValueProjection {
+ pub(crate) key: String,
+ pub(crate) value: f64,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct CommandRecipeProjection {
+ pub(crate) argv: Vec<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) working_directory: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) env: Option<BTreeMap<String, String>>,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct VertexSummaryProjection {
+ pub(crate) kind: String,
+ pub(crate) slug: String,
+ pub(crate) archived: bool,
+ pub(crate) title: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) summary: Option<String>,
+ pub(crate) updated_at: String,
+}
+
+#[derive(Clone, Serialize)]
+pub(crate) struct AttachmentTargetProjection {
+ pub(crate) kind: String,
+ pub(crate) slug: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) title: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) label: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) summary: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) status: Option<String>,
+}
+
+#[derive(Clone, Serialize, libmcp::ToolProjection)]
+#[libmcp(kind = "list")]
+pub(crate) struct MetricKeysOutput {
+ pub(crate) count: usize,
+ pub(crate) metrics: Vec<MetricKeySummaryProjection>,
+}
+
+#[derive(Clone, Serialize, libmcp::ToolProjection)]
+#[libmcp(kind = "list")]
+pub(crate) struct MetricBestOutput {
+ pub(crate) count: usize,
+ pub(crate) entries: Vec<MetricBestEntryProjection>,
+}
+
+pub(crate) fn frontier_list(frontiers: &[FrontierSummary]) -> FrontierListOutput {
+ FrontierListOutput {
+ count: frontiers.len(),
+ frontiers: frontiers.iter().map(frontier_summary).collect(),
+ }
+}
+
+pub(crate) fn frontier_record(
+ store: &ProjectStore,
+ frontier: &FrontierRecord,
+ operation: &str,
+) -> Result<FrontierReadOutput, FaultRecord> {
+ Ok(FrontierReadOutput {
+ record: frontier_record_projection(store, frontier, operation)?,
+ })
+}
+
+pub(crate) fn frontier_open(projection: &FrontierOpenProjection) -> FrontierOpenOutput {
+ FrontierOpenOutput {
+ frontier: FrontierOpenFrontierProjection {
+ slug: projection.frontier.slug.to_string(),
+ label: projection.frontier.label.to_string(),
+ objective: projection.frontier.objective.to_string(),
+ status: projection.frontier.status.as_str().to_owned(),
+ brief: frontier_brief_projection(
+ &projection.frontier.brief,
+ projection
+ .frontier
+ .brief
+ .roadmap
+ .iter()
+ .map(|item| {
+ let hypothesis = projection
+ .active_hypotheses
+ .iter()
+ .find(|state| state.hypothesis.id == item.hypothesis_id)
+ .map(|state| HypothesisRoadmapProjection {
+ slug: state.hypothesis.slug.to_string(),
+ title: state.hypothesis.title.to_string(),
+ summary: state.hypothesis.summary.to_string(),
+ });
+ RoadmapItemProjection {
+ rank: item.rank,
+ hypothesis,
+ summary: item.summary.as_ref().map(ToString::to_string),
+ }
+ })
+ .collect(),
+ ),
+ },
+ active_tags: projection
+ .active_tags
+ .iter()
+ .map(ToString::to_string)
+ .collect(),
+ active_metric_keys: projection
+ .active_metric_keys
+ .iter()
+ .map(metric_key_summary)
+ .collect(),
+ active_hypotheses: projection
+ .active_hypotheses
+ .iter()
+ .map(hypothesis_current_state)
+ .collect(),
+ open_experiments: projection
+ .open_experiments
+ .iter()
+ .map(experiment_summary)
+ .collect(),
+ }
+}
+
+pub(crate) fn hypothesis_record(
+ hypothesis: &fidget_spinner_core::HypothesisRecord,
+) -> HypothesisRecordOutput {
+ HypothesisRecordOutput {
+ record: hypothesis_record_projection(hypothesis),
+ }
+}
+
+pub(crate) fn hypothesis_list(
+ hypotheses: &[fidget_spinner_store_sqlite::HypothesisSummary],
+) -> HypothesisListOutput {
+ HypothesisListOutput {
+ count: hypotheses.len(),
+ hypotheses: hypotheses.iter().map(hypothesis_summary).collect(),
+ }
+}
+
+pub(crate) fn hypothesis_detail(
+ store: &ProjectStore,
+ detail: &HypothesisDetail,
+ operation: &str,
+) -> Result<HypothesisDetailOutput, FaultRecord> {
+ let frontier = store
+ .read_frontier(&detail.record.frontier_id.to_string())
+ .map_err(store_fault(operation))?;
+ let frontier = FrontierLinkProjection {
+ slug: frontier.slug.to_string(),
+ label: frontier.label.to_string(),
+ status: frontier.status.as_str().to_owned(),
+ };
+ Ok(HypothesisDetailOutput {
+ concise: HypothesisDetailConcise {
+ record: HypothesisReadRecordProjection {
+ slug: detail.record.slug.to_string(),
+ archived: detail.record.archived,
+ title: detail.record.title.to_string(),
+ summary: detail.record.summary.to_string(),
+ tags: detail.record.tags.iter().map(ToString::to_string).collect(),
+ revision: detail.record.revision,
+ updated_at: timestamp_value(detail.record.updated_at),
+ },
+ frontier: frontier.clone(),
+ parents: detail.parents.len(),
+ children: detail.children.len(),
+ open_experiments: detail
+ .open_experiments
+ .iter()
+ .map(experiment_summary)
+ .collect(),
+ latest_closed_experiment: detail.closed_experiments.first().map(experiment_summary),
+ artifact_count: detail.artifacts.len(),
+ },
+ full: HypothesisDetailFull {
+ record: hypothesis_record_projection(&detail.record),
+ frontier,
+ parents: detail.parents.iter().map(vertex_summary).collect(),
+ children: detail.children.iter().map(vertex_summary).collect(),
+ open_experiments: detail
+ .open_experiments
+ .iter()
+ .map(experiment_summary)
+ .collect(),
+ closed_experiments: detail
+ .closed_experiments
+ .iter()
+ .map(experiment_summary)
+ .collect(),
+ artifacts: detail.artifacts.iter().map(artifact_summary).collect(),
+ },
+ })
+}
+
+pub(crate) fn experiment_record(
+ experiment: &fidget_spinner_core::ExperimentRecord,
+) -> ExperimentRecordOutput {
+ ExperimentRecordOutput {
+ record: experiment_record_projection(experiment),
+ }
+}
+
+pub(crate) fn experiment_list(experiments: &[ExperimentSummary]) -> ExperimentListOutput {
+ ExperimentListOutput {
+ count: experiments.len(),
+ experiments: experiments.iter().map(experiment_summary).collect(),
+ }
+}
+
+pub(crate) fn experiment_detail(
+ store: &ProjectStore,
+ detail: &ExperimentDetail,
+ operation: &str,
+) -> Result<ExperimentDetailOutput, FaultRecord> {
+ let frontier = store
+ .read_frontier(&detail.record.frontier_id.to_string())
+ .map_err(store_fault(operation))?;
+ let frontier = FrontierLinkProjection {
+ slug: frontier.slug.to_string(),
+ label: frontier.label.to_string(),
+ status: frontier.status.as_str().to_owned(),
+ };
+ Ok(ExperimentDetailOutput {
+ concise: ExperimentDetailConcise {
+ record: ExperimentReadRecordProjection {
+ slug: detail.record.slug.to_string(),
+ archived: detail.record.archived,
+ title: detail.record.title.to_string(),
+ summary: detail.record.summary.as_ref().map(ToString::to_string),
+ tags: detail.record.tags.iter().map(ToString::to_string).collect(),
+ status: detail.record.status.as_str().to_owned(),
+ verdict: detail
+ .record
+ .outcome
+ .as_ref()
+ .map(|outcome| outcome.verdict.as_str().to_owned()),
+ revision: detail.record.revision,
+ updated_at: timestamp_value(detail.record.updated_at),
+ },
+ frontier: frontier.clone(),
+ owning_hypothesis: hypothesis_summary(&detail.owning_hypothesis),
+ parents: detail.parents.len(),
+ children: detail.children.len(),
+ artifact_count: detail.artifacts.len(),
+ outcome: detail.record.outcome.as_ref().map(experiment_outcome),
+ },
+ full: ExperimentDetailFull {
+ record: experiment_record_projection(&detail.record),
+ frontier,
+ owning_hypothesis: hypothesis_summary(&detail.owning_hypothesis),
+ parents: detail.parents.iter().map(vertex_summary).collect(),
+ children: detail.children.iter().map(vertex_summary).collect(),
+ artifacts: detail.artifacts.iter().map(artifact_summary).collect(),
+ },
+ })
+}
+
+pub(crate) fn artifact_record(
+ artifact: &fidget_spinner_core::ArtifactRecord,
+) -> ArtifactRecordOutput {
+ ArtifactRecordOutput {
+ record: artifact_record_projection(artifact),
+ }
+}
+
+pub(crate) fn artifact_list(artifacts: &[ArtifactSummary]) -> ArtifactListOutput {
+ ArtifactListOutput {
+ count: artifacts.len(),
+ artifacts: artifacts.iter().map(artifact_summary).collect(),
+ }
+}
+
+pub(crate) fn artifact_detail(
+ store: &ProjectStore,
+ detail: &ArtifactDetail,
+ operation: &str,
+) -> Result<ArtifactDetailOutput, FaultRecord> {
+ let attachments = detail
+ .attachments
+ .iter()
+ .copied()
+ .map(|attachment| attachment_target(store, attachment, operation))
+ .collect::<Result<Vec<_>, _>>()?;
+ Ok(ArtifactDetailOutput {
+ concise: ArtifactDetailConcise {
+ record: ArtifactReadRecordProjection {
+ slug: detail.record.slug.to_string(),
+ kind: detail.record.kind.as_str().to_owned(),
+ label: detail.record.label.to_string(),
+ summary: detail.record.summary.as_ref().map(ToString::to_string),
+ locator: detail.record.locator.to_string(),
+ media_type: detail.record.media_type.as_ref().map(ToString::to_string),
+ revision: detail.record.revision,
+ updated_at: timestamp_value(detail.record.updated_at),
+ },
+ attachment_count: detail.attachments.len(),
+ },
+ full: ArtifactDetailFull {
+ record: artifact_record_projection(&detail.record),
+ attachments,
+ },
+ })
+}
+
+pub(crate) fn metric_keys(keys: &[MetricKeySummary]) -> MetricKeysOutput {
+ MetricKeysOutput {
+ count: keys.len(),
+ metrics: keys.iter().map(metric_key_summary).collect(),
+ }
+}
+
+pub(crate) fn metric_best(entries: &[MetricBestEntry]) -> MetricBestOutput {
+ MetricBestOutput {
+ count: entries.len(),
+ entries: entries.iter().map(metric_best_entry).collect(),
+ }
+}
+
+fn frontier_summary(frontier: &FrontierSummary) -> FrontierSummaryProjection {
+ FrontierSummaryProjection {
+ slug: frontier.slug.to_string(),
+ label: frontier.label.to_string(),
+ objective: frontier.objective.to_string(),
+ status: frontier.status.as_str().to_owned(),
+ active_hypothesis_count: frontier.active_hypothesis_count,
+ open_experiment_count: frontier.open_experiment_count,
+ updated_at: timestamp_value(frontier.updated_at),
+ }
+}
+
+fn frontier_record_projection(
+ store: &ProjectStore,
+ frontier: &FrontierRecord,
+ operation: &str,
+) -> Result<FrontierRecordProjection, FaultRecord> {
+ let roadmap = frontier
+ .brief
+ .roadmap
+ .iter()
+ .map(|item| {
+ let hypothesis = store
+ .read_hypothesis(&item.hypothesis_id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(RoadmapItemProjection {
+ rank: item.rank,
+ hypothesis: Some(HypothesisRoadmapProjection {
+ slug: hypothesis.record.slug.to_string(),
+ title: hypothesis.record.title.to_string(),
+ summary: hypothesis.record.summary.to_string(),
+ }),
+ summary: item.summary.as_ref().map(ToString::to_string),
+ })
+ })
+ .collect::<Result<Vec<_>, FaultRecord>>()?;
+ Ok(FrontierRecordProjection {
+ slug: frontier.slug.to_string(),
+ label: frontier.label.to_string(),
+ objective: frontier.objective.to_string(),
+ status: frontier.status.as_str().to_owned(),
+ revision: frontier.revision,
+ created_at: timestamp_value(frontier.created_at),
+ updated_at: timestamp_value(frontier.updated_at),
+ brief: frontier_brief_projection(&frontier.brief, roadmap),
+ })
+}
+
+fn frontier_brief_projection(
+ brief: &FrontierBrief,
+ roadmap: Vec<RoadmapItemProjection>,
+) -> FrontierBriefProjection {
+ FrontierBriefProjection {
+ situation: brief.situation.as_ref().map(ToString::to_string),
+ roadmap,
+ unknowns: brief.unknowns.iter().map(ToString::to_string).collect(),
+ revision: brief.revision,
+ updated_at: brief.updated_at.map(timestamp_value),
+ }
+}
+
+fn hypothesis_summary(
+ hypothesis: &fidget_spinner_store_sqlite::HypothesisSummary,
+) -> HypothesisSummaryProjection {
+ HypothesisSummaryProjection {
+ slug: hypothesis.slug.to_string(),
+ archived: hypothesis.archived,
+ title: hypothesis.title.to_string(),
+ summary: hypothesis.summary.to_string(),
+ tags: hypothesis.tags.iter().map(ToString::to_string).collect(),
+ open_experiment_count: hypothesis.open_experiment_count,
+ latest_verdict: hypothesis
+ .latest_verdict
+ .map(|verdict| verdict.as_str().to_owned()),
+ updated_at: timestamp_value(hypothesis.updated_at),
+ }
+}
+
+fn hypothesis_record_projection(
+ hypothesis: &fidget_spinner_core::HypothesisRecord,
+) -> HypothesisRecordProjection {
+ HypothesisRecordProjection {
+ slug: hypothesis.slug.to_string(),
+ archived: hypothesis.archived,
+ title: hypothesis.title.to_string(),
+ summary: hypothesis.summary.to_string(),
+ body: hypothesis.body.to_string(),
+ tags: hypothesis.tags.iter().map(ToString::to_string).collect(),
+ revision: hypothesis.revision,
+ created_at: timestamp_value(hypothesis.created_at),
+ updated_at: timestamp_value(hypothesis.updated_at),
+ }
+}
+
+fn experiment_summary(experiment: &ExperimentSummary) -> ExperimentSummaryProjection {
+ ExperimentSummaryProjection {
+ slug: experiment.slug.to_string(),
+ archived: experiment.archived,
+ title: experiment.title.to_string(),
+ summary: experiment.summary.as_ref().map(ToString::to_string),
+ tags: experiment.tags.iter().map(ToString::to_string).collect(),
+ status: experiment.status.as_str().to_owned(),
+ verdict: experiment
+ .verdict
+ .map(|verdict| verdict.as_str().to_owned()),
+ primary_metric: experiment
+ .primary_metric
+ .as_ref()
+ .map(metric_observation_summary),
+ updated_at: timestamp_value(experiment.updated_at),
+ closed_at: experiment.closed_at.map(timestamp_value),
+ }
+}
+
+fn experiment_record_projection(
+ experiment: &fidget_spinner_core::ExperimentRecord,
+) -> ExperimentRecordProjection {
+ ExperimentRecordProjection {
+ slug: experiment.slug.to_string(),
+ archived: experiment.archived,
+ title: experiment.title.to_string(),
+ summary: experiment.summary.as_ref().map(ToString::to_string),
+ tags: experiment.tags.iter().map(ToString::to_string).collect(),
+ status: experiment.status.as_str().to_owned(),
+ outcome: experiment.outcome.as_ref().map(experiment_outcome),
+ revision: experiment.revision,
+ created_at: timestamp_value(experiment.created_at),
+ updated_at: timestamp_value(experiment.updated_at),
+ }
+}
+
+fn artifact_summary(artifact: &ArtifactSummary) -> ArtifactSummaryProjection {
+ ArtifactSummaryProjection {
+ slug: artifact.slug.to_string(),
+ kind: artifact.kind.as_str().to_owned(),
+ label: artifact.label.to_string(),
+ summary: artifact.summary.as_ref().map(ToString::to_string),
+ locator: artifact.locator.to_string(),
+ media_type: artifact.media_type.as_ref().map(ToString::to_string),
+ updated_at: timestamp_value(artifact.updated_at),
+ }
+}
+
+fn artifact_record_projection(
+ artifact: &fidget_spinner_core::ArtifactRecord,
+) -> ArtifactRecordProjection {
+ ArtifactRecordProjection {
+ slug: artifact.slug.to_string(),
+ kind: artifact.kind.as_str().to_owned(),
+ label: artifact.label.to_string(),
+ summary: artifact.summary.as_ref().map(ToString::to_string),
+ locator: artifact.locator.to_string(),
+ media_type: artifact.media_type.as_ref().map(ToString::to_string),
+ revision: artifact.revision,
+ created_at: timestamp_value(artifact.created_at),
+ updated_at: timestamp_value(artifact.updated_at),
+ }
+}
+
+fn hypothesis_current_state(state: &HypothesisCurrentState) -> HypothesisCurrentStateProjection {
+ HypothesisCurrentStateProjection {
+ hypothesis: hypothesis_summary(&state.hypothesis),
+ open_experiments: state
+ .open_experiments
+ .iter()
+ .map(experiment_summary)
+ .collect(),
+ latest_closed_experiment: state
+ .latest_closed_experiment
+ .as_ref()
+ .map(experiment_summary),
+ }
+}
+
+fn metric_key_summary(metric: &MetricKeySummary) -> MetricKeySummaryProjection {
+ MetricKeySummaryProjection {
+ key: metric.key.to_string(),
+ unit: metric.unit.as_str().to_owned(),
+ objective: metric.objective.as_str().to_owned(),
+ visibility: metric.visibility.as_str().to_owned(),
+ description: metric.description.as_ref().map(ToString::to_string),
+ reference_count: metric.reference_count,
+ }
+}
+
+fn metric_best_entry(entry: &MetricBestEntry) -> MetricBestEntryProjection {
+ MetricBestEntryProjection {
+ experiment: experiment_summary(&entry.experiment),
+ hypothesis: hypothesis_summary(&entry.hypothesis),
+ value: entry.value,
+ dimensions: dimension_map(&entry.dimensions),
+ }
+}
+
+fn metric_observation_summary(
+ metric: &MetricObservationSummary,
+) -> MetricObservationSummaryProjection {
+ MetricObservationSummaryProjection {
+ key: metric.key.to_string(),
+ value: metric.value,
+ unit: metric.unit.as_str().to_owned(),
+ objective: metric.objective.as_str().to_owned(),
+ }
+}
+
+fn experiment_outcome(outcome: &ExperimentOutcome) -> ExperimentOutcomeProjection {
+ ExperimentOutcomeProjection {
+ backend: outcome.backend.as_str().to_owned(),
+ command: command_recipe(&outcome.command),
+ dimensions: dimension_map(&outcome.dimensions),
+ primary_metric: metric_value(&outcome.primary_metric),
+ supporting_metrics: outcome
+ .supporting_metrics
+ .iter()
+ .map(metric_value)
+ .collect(),
+ verdict: outcome.verdict.as_str().to_owned(),
+ rationale: outcome.rationale.to_string(),
+ analysis: outcome.analysis.as_ref().map(experiment_analysis),
+ closed_at: timestamp_value(outcome.closed_at),
+ }
+}
+
+fn experiment_analysis(analysis: &ExperimentAnalysis) -> ExperimentAnalysisProjection {
+ ExperimentAnalysisProjection {
+ summary: analysis.summary.to_string(),
+ body: analysis.body.to_string(),
+ }
+}
+
+fn metric_value(metric: &MetricValue) -> MetricValueProjection {
+ MetricValueProjection {
+ key: metric.key.to_string(),
+ value: metric.value,
+ }
+}
+
+fn command_recipe(command: &CommandRecipe) -> CommandRecipeProjection {
+ CommandRecipeProjection {
+ argv: command.argv.iter().map(ToString::to_string).collect(),
+ working_directory: command.working_directory.as_ref().map(ToString::to_string),
+ env: (!command.env.is_empty()).then(|| {
+ command
+ .env
+ .iter()
+ .map(|(key, value)| (key.clone(), value.clone()))
+ .collect()
+ }),
+ }
+}
+
+fn dimension_map(
+ dimensions: &BTreeMap<NonEmptyText, RunDimensionValue>,
+) -> BTreeMap<String, Value> {
+ dimensions
+ .iter()
+ .map(|(key, value)| (key.to_string(), run_dimension_value(value)))
+ .collect()
+}
+
+fn run_dimension_value(value: &RunDimensionValue) -> Value {
+ match value {
+ RunDimensionValue::String(value) => Value::String(value.to_string()),
+ RunDimensionValue::Numeric(value) => serde_json::json!(value),
+ RunDimensionValue::Boolean(value) => serde_json::json!(value),
+ RunDimensionValue::Timestamp(value) => Value::String(value.to_string()),
+ }
+}
+
+fn vertex_summary(vertex: &VertexSummary) -> VertexSummaryProjection {
+ VertexSummaryProjection {
+ kind: vertex.vertex.kind().as_str().to_owned(),
+ slug: vertex.slug.to_string(),
+ archived: vertex.archived,
+ title: vertex.title.to_string(),
+ summary: vertex.summary.as_ref().map(ToString::to_string),
+ updated_at: timestamp_value(vertex.updated_at),
+ }
+}
+
+fn attachment_target(
+ store: &ProjectStore,
+ attachment: AttachmentTargetRef,
+ operation: &str,
+) -> Result<AttachmentTargetProjection, FaultRecord> {
+ match attachment {
+ AttachmentTargetRef::Frontier(id) => {
+ let frontier = store
+ .read_frontier(&id.to_string())
+ .map_err(store_fault(operation))?;
+ let reference = FrontierSelector {
+ slug: frontier.slug.to_string(),
+ label: frontier.label.to_string(),
+ };
+ let selector = reference.selector_ref();
+ Ok(AttachmentTargetProjection {
+ kind: "frontier".to_owned(),
+ slug: selector.slug,
+ title: None,
+ label: selector.title,
+ summary: None,
+ status: Some(frontier.status.as_str().to_owned()),
+ })
+ }
+ AttachmentTargetRef::Hypothesis(id) => {
+ let hypothesis = store
+ .read_hypothesis(&id.to_string())
+ .map_err(store_fault(operation))?;
+ let reference = HypothesisSelector {
+ slug: hypothesis.record.slug.to_string(),
+ title: hypothesis.record.title.to_string(),
+ };
+ let selector = reference.selector_ref();
+ Ok(AttachmentTargetProjection {
+ kind: "hypothesis".to_owned(),
+ slug: selector.slug,
+ title: selector.title,
+ label: None,
+ summary: Some(hypothesis.record.summary.to_string()),
+ status: None,
+ })
+ }
+ AttachmentTargetRef::Experiment(id) => {
+ let experiment = store
+ .read_experiment(&id.to_string())
+ .map_err(store_fault(operation))?;
+ let reference = ExperimentSelector {
+ slug: experiment.record.slug.to_string(),
+ title: experiment.record.title.to_string(),
+ };
+ let selector = reference.selector_ref();
+ Ok(AttachmentTargetProjection {
+ kind: "experiment".to_owned(),
+ slug: selector.slug,
+ title: selector.title,
+ label: None,
+ summary: experiment.record.summary.as_ref().map(ToString::to_string),
+ status: None,
+ })
+ }
+ }
+}
+
+fn timestamp_value(timestamp: time::OffsetDateTime) -> String {
+ timestamp
+ .format(&time::format_description::well_known::Rfc3339)
+ .unwrap_or_else(|_| timestamp.unix_timestamp().to_string())
+}
+
+fn store_fault(operation: &str) -> impl Fn(StoreError) -> FaultRecord + '_ {
+ move |error| {
+ FaultRecord::new(
+ FaultKind::Internal,
+ FaultStage::Store,
+ operation,
+ error.to_string(),
+ )
+ }
+}
diff --git a/crates/fidget-spinner-cli/src/mcp/service.rs b/crates/fidget-spinner-cli/src/mcp/service.rs
index d500156..3ce68ae 100644
--- a/crates/fidget-spinner-cli/src/mcp/service.rs
+++ b/crates/fidget-spinner-cli/src/mcp/service.rs
@@ -7,8 +7,8 @@ use std::time::UNIX_EPOCH;
use camino::{Utf8Path, Utf8PathBuf};
use fidget_spinner_core::{
- ArtifactKind, CommandRecipe, ExecutionBackend, ExperimentAnalysis, ExperimentStatus,
- FieldValueType, FrontierVerdict, MetricUnit, MetricVisibility, NonEmptyText,
+ ArtifactKind, AttachmentTargetRef, CommandRecipe, ExecutionBackend, ExperimentAnalysis,
+ ExperimentStatus, FieldValueType, FrontierVerdict, MetricUnit, MetricVisibility, NonEmptyText,
OptimizationObjective, RunDimensionValue, Slug, TagName,
};
use fidget_spinner_store_sqlite::{
@@ -19,15 +19,17 @@ use fidget_spinner_store_sqlite::{
MetricBestQuery, MetricKeySummary, MetricKeysQuery, MetricRankOrder, MetricScope,
OpenExperimentRequest, ProjectStatus, ProjectStore, StoreError, TextPatch,
UpdateArtifactRequest, UpdateExperimentRequest, UpdateFrontierBriefRequest,
- UpdateHypothesisRequest, VertexSelector,
+ UpdateHypothesisRequest, VertexSelector, VertexSummary,
};
use serde::Deserialize;
use serde_json::{Map, Value, json};
use crate::mcp::fault::{FaultKind, FaultRecord, FaultStage};
use crate::mcp::output::{
- ToolOutput, detailed_tool_output, split_presentation, tool_output, tool_success,
+ ToolOutput, fallback_detailed_tool_output, fallback_tool_output, projected_tool_output,
+ split_presentation, tool_success,
};
+use crate::mcp::projection;
use crate::mcp::protocol::{TRANSIENT_ONCE_ENV, TRANSIENT_ONCE_MARKER_ENV, WorkerOperation};
pub(crate) struct WorkerService {
@@ -101,7 +103,13 @@ impl WorkerService {
TagName::new(args.name).map_err(store_fault(&operation))?,
NonEmptyText::new(args.description).map_err(store_fault(&operation))?,
));
- tool_output(&tag, FaultStage::Worker, &operation)?
+ fallback_tool_output(
+ &tag,
+ &tag,
+ libmcp::SurfaceKind::Mutation,
+ FaultStage::Worker,
+ &operation,
+ )?
}
"tag.list" => tag_list_output(&lift!(self.store.list_tags()), &operation)?,
"frontier.create" => {
@@ -118,7 +126,7 @@ impl WorkerService {
.map_err(store_fault(&operation))?,
})
);
- frontier_record_output(&frontier, &operation)?
+ frontier_record_output(&self.store, &frontier, &operation)?
}
"frontier.list" => {
frontier_list_output(&lift!(self.store.list_frontiers()), &operation)?
@@ -126,6 +134,7 @@ impl WorkerService {
"frontier.read" => {
let args = deserialize::<FrontierSelectorArgs>(arguments)?;
frontier_record_output(
+ &self.store,
&lift!(self.store.read_frontier(&args.frontier)),
&operation,
)?
@@ -173,7 +182,7 @@ impl WorkerService {
.transpose()?,
})
);
- frontier_record_output(&frontier, &operation)?
+ frontier_record_output(&self.store, &frontier, &operation)?
}
"frontier.history" => {
let args = deserialize::<FrontierSelectorArgs>(arguments)?;
@@ -219,6 +228,7 @@ impl WorkerService {
"hypothesis.read" => {
let args = deserialize::<HypothesisSelectorArgs>(arguments)?;
hypothesis_detail_output(
+ &self.store,
&lift!(self.store.read_hypothesis(&args.hypothesis)),
&operation,
)?
@@ -303,6 +313,7 @@ impl WorkerService {
"experiment.read" => {
let args = deserialize::<ExperimentSelectorArgs>(arguments)?;
experiment_detail_output(
+ &self.store,
&lift!(self.store.read_experiment(&args.experiment)),
&operation,
)?
@@ -409,6 +420,7 @@ impl WorkerService {
"artifact.read" => {
let args = deserialize::<ArtifactSelectorArgs>(arguments)?;
artifact_detail_output(
+ &self.store,
&lift!(self.store.read_artifact(&args.artifact)),
&operation,
)?
@@ -446,20 +458,23 @@ impl WorkerService {
}
"metric.define" => {
let args = deserialize::<MetricDefineArgs>(arguments)?;
- tool_output(
- &lift!(
- self.store.define_metric(DefineMetricRequest {
- key: NonEmptyText::new(args.key).map_err(store_fault(&operation))?,
- unit: args.unit,
- objective: args.objective,
- visibility: args.visibility.unwrap_or(MetricVisibility::Canonical),
- description: args
- .description
- .map(NonEmptyText::new)
- .transpose()
- .map_err(store_fault(&operation))?,
- })
- ),
+ let metric = lift!(
+ self.store.define_metric(DefineMetricRequest {
+ key: NonEmptyText::new(args.key).map_err(store_fault(&operation))?,
+ unit: args.unit,
+ objective: args.objective,
+ visibility: args.visibility.unwrap_or(MetricVisibility::Canonical),
+ description: args
+ .description
+ .map(NonEmptyText::new)
+ .transpose()
+ .map_err(store_fault(&operation))?,
+ })
+ );
+ fallback_tool_output(
+ &metric,
+ &metric,
+ libmcp::SurfaceKind::Mutation,
FaultStage::Worker,
&operation,
)?
@@ -491,27 +506,35 @@ impl WorkerService {
}
"run.dimension.define" => {
let args = deserialize::<DimensionDefineArgs>(arguments)?;
- tool_output(
- &lift!(
- self.store.define_run_dimension(DefineRunDimensionRequest {
- key: NonEmptyText::new(args.key).map_err(store_fault(&operation))?,
- value_type: args.value_type,
- description: args
- .description
- .map(NonEmptyText::new)
- .transpose()
- .map_err(store_fault(&operation))?,
- })
- ),
+ let dimension = lift!(
+ self.store.define_run_dimension(DefineRunDimensionRequest {
+ key: NonEmptyText::new(args.key).map_err(store_fault(&operation))?,
+ value_type: args.value_type,
+ description: args
+ .description
+ .map(NonEmptyText::new)
+ .transpose()
+ .map_err(store_fault(&operation))?,
+ })
+ );
+ fallback_tool_output(
+ &dimension,
+ &dimension,
+ libmcp::SurfaceKind::Mutation,
+ FaultStage::Worker,
+ &operation,
+ )?
+ }
+ "run.dimension.list" => {
+ let dimensions = lift!(self.store.list_run_dimensions());
+ fallback_tool_output(
+ &dimensions,
+ &dimensions,
+ libmcp::SurfaceKind::List,
FaultStage::Worker,
&operation,
)?
}
- "run.dimension.list" => tool_output(
- &lift!(self.store.list_run_dimensions()),
- FaultStage::Worker,
- &operation,
- )?,
other => {
return Err(FaultRecord::new(
FaultKind::InvalidInput,
@@ -1019,7 +1042,7 @@ fn project_status_output(
"open_experiment_count": status.open_experiment_count,
"artifact_count": status.artifact_count,
});
- detailed_tool_output(
+ fallback_detailed_tool_output(
&concise,
status,
[
@@ -1035,6 +1058,7 @@ fn project_status_output(
]
.join("\n"),
None,
+ libmcp::SurfaceKind::Overview,
FaultStage::Worker,
operation,
)
@@ -1048,7 +1072,7 @@ fn tag_list_output(
"count": tags.len(),
"tags": tags,
});
- detailed_tool_output(
+ fallback_detailed_tool_output(
&concise,
&concise,
if tags.is_empty() {
@@ -1060,6 +1084,7 @@ fn tag_list_output(
.join("\n")
},
None,
+ libmcp::SurfaceKind::List,
FaultStage::Worker,
operation,
)
@@ -1069,10 +1094,9 @@ fn frontier_list_output(
frontiers: &[FrontierSummary],
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- let concise = json!({ "count": frontiers.len(), "frontiers": frontiers });
- detailed_tool_output(
- &concise,
- &concise,
+ let projection = projection::frontier_list(frontiers);
+ projected_tool_output(
+ &projection,
if frontiers.is_empty() {
"no frontiers".to_owned()
} else {
@@ -1097,9 +1121,11 @@ fn frontier_list_output(
}
fn frontier_record_output(
+ store: &ProjectStore,
frontier: &fidget_spinner_core::FrontierRecord,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
+ let projection = projection::frontier_record(store, frontier, operation)?;
let mut lines = vec![format!(
"frontier {} — {}",
frontier.slug, frontier.objective
@@ -1133,9 +1159,8 @@ fn frontier_record_output(
.join("; ")
));
}
- detailed_tool_output(
- &frontier,
- frontier,
+ projected_tool_output(
+ &projection,
lines.join("\n"),
None,
FaultStage::Worker,
@@ -1147,6 +1172,7 @@ fn frontier_open_output(
projection: &FrontierOpenProjection,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
+ let output_projection = projection::frontier_open(projection);
let mut lines = vec![format!(
"frontier {} — {}",
projection.frontier.slug, projection.frontier.objective
@@ -1209,9 +1235,8 @@ fn frontier_open_output(
));
}
}
- detailed_tool_output(
- projection,
- projection,
+ projected_tool_output(
+ &output_projection,
lines.join("\n"),
None,
FaultStage::Worker,
@@ -1223,9 +1248,9 @@ fn hypothesis_record_output(
hypothesis: &fidget_spinner_core::HypothesisRecord,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- detailed_tool_output(
- hypothesis,
- hypothesis,
+ let projection = projection::hypothesis_record(hypothesis);
+ projected_tool_output(
+ &projection,
format!("hypothesis {} — {}", hypothesis.slug, hypothesis.summary),
None,
FaultStage::Worker,
@@ -1237,10 +1262,9 @@ fn hypothesis_list_output(
hypotheses: &[fidget_spinner_store_sqlite::HypothesisSummary],
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- let concise = json!({ "count": hypotheses.len(), "hypotheses": hypotheses });
- detailed_tool_output(
- &concise,
- &concise,
+ let projection = projection::hypothesis_list(hypotheses);
+ projected_tool_output(
+ &projection,
if hypotheses.is_empty() {
"no hypotheses".to_owned()
} else {
@@ -1269,9 +1293,11 @@ fn hypothesis_list_output(
}
fn hypothesis_detail_output(
+ store: &ProjectStore,
detail: &fidget_spinner_store_sqlite::HypothesisDetail,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
+ let projection = projection::hypothesis_detail(store, detail, operation)?;
let mut lines = vec![
format!(
"hypothesis {} — {}",
@@ -1299,9 +1325,8 @@ fn hypothesis_detail_output(
detail.closed_experiments.len(),
detail.artifacts.len()
));
- detailed_tool_output(
- detail,
- detail,
+ projected_tool_output(
+ &projection,
lines.join("\n"),
None,
FaultStage::Worker,
@@ -1313,6 +1338,7 @@ fn experiment_record_output(
experiment: &fidget_spinner_core::ExperimentRecord,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
+ let projection = projection::experiment_record(experiment);
let mut line = format!("experiment {} — {}", experiment.slug, experiment.title);
if let Some(outcome) = experiment.outcome.as_ref() {
let _ = write!(
@@ -1325,24 +1351,16 @@ fn experiment_record_output(
} else {
let _ = write!(line, " | open");
}
- detailed_tool_output(
- experiment,
- experiment,
- line,
- None,
- FaultStage::Worker,
- operation,
- )
+ projected_tool_output(&projection, line, None, FaultStage::Worker, operation)
}
fn experiment_list_output(
experiments: &[fidget_spinner_store_sqlite::ExperimentSummary],
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- let concise = json!({ "count": experiments.len(), "experiments": experiments });
- detailed_tool_output(
- &concise,
- &concise,
+ let projection = projection::experiment_list(experiments);
+ projected_tool_output(
+ &projection,
if experiments.is_empty() {
"no experiments".to_owned()
} else {
@@ -1374,9 +1392,11 @@ fn experiment_list_output(
}
fn experiment_detail_output(
+ store: &ProjectStore,
detail: &fidget_spinner_store_sqlite::ExperimentDetail,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
+ let projection = projection::experiment_detail(store, detail, operation)?;
let mut lines = vec![format!(
"experiment {} — {}",
detail.record.slug, detail.record.title
@@ -1402,9 +1422,8 @@ fn experiment_detail_output(
detail.children.len(),
detail.artifacts.len()
));
- detailed_tool_output(
- detail,
- detail,
+ projected_tool_output(
+ &projection,
lines.join("\n"),
None,
FaultStage::Worker,
@@ -1416,9 +1435,9 @@ fn artifact_record_output(
artifact: &fidget_spinner_core::ArtifactRecord,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- detailed_tool_output(
- artifact,
- artifact,
+ let projection = projection::artifact_record(artifact);
+ projected_tool_output(
+ &projection,
format!(
"artifact {} — {} -> {}",
artifact.slug, artifact.label, artifact.locator
@@ -1433,10 +1452,9 @@ fn artifact_list_output(
artifacts: &[fidget_spinner_store_sqlite::ArtifactSummary],
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- let concise = json!({ "count": artifacts.len(), "artifacts": artifacts });
- detailed_tool_output(
- &concise,
- &concise,
+ let projection = projection::artifact_list(artifacts);
+ projected_tool_output(
+ &projection,
if artifacts.is_empty() {
"no artifacts".to_owned()
} else {
@@ -1458,9 +1476,11 @@ fn artifact_list_output(
}
fn artifact_detail_output(
+ store: &ProjectStore,
detail: &fidget_spinner_store_sqlite::ArtifactDetail,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
+ let projection = projection::artifact_detail(store, detail, operation)?;
let mut lines = vec![format!(
"artifact {} — {} -> {}",
detail.record.slug, detail.record.label, detail.record.locator
@@ -1468,9 +1488,8 @@ fn artifact_detail_output(
if !detail.attachments.is_empty() {
lines.push(format!("attachments: {}", detail.attachments.len()));
}
- detailed_tool_output(
- detail,
- detail,
+ projected_tool_output(
+ &projection,
lines.join("\n"),
None,
FaultStage::Worker,
@@ -1482,10 +1501,9 @@ fn metric_keys_output(
keys: &[MetricKeySummary],
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- let concise = json!({ "count": keys.len(), "metrics": keys });
- detailed_tool_output(
- &concise,
- &concise,
+ let projection = projection::metric_keys(keys);
+ projected_tool_output(
+ &projection,
if keys.is_empty() {
"no metrics".to_owned()
} else {
@@ -1513,10 +1531,9 @@ fn metric_best_output(
entries: &[MetricBestEntry],
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- let concise = json!({ "count": entries.len(), "entries": entries });
- detailed_tool_output(
- &concise,
- &concise,
+ let projection = projection::metric_best(entries);
+ projected_tool_output(
+ &projection,
if entries.is_empty() {
"no matching experiments".to_owned()
} else {
@@ -1550,7 +1567,7 @@ fn history_output(
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
let concise = json!({ "count": history.len(), "history": history });
- detailed_tool_output(
+ fallback_detailed_tool_output(
&concise,
&concise,
if history.is_empty() {
@@ -1568,7 +1585,550 @@ fn history_output(
.join("\n")
},
None,
+ libmcp::SurfaceKind::List,
FaultStage::Worker,
operation,
)
}
+
+#[allow(
+ dead_code,
+ reason = "replaced by typed projection structs in crate::mcp::projection"
+)]
+#[allow(
+ clippy::wildcard_imports,
+ reason = "legacy helpers are quarantined pending full purge"
+)]
+mod legacy_projection_values {
+ use super::*;
+
+ fn frontier_summary_value(frontier: &FrontierSummary) -> Value {
+ json!({
+ "slug": frontier.slug,
+ "label": frontier.label,
+ "objective": frontier.objective,
+ "status": frontier.status,
+ "active_hypothesis_count": frontier.active_hypothesis_count,
+ "open_experiment_count": frontier.open_experiment_count,
+ "updated_at": timestamp_value(frontier.updated_at),
+ })
+ }
+
+ fn frontier_record_value(
+ store: &ProjectStore,
+ frontier: &fidget_spinner_core::FrontierRecord,
+ operation: &str,
+ ) -> Result<Value, FaultRecord> {
+ let roadmap = frontier
+ .brief
+ .roadmap
+ .iter()
+ .map(|item| {
+ let hypothesis = store
+ .read_hypothesis(&item.hypothesis_id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(json!({
+ "rank": item.rank,
+ "hypothesis": {
+ "slug": hypothesis.record.slug,
+ "title": hypothesis.record.title,
+ "summary": hypothesis.record.summary,
+ },
+ "summary": item.summary,
+ }))
+ })
+ .collect::<Result<Vec<_>, FaultRecord>>()?;
+ Ok(json!({
+ "record": {
+ "slug": frontier.slug,
+ "label": frontier.label,
+ "objective": frontier.objective,
+ "status": frontier.status,
+ "revision": frontier.revision,
+ "created_at": timestamp_value(frontier.created_at),
+ "updated_at": timestamp_value(frontier.updated_at),
+ "brief": {
+ "situation": frontier.brief.situation,
+ "roadmap": roadmap,
+ "unknowns": frontier.brief.unknowns,
+ "revision": frontier.brief.revision,
+ "updated_at": frontier.brief.updated_at.map(timestamp_value),
+ },
+ }
+ }))
+ }
+
+ fn frontier_open_value(projection: &FrontierOpenProjection) -> Value {
+ let roadmap = projection
+ .frontier
+ .brief
+ .roadmap
+ .iter()
+ .map(|item| {
+ let hypothesis = projection
+ .active_hypotheses
+ .iter()
+ .find(|state| state.hypothesis.id == item.hypothesis_id)
+ .map(|state| {
+ json!({
+ "slug": state.hypothesis.slug,
+ "title": state.hypothesis.title,
+ "summary": state.hypothesis.summary,
+ })
+ });
+ json!({
+ "rank": item.rank,
+ "hypothesis": hypothesis,
+ "summary": item.summary,
+ })
+ })
+ .collect::<Vec<_>>();
+ json!({
+ "frontier": {
+ "slug": projection.frontier.slug,
+ "label": projection.frontier.label,
+ "objective": projection.frontier.objective,
+ "status": projection.frontier.status,
+ "revision": projection.frontier.revision,
+ "created_at": timestamp_value(projection.frontier.created_at),
+ "updated_at": timestamp_value(projection.frontier.updated_at),
+ "brief": {
+ "situation": projection.frontier.brief.situation,
+ "roadmap": roadmap,
+ "unknowns": projection.frontier.brief.unknowns,
+ "revision": projection.frontier.brief.revision,
+ "updated_at": projection.frontier.brief.updated_at.map(timestamp_value),
+ },
+ },
+ "active_tags": projection.active_tags,
+ "active_metric_keys": projection
+ .active_metric_keys
+ .iter()
+ .map(metric_key_summary_value)
+ .collect::<Vec<_>>(),
+ "active_hypotheses": projection
+ .active_hypotheses
+ .iter()
+ .map(hypothesis_current_state_value)
+ .collect::<Vec<_>>(),
+ "open_experiments": projection
+ .open_experiments
+ .iter()
+ .map(experiment_summary_value)
+ .collect::<Vec<_>>(),
+ })
+ }
+
+ fn hypothesis_summary_value(
+ hypothesis: &fidget_spinner_store_sqlite::HypothesisSummary,
+ ) -> Value {
+ json!({
+ "slug": hypothesis.slug,
+ "archived": hypothesis.archived,
+ "title": hypothesis.title,
+ "summary": hypothesis.summary,
+ "tags": hypothesis.tags,
+ "open_experiment_count": hypothesis.open_experiment_count,
+ "latest_verdict": hypothesis.latest_verdict,
+ "updated_at": timestamp_value(hypothesis.updated_at),
+ })
+ }
+
+ fn hypothesis_record_value(hypothesis: &fidget_spinner_core::HypothesisRecord) -> Value {
+ json!({
+ "slug": hypothesis.slug,
+ "archived": hypothesis.archived,
+ "title": hypothesis.title,
+ "summary": hypothesis.summary,
+ "body": hypothesis.body,
+ "tags": hypothesis.tags,
+ "revision": hypothesis.revision,
+ "created_at": timestamp_value(hypothesis.created_at),
+ "updated_at": timestamp_value(hypothesis.updated_at),
+ })
+ }
+
+ fn hypothesis_detail_concise_value(
+ store: &ProjectStore,
+ detail: &fidget_spinner_store_sqlite::HypothesisDetail,
+ operation: &str,
+ ) -> Result<Value, FaultRecord> {
+ let frontier = store
+ .read_frontier(&detail.record.frontier_id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(json!({
+ "record": {
+ "slug": detail.record.slug,
+ "archived": detail.record.archived,
+ "title": detail.record.title,
+ "summary": detail.record.summary,
+ "tags": detail.record.tags,
+ "revision": detail.record.revision,
+ "updated_at": timestamp_value(detail.record.updated_at),
+ },
+ "frontier": {
+ "slug": frontier.slug,
+ "label": frontier.label,
+ "status": frontier.status,
+ },
+ "parents": detail.parents.len(),
+ "children": detail.children.len(),
+ "open_experiments": detail
+ .open_experiments
+ .iter()
+ .map(experiment_summary_value)
+ .collect::<Vec<_>>(),
+ "latest_closed_experiment": detail
+ .closed_experiments
+ .first()
+ .map(experiment_summary_value),
+ "artifact_count": detail.artifacts.len(),
+ }))
+ }
+
+ fn hypothesis_detail_full_value(
+ store: &ProjectStore,
+ detail: &fidget_spinner_store_sqlite::HypothesisDetail,
+ operation: &str,
+ ) -> Result<Value, FaultRecord> {
+ let frontier = store
+ .read_frontier(&detail.record.frontier_id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(json!({
+ "record": hypothesis_record_value(&detail.record),
+ "frontier": {
+ "slug": frontier.slug,
+ "label": frontier.label,
+ "status": frontier.status,
+ },
+ "parents": detail.parents.iter().map(vertex_summary_value).collect::<Vec<_>>(),
+ "children": detail.children.iter().map(vertex_summary_value).collect::<Vec<_>>(),
+ "open_experiments": detail
+ .open_experiments
+ .iter()
+ .map(experiment_summary_value)
+ .collect::<Vec<_>>(),
+ "closed_experiments": detail
+ .closed_experiments
+ .iter()
+ .map(experiment_summary_value)
+ .collect::<Vec<_>>(),
+ "artifacts": detail.artifacts.iter().map(artifact_summary_value).collect::<Vec<_>>(),
+ }))
+ }
+
+ fn experiment_summary_value(
+ experiment: &fidget_spinner_store_sqlite::ExperimentSummary,
+ ) -> Value {
+ json!({
+ "slug": experiment.slug,
+ "archived": experiment.archived,
+ "title": experiment.title,
+ "summary": experiment.summary,
+ "tags": experiment.tags,
+ "status": experiment.status,
+ "verdict": experiment.verdict,
+ "primary_metric": experiment
+ .primary_metric
+ .as_ref()
+ .map(metric_observation_summary_value),
+ "updated_at": timestamp_value(experiment.updated_at),
+ "closed_at": experiment.closed_at.map(timestamp_value),
+ })
+ }
+
+ fn experiment_record_value(experiment: &fidget_spinner_core::ExperimentRecord) -> Value {
+ json!({
+ "slug": experiment.slug,
+ "archived": experiment.archived,
+ "title": experiment.title,
+ "summary": experiment.summary,
+ "tags": experiment.tags,
+ "status": experiment.status,
+ "outcome": experiment.outcome.as_ref().map(experiment_outcome_value),
+ "revision": experiment.revision,
+ "created_at": timestamp_value(experiment.created_at),
+ "updated_at": timestamp_value(experiment.updated_at),
+ })
+ }
+
+ fn experiment_detail_concise_value(
+ store: &ProjectStore,
+ detail: &fidget_spinner_store_sqlite::ExperimentDetail,
+ operation: &str,
+ ) -> Result<Value, FaultRecord> {
+ let frontier = store
+ .read_frontier(&detail.record.frontier_id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(json!({
+ "record": {
+ "slug": detail.record.slug,
+ "archived": detail.record.archived,
+ "title": detail.record.title,
+ "summary": detail.record.summary,
+ "tags": detail.record.tags,
+ "status": detail.record.status,
+ "verdict": detail.record.outcome.as_ref().map(|outcome| outcome.verdict),
+ "revision": detail.record.revision,
+ "updated_at": timestamp_value(detail.record.updated_at),
+ },
+ "frontier": {
+ "slug": frontier.slug,
+ "label": frontier.label,
+ "status": frontier.status,
+ },
+ "owning_hypothesis": hypothesis_summary_value(&detail.owning_hypothesis),
+ "parents": detail.parents.len(),
+ "children": detail.children.len(),
+ "artifact_count": detail.artifacts.len(),
+ "outcome": detail.record.outcome.as_ref().map(experiment_outcome_value),
+ }))
+ }
+
+ fn experiment_detail_full_value(
+ store: &ProjectStore,
+ detail: &fidget_spinner_store_sqlite::ExperimentDetail,
+ operation: &str,
+ ) -> Result<Value, FaultRecord> {
+ let frontier = store
+ .read_frontier(&detail.record.frontier_id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(json!({
+ "record": experiment_record_value(&detail.record),
+ "frontier": {
+ "slug": frontier.slug,
+ "label": frontier.label,
+ "status": frontier.status,
+ },
+ "owning_hypothesis": hypothesis_summary_value(&detail.owning_hypothesis),
+ "parents": detail.parents.iter().map(vertex_summary_value).collect::<Vec<_>>(),
+ "children": detail.children.iter().map(vertex_summary_value).collect::<Vec<_>>(),
+ "artifacts": detail.artifacts.iter().map(artifact_summary_value).collect::<Vec<_>>(),
+ }))
+ }
+
+ fn artifact_summary_value(artifact: &fidget_spinner_store_sqlite::ArtifactSummary) -> Value {
+ json!({
+ "slug": artifact.slug,
+ "kind": artifact.kind,
+ "label": artifact.label,
+ "summary": artifact.summary,
+ "locator": artifact.locator,
+ "media_type": artifact.media_type,
+ "updated_at": timestamp_value(artifact.updated_at),
+ })
+ }
+
+ fn artifact_record_value(artifact: &fidget_spinner_core::ArtifactRecord) -> Value {
+ json!({
+ "slug": artifact.slug,
+ "kind": artifact.kind,
+ "label": artifact.label,
+ "summary": artifact.summary,
+ "locator": artifact.locator,
+ "media_type": artifact.media_type,
+ "revision": artifact.revision,
+ "created_at": timestamp_value(artifact.created_at),
+ "updated_at": timestamp_value(artifact.updated_at),
+ })
+ }
+
+ fn artifact_detail_concise_value(
+ detail: &fidget_spinner_store_sqlite::ArtifactDetail,
+ ) -> Value {
+ json!({
+ "record": {
+ "slug": detail.record.slug,
+ "kind": detail.record.kind,
+ "label": detail.record.label,
+ "summary": detail.record.summary,
+ "locator": detail.record.locator,
+ "media_type": detail.record.media_type,
+ "revision": detail.record.revision,
+ "updated_at": timestamp_value(detail.record.updated_at),
+ },
+ "attachment_count": detail.attachments.len(),
+ })
+ }
+
+ fn artifact_detail_full_value(
+ store: &ProjectStore,
+ detail: &fidget_spinner_store_sqlite::ArtifactDetail,
+ operation: &str,
+ ) -> Result<Value, FaultRecord> {
+ let attachments = detail
+ .attachments
+ .iter()
+ .copied()
+ .map(|attachment| attachment_target_value(store, attachment, operation))
+ .collect::<Result<Vec<_>, FaultRecord>>()?;
+ Ok(json!({
+ "record": artifact_record_value(&detail.record),
+ "attachments": attachments,
+ }))
+ }
+
+ fn hypothesis_current_state_value(
+ state: &fidget_spinner_store_sqlite::HypothesisCurrentState,
+ ) -> Value {
+ json!({
+ "hypothesis": hypothesis_summary_value(&state.hypothesis),
+ "open_experiments": state
+ .open_experiments
+ .iter()
+ .map(experiment_summary_value)
+ .collect::<Vec<_>>(),
+ "latest_closed_experiment": state
+ .latest_closed_experiment
+ .as_ref()
+ .map(experiment_summary_value),
+ })
+ }
+
+ fn metric_key_summary_value(metric: &MetricKeySummary) -> Value {
+ json!({
+ "key": metric.key,
+ "unit": metric.unit,
+ "objective": metric.objective,
+ "visibility": metric.visibility,
+ "description": metric.description,
+ "reference_count": metric.reference_count,
+ })
+ }
+
+ fn metric_best_entry_value(entry: &MetricBestEntry) -> Value {
+ json!({
+ "experiment": experiment_summary_value(&entry.experiment),
+ "hypothesis": hypothesis_summary_value(&entry.hypothesis),
+ "value": entry.value,
+ "dimensions": dimension_map_value(&entry.dimensions),
+ })
+ }
+
+ fn metric_observation_summary_value(
+ metric: &fidget_spinner_store_sqlite::MetricObservationSummary,
+ ) -> Value {
+ json!({
+ "key": metric.key,
+ "value": metric.value,
+ "unit": metric.unit,
+ "objective": metric.objective,
+ })
+ }
+
+ fn experiment_outcome_value(outcome: &fidget_spinner_core::ExperimentOutcome) -> Value {
+ json!({
+ "backend": outcome.backend,
+ "command": command_recipe_value(&outcome.command),
+ "dimensions": dimension_map_value(&outcome.dimensions),
+ "primary_metric": metric_value_value(&outcome.primary_metric),
+ "supporting_metrics": outcome
+ .supporting_metrics
+ .iter()
+ .map(metric_value_value)
+ .collect::<Vec<_>>(),
+ "verdict": outcome.verdict,
+ "rationale": outcome.rationale,
+ "analysis": outcome.analysis.as_ref().map(experiment_analysis_value),
+ "closed_at": timestamp_value(outcome.closed_at),
+ })
+ }
+
+ fn experiment_analysis_value(analysis: &ExperimentAnalysis) -> Value {
+ json!({
+ "summary": analysis.summary,
+ "body": analysis.body,
+ })
+ }
+
+ fn metric_value_value(metric: &fidget_spinner_core::MetricValue) -> Value {
+ json!({
+ "key": metric.key,
+ "value": metric.value,
+ })
+ }
+
+ fn command_recipe_value(command: &CommandRecipe) -> Value {
+ json!({
+ "argv": command.argv,
+ "working_directory": command.working_directory,
+ "env": command.env,
+ })
+ }
+
+ fn dimension_map_value(dimensions: &BTreeMap<NonEmptyText, RunDimensionValue>) -> Value {
+ let mut object = Map::new();
+ for (key, value) in dimensions {
+ let _ = object.insert(key.to_string(), run_dimension_value(value));
+ }
+ Value::Object(object)
+ }
+
+ fn run_dimension_value(value: &RunDimensionValue) -> Value {
+ match value {
+ RunDimensionValue::String(value) => Value::String(value.to_string()),
+ RunDimensionValue::Numeric(value) => json!(value),
+ RunDimensionValue::Boolean(value) => json!(value),
+ RunDimensionValue::Timestamp(value) => Value::String(value.to_string()),
+ }
+ }
+
+ fn vertex_summary_value(vertex: &VertexSummary) -> Value {
+ json!({
+ "kind": vertex.vertex.kind().as_str(),
+ "slug": vertex.slug,
+ "archived": vertex.archived,
+ "title": vertex.title,
+ "summary": vertex.summary,
+ "updated_at": timestamp_value(vertex.updated_at),
+ })
+ }
+
+ fn attachment_target_value(
+ store: &ProjectStore,
+ attachment: AttachmentTargetRef,
+ operation: &str,
+ ) -> Result<Value, FaultRecord> {
+ match attachment {
+ AttachmentTargetRef::Frontier(id) => {
+ let frontier = store
+ .read_frontier(&id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(json!({
+ "kind": "frontier",
+ "slug": frontier.slug,
+ "label": frontier.label,
+ "status": frontier.status,
+ }))
+ }
+ AttachmentTargetRef::Hypothesis(id) => {
+ let hypothesis = store
+ .read_hypothesis(&id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(json!({
+ "kind": "hypothesis",
+ "slug": hypothesis.record.slug,
+ "title": hypothesis.record.title,
+ "summary": hypothesis.record.summary,
+ }))
+ }
+ AttachmentTargetRef::Experiment(id) => {
+ let experiment = store
+ .read_experiment(&id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(json!({
+ "kind": "experiment",
+ "slug": experiment.record.slug,
+ "title": experiment.record.title,
+ "summary": experiment.record.summary,
+ }))
+ }
+ }
+ }
+
+ fn timestamp_value(timestamp: time::OffsetDateTime) -> String {
+ timestamp
+ .format(&time::format_description::well_known::Rfc3339)
+ .unwrap_or_else(|_| timestamp.unix_timestamp().to_string())
+ }
+}
diff --git a/crates/fidget-spinner-cli/src/ui.rs b/crates/fidget-spinner-cli/src/ui.rs
index 98cc95d..8eb1845 100644
--- a/crates/fidget-spinner-cli/src/ui.rs
+++ b/crates/fidget-spinner-cli/src/ui.rs
@@ -278,7 +278,7 @@ fn render_frontier_grid(frontiers: &[FrontierSummary], limit: Option<u32>) -> Ma
article.mini-card {
div.card-header {
a.title-link href=(frontier_href(&frontier.slug)) { (frontier.label) }
- span.status-chip class=(frontier_status_class(frontier.status.as_str())) {
+ span class=(status_chip_classes(frontier_status_class(frontier.status.as_str()))) {
(frontier.status.as_str())
}
}
@@ -325,7 +325,7 @@ fn render_frontier_header(frontier: &FrontierRecord) -> Markup {
p.prose { (frontier.objective) }
div.meta-row {
span { "slug " code { (frontier.slug) } }
- span.status-chip class=(frontier_status_class(frontier.status.as_str())) {
+ span class=(status_chip_classes(frontier_status_class(frontier.status.as_str()))) {
(frontier.status.as_str())
}
span.muted { "updated " (format_timestamp(frontier.updated_at)) }
@@ -453,7 +453,7 @@ fn render_hypothesis_current_state_grid(
(state.hypothesis.title)
}
@if let Some(verdict) = state.hypothesis.latest_verdict {
- span.status-chip class=(verdict_class(verdict)) {
+ span class=(status_chip_classes(verdict_class(verdict))) {
(verdict.as_str())
}
}
@@ -527,7 +527,7 @@ fn render_hypothesis_header(detail: &HypothesisDetail, frontier: &FrontierRecord
span { "frontier " a href=(frontier_href(&frontier.slug)) { (frontier.label) } }
span { "slug " code { (detail.record.slug) } }
@if detail.record.archived {
- span.status-chip.archived { "archived" }
+ span class="status-chip status-archived" { "archived" }
}
span.muted { "updated " (format_timestamp(detail.record.updated_at)) }
}
@@ -560,7 +560,7 @@ fn render_experiment_header(detail: &ExperimentDetail, frontier: &FrontierRecord
(detail.owning_hypothesis.title)
}
}
- span.status-chip class=(experiment_status_class(detail.record.status)) {
+ span class=(status_chip_classes(experiment_status_class(detail.record.status))) {
(detail.record.status.as_str())
}
@if let Some(verdict) = detail
@@ -569,7 +569,7 @@ fn render_experiment_header(detail: &ExperimentDetail, frontier: &FrontierRecord
.as_ref()
.map(|outcome| outcome.verdict)
{
- span.status-chip class=(verdict_class(verdict)) { (verdict.as_str()) }
+ span class=(status_chip_classes(verdict_class(verdict))) { (verdict.as_str()) }
}
span.muted { "updated " (format_timestamp(detail.record.updated_at)) }
}
@@ -763,7 +763,7 @@ fn render_artifact_section(
article.mini-card {
div.card-header {
a.title-link href=(artifact_href(&artifact.slug)) { (artifact.label) }
- span.status-chip.classless { (artifact.kind.as_str()) }
+ span class="status-chip classless" { (artifact.kind.as_str()) }
}
@if let Some(summary) = artifact.summary.as_ref() {
p.prose { (summary) }
@@ -805,11 +805,11 @@ fn render_experiment_card(experiment: &ExperimentSummary) -> Markup {
article.mini-card {
div.card-header {
a.title-link href=(experiment_href(&experiment.slug)) { (experiment.title) }
- span.status-chip class=(experiment_status_class(experiment.status)) {
+ span class=(status_chip_classes(experiment_status_class(experiment.status))) {
(experiment.status.as_str())
}
@if let Some(verdict) = experiment.verdict {
- span.status-chip class=(verdict_class(verdict)) { (verdict.as_str()) }
+ span class=(status_chip_classes(verdict_class(verdict))) { (verdict.as_str()) }
}
}
@if let Some(summary) = experiment.summary.as_ref() {
@@ -854,9 +854,14 @@ fn render_experiment_summary_line(experiment: &ExperimentSummary) -> Markup {
fn render_experiment_link_chip(experiment: &ExperimentSummary) -> Markup {
html! {
a.link-chip href=(experiment_href(&experiment.slug)) {
- span { (experiment.title) }
- @if let Some(verdict) = experiment.verdict {
- span.status-chip class=(verdict_class(verdict)) { (verdict.as_str()) }
+ span.link-chip-main {
+ span.link-chip-title { (experiment.title) }
+ @if let Some(verdict) = experiment.verdict {
+ span class=(status_chip_classes(verdict_class(verdict))) { (verdict.as_str()) }
+ }
+ }
+ @if experiment.verdict.is_none() && experiment.status == ExperimentStatus::Open {
+ span.link-chip-summary { "open experiment" }
}
}
}
@@ -873,10 +878,12 @@ fn render_vertex_chip(summary: &VertexSummary) -> Markup {
};
html! {
a.link-chip href=(href) {
- span.kind-chip { (kind) }
- span { (summary.title) }
+ span.link-chip-main {
+ span.kind-chip { (kind) }
+ span.link-chip-title { (summary.title) }
+ }
@if let Some(summary_text) = summary.summary.as_ref() {
- span.muted { " — " (summary_text) }
+ span.link-chip-summary { (summary_text) }
}
}
}
@@ -885,10 +892,12 @@ fn render_vertex_chip(summary: &VertexSummary) -> Markup {
fn render_attachment_chip(attachment: &AttachmentDisplay) -> Markup {
html! {
a.link-chip href=(&attachment.href) {
- span.kind-chip { (attachment.kind) }
- span { (&attachment.title) }
+ span.link-chip-main {
+ span.kind-chip { (attachment.kind) }
+ span.link-chip-title { (&attachment.title) }
+ }
@if let Some(summary) = attachment.summary.as_ref() {
- span.muted { " — " (summary) }
+ span.link-chip-summary { (summary) }
}
}
}
@@ -1091,6 +1100,10 @@ fn experiment_status_class(status: ExperimentStatus) -> &'static str {
}
}
/// Compose the class attribute for a status chip: the shared
/// `status-chip` base class followed by the status-specific modifier.
fn status_chip_classes(extra_class: &str) -> String {
    let mut classes = String::with_capacity("status-chip ".len() + extra_class.len());
    classes.push_str("status-chip ");
    classes.push_str(extra_class);
    classes
}
+
fn verdict_class(verdict: FrontierVerdict) -> &'static str {
match verdict {
FrontierVerdict::Accepted => "status-accepted",
@@ -1114,25 +1127,29 @@ fn limit_items<T>(items: &[T], limit: Option<u32>) -> &[T] {
fn styles() -> &'static str {
r#"
:root {
- color-scheme: dark;
- --bg: #091019;
- --panel: #0f1823;
- --panel-2: #131f2d;
- --border: #1e3850;
- --text: #d8e6f3;
- --muted: #87a0b8;
- --accent: #6dc7ff;
- --accepted: #7ce38b;
- --kept: #8de0c0;
- --parked: #d9c17d;
- --rejected: #ee7a7a;
+ color-scheme: light;
+ --bg: #f6f3ec;
+ --panel: #fffdf8;
+ --panel-2: #f3eee4;
+ --border: #d8d1c4;
+ --border-strong: #c8bfaf;
+ --text: #22201a;
+ --muted: #746e62;
+ --accent: #2d5c4d;
+ --accent-soft: #dbe8e2;
+ --tag: #ece5d8;
+ --accepted: #2f6b43;
+ --kept: #3d6656;
+ --parked: #8b5b24;
+ --rejected: #8a2f2f;
+ --shadow: rgba(74, 58, 32, 0.06);
}
* { box-sizing: border-box; }
body {
margin: 0;
background: var(--bg);
color: var(--text);
- font: 15px/1.5 "Iosevka Web", "Iosevka", "JetBrains Mono", monospace;
+ font: 15px/1.55 "Iosevka Web", "IBM Plex Mono", "SFMono-Regular", monospace;
}
a {
color: var(--accent);
@@ -1140,92 +1157,102 @@ fn styles() -> &'static str {
}
a:hover { text-decoration: underline; }
.shell {
- width: min(1500px, 100%);
+ width: min(1360px, 100%);
margin: 0 auto;
- padding: 20px;
+ padding: 24px 24px 40px;
display: grid;
- gap: 16px;
+ gap: 18px;
}
.page-header {
display: grid;
- gap: 8px;
- padding: 16px 18px;
+ gap: 10px;
+ padding: 18px 20px;
border: 1px solid var(--border);
background: var(--panel);
+ box-shadow: 0 1px 0 var(--shadow);
}
.eyebrow {
display: flex;
gap: 10px;
+ flex-wrap: wrap;
color: var(--muted);
font-size: 13px;
text-transform: uppercase;
letter-spacing: 0.05em;
}
- .sep { color: #4d6478; }
+ .sep { color: #a08d70; }
.page-title {
margin: 0;
font-size: clamp(22px, 3.8vw, 34px);
line-height: 1.1;
+ overflow-wrap: anywhere;
}
.page-subtitle {
margin: 0;
color: var(--muted);
max-width: 90ch;
+ overflow-wrap: anywhere;
}
.card {
border: 1px solid var(--border);
background: var(--panel);
- padding: 16px 18px;
+ padding: 18px 20px;
display: grid;
- gap: 12px;
+ gap: 14px;
+ box-shadow: 0 1px 0 var(--shadow);
}
.subcard {
- border: 1px solid #1a2b3c;
+ border: 1px solid var(--border);
background: var(--panel-2);
padding: 12px 14px;
display: grid;
gap: 10px;
min-width: 0;
+ align-content: start;
}
.block { display: grid; gap: 10px; }
.split {
display: grid;
gap: 16px;
grid-template-columns: repeat(auto-fit, minmax(320px, 1fr));
+ align-items: start;
}
.card-grid {
display: grid;
gap: 12px;
- grid-template-columns: repeat(auto-fit, minmax(280px, 1fr));
+ grid-template-columns: repeat(auto-fit, minmax(260px, 1fr));
+ align-items: start;
}
.mini-card {
- border: 1px solid #1a2b3c;
+ border: 1px solid var(--border);
background: var(--panel-2);
padding: 12px 14px;
display: grid;
gap: 9px;
min-width: 0;
+ align-content: start;
}
.card-header {
display: flex;
gap: 10px;
- align-items: center;
+ align-items: flex-start;
flex-wrap: wrap;
}
.title-link {
font-size: 16px;
font-weight: 700;
- color: #f2f8ff;
+ color: var(--text);
+ overflow-wrap: anywhere;
}
h1, h2, h3 {
margin: 0;
line-height: 1.15;
}
h2 { font-size: 19px; }
- h3 { font-size: 14px; color: #c9d8e6; }
+ h3 { font-size: 14px; color: #4f473a; }
.prose {
margin: 0;
- color: #dce9f6;
+ color: var(--text);
max-width: 92ch;
white-space: pre-wrap;
}
@@ -1233,14 +1260,14 @@ fn styles() -> &'static str {
.meta-row {
display: flex;
flex-wrap: wrap;
- gap: 14px;
+ gap: 8px 14px;
align-items: center;
font-size: 13px;
}
.kv-grid {
display: grid;
gap: 10px 14px;
- grid-template-columns: repeat(auto-fit, minmax(220px, 1fr));
+ grid-template-columns: repeat(auto-fit, minmax(180px, 1fr));
}
.kv {
display: grid;
@@ -1260,18 +1287,44 @@ fn styles() -> &'static str {
display: flex;
flex-wrap: wrap;
gap: 8px;
+ align-items: flex-start;
}
- .tag-chip, .kind-chip, .status-chip, .metric-pill, .link-chip {
- border: 1px solid #24425b;
- background: rgba(109, 199, 255, 0.06);
+ .tag-chip, .kind-chip, .status-chip, .metric-pill {
+ display: inline-flex;
+ align-items: center;
+ width: fit-content;
+ max-width: 100%;
+ border: 1px solid var(--border-strong);
+ background: var(--tag);
padding: 4px 8px;
font-size: 12px;
line-height: 1.2;
}
.link-chip {
- display: inline-flex;
- gap: 8px;
- align-items: center;
+ display: inline-grid;
+ gap: 4px;
+ align-content: start;
+ max-width: min(100%, 72ch);
+ padding: 8px 10px;
+ border: 1px solid var(--border);
+ background: var(--panel);
+ min-width: 0;
+ }
+ .link-chip-main {
+ display: flex;
+ flex-wrap: wrap;
+ gap: 6px 8px;
+ align-items: flex-start;
+ min-width: 0;
+ }
+ .link-chip-title {
+ overflow-wrap: anywhere;
+ }
+ .link-chip-summary {
+ color: var(--muted);
+ font-size: 12px;
+ line-height: 1.4;
+ overflow-wrap: anywhere;
}
.kind-chip {
color: var(--muted);
@@ -1283,14 +1336,13 @@ fn styles() -> &'static str {
letter-spacing: 0.05em;
font-weight: 700;
}
- .status-accepted { color: var(--accepted); border-color: rgba(124, 227, 139, 0.35); }
- .status-kept { color: var(--kept); border-color: rgba(141, 224, 192, 0.35); }
- .status-parked { color: var(--parked); border-color: rgba(217, 193, 125, 0.35); }
- .status-rejected { color: var(--rejected); border-color: rgba(238, 122, 122, 0.35); }
- .status-open { color: var(--accent); border-color: rgba(109, 199, 255, 0.35); }
- .status-exploring { color: var(--accent); border-color: rgba(109, 199, 255, 0.35); }
- .status-neutral, .classless { color: #a7c0d4; border-color: #2a4358; }
- .status-archived { color: #7f8da0; border-color: #2b3540; }
+ .status-accepted { color: var(--accepted); border-color: rgba(47, 107, 67, 0.25); background: rgba(47, 107, 67, 0.08); }
+ .status-kept { color: var(--kept); border-color: rgba(61, 102, 86, 0.25); background: rgba(61, 102, 86, 0.08); }
+ .status-parked { color: var(--parked); border-color: rgba(139, 91, 36, 0.25); background: rgba(139, 91, 36, 0.09); }
+ .status-rejected { color: var(--rejected); border-color: rgba(138, 47, 47, 0.25); background: rgba(138, 47, 47, 0.09); }
+ .status-open, .status-exploring { color: var(--accent); border-color: rgba(45, 92, 77, 0.25); background: var(--accent-soft); }
+ .status-neutral, .classless { color: #5f584d; border-color: var(--border-strong); background: var(--panel); }
+ .status-archived { color: #7a756d; border-color: var(--border); background: var(--panel); }
.metric-table {
width: 100%;
border-collapse: collapse;
@@ -1299,7 +1351,7 @@ fn styles() -> &'static str {
.metric-table th,
.metric-table td {
padding: 7px 8px;
- border-top: 1px solid #1b2d3e;
+ border-top: 1px solid var(--border);
text-align: left;
vertical-align: top;
}
@@ -1320,22 +1372,28 @@ fn styles() -> &'static str {
display: grid;
gap: 6px;
}
+ .roadmap-list li, .simple-list li {
+ overflow-wrap: anywhere;
+ }
.code-block {
white-space: pre-wrap;
overflow-wrap: anywhere;
- border: 1px solid #1a2b3c;
- background: #0b131c;
+ border: 1px solid var(--border);
+ background: var(--panel-2);
padding: 12px 14px;
}
code {
font-family: inherit;
font-size: 0.95em;
+ background: var(--panel-2);
+ padding: 0.05rem 0.3rem;
}
@media (max-width: 720px) {
.shell { padding: 12px; }
.card, .page-header { padding: 14px; }
.subcard, .mini-card { padding: 12px; }
.card-grid, .split, .kv-grid { grid-template-columns: 1fr; }
+ .page-title { font-size: 18px; }
}
"#
}