swarm repositories / source
about summary refs log tree commit diff
path: root/crates/fidget-spinner-cli/src/mcp/service.rs
diff options
context:
space:
mode:
author: main <main@swarm.moe> 2026-03-20 20:17:39 -0400
committer: main <main@swarm.moe> 2026-03-20 20:17:39 -0400
commit: 203d4a93e1aaa5e325e8e6999bf26fa092f3d424 (patch)
tree: 33abbe926c7389d3f2b90d5e30f7145af999f85e /crates/fidget-spinner-cli/src/mcp/service.rs
parent: 639b39c43097889736306df5ec22f8692af743dd (diff)
downloadfidget_spinner-203d4a93e1aaa5e325e8e6999bf26fa092f3d424.zip
Adopt typed MCP projections and polish UI
Diffstat (limited to 'crates/fidget-spinner-cli/src/mcp/service.rs')
-rw-r--r-- crates/fidget-spinner-cli/src/mcp/service.rs 748
1 file changed, 654 insertions(+), 94 deletions(-)
diff --git a/crates/fidget-spinner-cli/src/mcp/service.rs b/crates/fidget-spinner-cli/src/mcp/service.rs
index d500156..3ce68ae 100644
--- a/crates/fidget-spinner-cli/src/mcp/service.rs
+++ b/crates/fidget-spinner-cli/src/mcp/service.rs
@@ -7,8 +7,8 @@ use std::time::UNIX_EPOCH;
use camino::{Utf8Path, Utf8PathBuf};
use fidget_spinner_core::{
- ArtifactKind, CommandRecipe, ExecutionBackend, ExperimentAnalysis, ExperimentStatus,
- FieldValueType, FrontierVerdict, MetricUnit, MetricVisibility, NonEmptyText,
+ ArtifactKind, AttachmentTargetRef, CommandRecipe, ExecutionBackend, ExperimentAnalysis,
+ ExperimentStatus, FieldValueType, FrontierVerdict, MetricUnit, MetricVisibility, NonEmptyText,
OptimizationObjective, RunDimensionValue, Slug, TagName,
};
use fidget_spinner_store_sqlite::{
@@ -19,15 +19,17 @@ use fidget_spinner_store_sqlite::{
MetricBestQuery, MetricKeySummary, MetricKeysQuery, MetricRankOrder, MetricScope,
OpenExperimentRequest, ProjectStatus, ProjectStore, StoreError, TextPatch,
UpdateArtifactRequest, UpdateExperimentRequest, UpdateFrontierBriefRequest,
- UpdateHypothesisRequest, VertexSelector,
+ UpdateHypothesisRequest, VertexSelector, VertexSummary,
};
use serde::Deserialize;
use serde_json::{Map, Value, json};
use crate::mcp::fault::{FaultKind, FaultRecord, FaultStage};
use crate::mcp::output::{
- ToolOutput, detailed_tool_output, split_presentation, tool_output, tool_success,
+ ToolOutput, fallback_detailed_tool_output, fallback_tool_output, projected_tool_output,
+ split_presentation, tool_success,
};
+use crate::mcp::projection;
use crate::mcp::protocol::{TRANSIENT_ONCE_ENV, TRANSIENT_ONCE_MARKER_ENV, WorkerOperation};
pub(crate) struct WorkerService {
@@ -101,7 +103,13 @@ impl WorkerService {
TagName::new(args.name).map_err(store_fault(&operation))?,
NonEmptyText::new(args.description).map_err(store_fault(&operation))?,
));
- tool_output(&tag, FaultStage::Worker, &operation)?
+ fallback_tool_output(
+ &tag,
+ &tag,
+ libmcp::SurfaceKind::Mutation,
+ FaultStage::Worker,
+ &operation,
+ )?
}
"tag.list" => tag_list_output(&lift!(self.store.list_tags()), &operation)?,
"frontier.create" => {
@@ -118,7 +126,7 @@ impl WorkerService {
.map_err(store_fault(&operation))?,
})
);
- frontier_record_output(&frontier, &operation)?
+ frontier_record_output(&self.store, &frontier, &operation)?
}
"frontier.list" => {
frontier_list_output(&lift!(self.store.list_frontiers()), &operation)?
@@ -126,6 +134,7 @@ impl WorkerService {
"frontier.read" => {
let args = deserialize::<FrontierSelectorArgs>(arguments)?;
frontier_record_output(
+ &self.store,
&lift!(self.store.read_frontier(&args.frontier)),
&operation,
)?
@@ -173,7 +182,7 @@ impl WorkerService {
.transpose()?,
})
);
- frontier_record_output(&frontier, &operation)?
+ frontier_record_output(&self.store, &frontier, &operation)?
}
"frontier.history" => {
let args = deserialize::<FrontierSelectorArgs>(arguments)?;
@@ -219,6 +228,7 @@ impl WorkerService {
"hypothesis.read" => {
let args = deserialize::<HypothesisSelectorArgs>(arguments)?;
hypothesis_detail_output(
+ &self.store,
&lift!(self.store.read_hypothesis(&args.hypothesis)),
&operation,
)?
@@ -303,6 +313,7 @@ impl WorkerService {
"experiment.read" => {
let args = deserialize::<ExperimentSelectorArgs>(arguments)?;
experiment_detail_output(
+ &self.store,
&lift!(self.store.read_experiment(&args.experiment)),
&operation,
)?
@@ -409,6 +420,7 @@ impl WorkerService {
"artifact.read" => {
let args = deserialize::<ArtifactSelectorArgs>(arguments)?;
artifact_detail_output(
+ &self.store,
&lift!(self.store.read_artifact(&args.artifact)),
&operation,
)?
@@ -446,20 +458,23 @@ impl WorkerService {
}
"metric.define" => {
let args = deserialize::<MetricDefineArgs>(arguments)?;
- tool_output(
- &lift!(
- self.store.define_metric(DefineMetricRequest {
- key: NonEmptyText::new(args.key).map_err(store_fault(&operation))?,
- unit: args.unit,
- objective: args.objective,
- visibility: args.visibility.unwrap_or(MetricVisibility::Canonical),
- description: args
- .description
- .map(NonEmptyText::new)
- .transpose()
- .map_err(store_fault(&operation))?,
- })
- ),
+ let metric = lift!(
+ self.store.define_metric(DefineMetricRequest {
+ key: NonEmptyText::new(args.key).map_err(store_fault(&operation))?,
+ unit: args.unit,
+ objective: args.objective,
+ visibility: args.visibility.unwrap_or(MetricVisibility::Canonical),
+ description: args
+ .description
+ .map(NonEmptyText::new)
+ .transpose()
+ .map_err(store_fault(&operation))?,
+ })
+ );
+ fallback_tool_output(
+ &metric,
+ &metric,
+ libmcp::SurfaceKind::Mutation,
FaultStage::Worker,
&operation,
)?
@@ -491,27 +506,35 @@ impl WorkerService {
}
"run.dimension.define" => {
let args = deserialize::<DimensionDefineArgs>(arguments)?;
- tool_output(
- &lift!(
- self.store.define_run_dimension(DefineRunDimensionRequest {
- key: NonEmptyText::new(args.key).map_err(store_fault(&operation))?,
- value_type: args.value_type,
- description: args
- .description
- .map(NonEmptyText::new)
- .transpose()
- .map_err(store_fault(&operation))?,
- })
- ),
+ let dimension = lift!(
+ self.store.define_run_dimension(DefineRunDimensionRequest {
+ key: NonEmptyText::new(args.key).map_err(store_fault(&operation))?,
+ value_type: args.value_type,
+ description: args
+ .description
+ .map(NonEmptyText::new)
+ .transpose()
+ .map_err(store_fault(&operation))?,
+ })
+ );
+ fallback_tool_output(
+ &dimension,
+ &dimension,
+ libmcp::SurfaceKind::Mutation,
+ FaultStage::Worker,
+ &operation,
+ )?
+ }
+ "run.dimension.list" => {
+ let dimensions = lift!(self.store.list_run_dimensions());
+ fallback_tool_output(
+ &dimensions,
+ &dimensions,
+ libmcp::SurfaceKind::List,
FaultStage::Worker,
&operation,
)?
}
- "run.dimension.list" => tool_output(
- &lift!(self.store.list_run_dimensions()),
- FaultStage::Worker,
- &operation,
- )?,
other => {
return Err(FaultRecord::new(
FaultKind::InvalidInput,
@@ -1019,7 +1042,7 @@ fn project_status_output(
"open_experiment_count": status.open_experiment_count,
"artifact_count": status.artifact_count,
});
- detailed_tool_output(
+ fallback_detailed_tool_output(
&concise,
status,
[
@@ -1035,6 +1058,7 @@ fn project_status_output(
]
.join("\n"),
None,
+ libmcp::SurfaceKind::Overview,
FaultStage::Worker,
operation,
)
@@ -1048,7 +1072,7 @@ fn tag_list_output(
"count": tags.len(),
"tags": tags,
});
- detailed_tool_output(
+ fallback_detailed_tool_output(
&concise,
&concise,
if tags.is_empty() {
@@ -1060,6 +1084,7 @@ fn tag_list_output(
.join("\n")
},
None,
+ libmcp::SurfaceKind::List,
FaultStage::Worker,
operation,
)
@@ -1069,10 +1094,9 @@ fn frontier_list_output(
frontiers: &[FrontierSummary],
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- let concise = json!({ "count": frontiers.len(), "frontiers": frontiers });
- detailed_tool_output(
- &concise,
- &concise,
+ let projection = projection::frontier_list(frontiers);
+ projected_tool_output(
+ &projection,
if frontiers.is_empty() {
"no frontiers".to_owned()
} else {
@@ -1097,9 +1121,11 @@ fn frontier_list_output(
}
fn frontier_record_output(
+ store: &ProjectStore,
frontier: &fidget_spinner_core::FrontierRecord,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
+ let projection = projection::frontier_record(store, frontier, operation)?;
let mut lines = vec![format!(
"frontier {} — {}",
frontier.slug, frontier.objective
@@ -1133,9 +1159,8 @@ fn frontier_record_output(
.join("; ")
));
}
- detailed_tool_output(
- &frontier,
- frontier,
+ projected_tool_output(
+ &projection,
lines.join("\n"),
None,
FaultStage::Worker,
@@ -1147,6 +1172,7 @@ fn frontier_open_output(
projection: &FrontierOpenProjection,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
+ let output_projection = projection::frontier_open(projection);
let mut lines = vec![format!(
"frontier {} — {}",
projection.frontier.slug, projection.frontier.objective
@@ -1209,9 +1235,8 @@ fn frontier_open_output(
));
}
}
- detailed_tool_output(
- projection,
- projection,
+ projected_tool_output(
+ &output_projection,
lines.join("\n"),
None,
FaultStage::Worker,
@@ -1223,9 +1248,9 @@ fn hypothesis_record_output(
hypothesis: &fidget_spinner_core::HypothesisRecord,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- detailed_tool_output(
- hypothesis,
- hypothesis,
+ let projection = projection::hypothesis_record(hypothesis);
+ projected_tool_output(
+ &projection,
format!("hypothesis {} — {}", hypothesis.slug, hypothesis.summary),
None,
FaultStage::Worker,
@@ -1237,10 +1262,9 @@ fn hypothesis_list_output(
hypotheses: &[fidget_spinner_store_sqlite::HypothesisSummary],
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- let concise = json!({ "count": hypotheses.len(), "hypotheses": hypotheses });
- detailed_tool_output(
- &concise,
- &concise,
+ let projection = projection::hypothesis_list(hypotheses);
+ projected_tool_output(
+ &projection,
if hypotheses.is_empty() {
"no hypotheses".to_owned()
} else {
@@ -1269,9 +1293,11 @@ fn hypothesis_list_output(
}
fn hypothesis_detail_output(
+ store: &ProjectStore,
detail: &fidget_spinner_store_sqlite::HypothesisDetail,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
+ let projection = projection::hypothesis_detail(store, detail, operation)?;
let mut lines = vec![
format!(
"hypothesis {} — {}",
@@ -1299,9 +1325,8 @@ fn hypothesis_detail_output(
detail.closed_experiments.len(),
detail.artifacts.len()
));
- detailed_tool_output(
- detail,
- detail,
+ projected_tool_output(
+ &projection,
lines.join("\n"),
None,
FaultStage::Worker,
@@ -1313,6 +1338,7 @@ fn experiment_record_output(
experiment: &fidget_spinner_core::ExperimentRecord,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
+ let projection = projection::experiment_record(experiment);
let mut line = format!("experiment {} — {}", experiment.slug, experiment.title);
if let Some(outcome) = experiment.outcome.as_ref() {
let _ = write!(
@@ -1325,24 +1351,16 @@ fn experiment_record_output(
} else {
let _ = write!(line, " | open");
}
- detailed_tool_output(
- experiment,
- experiment,
- line,
- None,
- FaultStage::Worker,
- operation,
- )
+ projected_tool_output(&projection, line, None, FaultStage::Worker, operation)
}
fn experiment_list_output(
experiments: &[fidget_spinner_store_sqlite::ExperimentSummary],
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- let concise = json!({ "count": experiments.len(), "experiments": experiments });
- detailed_tool_output(
- &concise,
- &concise,
+ let projection = projection::experiment_list(experiments);
+ projected_tool_output(
+ &projection,
if experiments.is_empty() {
"no experiments".to_owned()
} else {
@@ -1374,9 +1392,11 @@ fn experiment_list_output(
}
fn experiment_detail_output(
+ store: &ProjectStore,
detail: &fidget_spinner_store_sqlite::ExperimentDetail,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
+ let projection = projection::experiment_detail(store, detail, operation)?;
let mut lines = vec![format!(
"experiment {} — {}",
detail.record.slug, detail.record.title
@@ -1402,9 +1422,8 @@ fn experiment_detail_output(
detail.children.len(),
detail.artifacts.len()
));
- detailed_tool_output(
- detail,
- detail,
+ projected_tool_output(
+ &projection,
lines.join("\n"),
None,
FaultStage::Worker,
@@ -1416,9 +1435,9 @@ fn artifact_record_output(
artifact: &fidget_spinner_core::ArtifactRecord,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- detailed_tool_output(
- artifact,
- artifact,
+ let projection = projection::artifact_record(artifact);
+ projected_tool_output(
+ &projection,
format!(
"artifact {} — {} -> {}",
artifact.slug, artifact.label, artifact.locator
@@ -1433,10 +1452,9 @@ fn artifact_list_output(
artifacts: &[fidget_spinner_store_sqlite::ArtifactSummary],
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- let concise = json!({ "count": artifacts.len(), "artifacts": artifacts });
- detailed_tool_output(
- &concise,
- &concise,
+ let projection = projection::artifact_list(artifacts);
+ projected_tool_output(
+ &projection,
if artifacts.is_empty() {
"no artifacts".to_owned()
} else {
@@ -1458,9 +1476,11 @@ fn artifact_list_output(
}
fn artifact_detail_output(
+ store: &ProjectStore,
detail: &fidget_spinner_store_sqlite::ArtifactDetail,
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
+ let projection = projection::artifact_detail(store, detail, operation)?;
let mut lines = vec![format!(
"artifact {} — {} -> {}",
detail.record.slug, detail.record.label, detail.record.locator
@@ -1468,9 +1488,8 @@ fn artifact_detail_output(
if !detail.attachments.is_empty() {
lines.push(format!("attachments: {}", detail.attachments.len()));
}
- detailed_tool_output(
- detail,
- detail,
+ projected_tool_output(
+ &projection,
lines.join("\n"),
None,
FaultStage::Worker,
@@ -1482,10 +1501,9 @@ fn metric_keys_output(
keys: &[MetricKeySummary],
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- let concise = json!({ "count": keys.len(), "metrics": keys });
- detailed_tool_output(
- &concise,
- &concise,
+ let projection = projection::metric_keys(keys);
+ projected_tool_output(
+ &projection,
if keys.is_empty() {
"no metrics".to_owned()
} else {
@@ -1513,10 +1531,9 @@ fn metric_best_output(
entries: &[MetricBestEntry],
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
- let concise = json!({ "count": entries.len(), "entries": entries });
- detailed_tool_output(
- &concise,
- &concise,
+ let projection = projection::metric_best(entries);
+ projected_tool_output(
+ &projection,
if entries.is_empty() {
"no matching experiments".to_owned()
} else {
@@ -1550,7 +1567,7 @@ fn history_output(
operation: &str,
) -> Result<ToolOutput, FaultRecord> {
let concise = json!({ "count": history.len(), "history": history });
- detailed_tool_output(
+ fallback_detailed_tool_output(
&concise,
&concise,
if history.is_empty() {
@@ -1568,7 +1585,550 @@ fn history_output(
.join("\n")
},
None,
+ libmcp::SurfaceKind::List,
FaultStage::Worker,
operation,
)
}
+
+#[allow(
+ dead_code,
+ reason = "replaced by typed projection structs in crate::mcp::projection"
+)]
+#[allow(
+ clippy::wildcard_imports,
+ reason = "legacy helpers are quarantined pending full purge"
+)]
+mod legacy_projection_values {
+ use super::*;
+
+ fn frontier_summary_value(frontier: &FrontierSummary) -> Value {
+ json!({
+ "slug": frontier.slug,
+ "label": frontier.label,
+ "objective": frontier.objective,
+ "status": frontier.status,
+ "active_hypothesis_count": frontier.active_hypothesis_count,
+ "open_experiment_count": frontier.open_experiment_count,
+ "updated_at": timestamp_value(frontier.updated_at),
+ })
+ }
+
+ fn frontier_record_value(
+ store: &ProjectStore,
+ frontier: &fidget_spinner_core::FrontierRecord,
+ operation: &str,
+ ) -> Result<Value, FaultRecord> {
+ let roadmap = frontier
+ .brief
+ .roadmap
+ .iter()
+ .map(|item| {
+ let hypothesis = store
+ .read_hypothesis(&item.hypothesis_id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(json!({
+ "rank": item.rank,
+ "hypothesis": {
+ "slug": hypothesis.record.slug,
+ "title": hypothesis.record.title,
+ "summary": hypothesis.record.summary,
+ },
+ "summary": item.summary,
+ }))
+ })
+ .collect::<Result<Vec<_>, FaultRecord>>()?;
+ Ok(json!({
+ "record": {
+ "slug": frontier.slug,
+ "label": frontier.label,
+ "objective": frontier.objective,
+ "status": frontier.status,
+ "revision": frontier.revision,
+ "created_at": timestamp_value(frontier.created_at),
+ "updated_at": timestamp_value(frontier.updated_at),
+ "brief": {
+ "situation": frontier.brief.situation,
+ "roadmap": roadmap,
+ "unknowns": frontier.brief.unknowns,
+ "revision": frontier.brief.revision,
+ "updated_at": frontier.brief.updated_at.map(timestamp_value),
+ },
+ }
+ }))
+ }
+
+ fn frontier_open_value(projection: &FrontierOpenProjection) -> Value {
+ let roadmap = projection
+ .frontier
+ .brief
+ .roadmap
+ .iter()
+ .map(|item| {
+ let hypothesis = projection
+ .active_hypotheses
+ .iter()
+ .find(|state| state.hypothesis.id == item.hypothesis_id)
+ .map(|state| {
+ json!({
+ "slug": state.hypothesis.slug,
+ "title": state.hypothesis.title,
+ "summary": state.hypothesis.summary,
+ })
+ });
+ json!({
+ "rank": item.rank,
+ "hypothesis": hypothesis,
+ "summary": item.summary,
+ })
+ })
+ .collect::<Vec<_>>();
+ json!({
+ "frontier": {
+ "slug": projection.frontier.slug,
+ "label": projection.frontier.label,
+ "objective": projection.frontier.objective,
+ "status": projection.frontier.status,
+ "revision": projection.frontier.revision,
+ "created_at": timestamp_value(projection.frontier.created_at),
+ "updated_at": timestamp_value(projection.frontier.updated_at),
+ "brief": {
+ "situation": projection.frontier.brief.situation,
+ "roadmap": roadmap,
+ "unknowns": projection.frontier.brief.unknowns,
+ "revision": projection.frontier.brief.revision,
+ "updated_at": projection.frontier.brief.updated_at.map(timestamp_value),
+ },
+ },
+ "active_tags": projection.active_tags,
+ "active_metric_keys": projection
+ .active_metric_keys
+ .iter()
+ .map(metric_key_summary_value)
+ .collect::<Vec<_>>(),
+ "active_hypotheses": projection
+ .active_hypotheses
+ .iter()
+ .map(hypothesis_current_state_value)
+ .collect::<Vec<_>>(),
+ "open_experiments": projection
+ .open_experiments
+ .iter()
+ .map(experiment_summary_value)
+ .collect::<Vec<_>>(),
+ })
+ }
+
+ fn hypothesis_summary_value(
+ hypothesis: &fidget_spinner_store_sqlite::HypothesisSummary,
+ ) -> Value {
+ json!({
+ "slug": hypothesis.slug,
+ "archived": hypothesis.archived,
+ "title": hypothesis.title,
+ "summary": hypothesis.summary,
+ "tags": hypothesis.tags,
+ "open_experiment_count": hypothesis.open_experiment_count,
+ "latest_verdict": hypothesis.latest_verdict,
+ "updated_at": timestamp_value(hypothesis.updated_at),
+ })
+ }
+
+ fn hypothesis_record_value(hypothesis: &fidget_spinner_core::HypothesisRecord) -> Value {
+ json!({
+ "slug": hypothesis.slug,
+ "archived": hypothesis.archived,
+ "title": hypothesis.title,
+ "summary": hypothesis.summary,
+ "body": hypothesis.body,
+ "tags": hypothesis.tags,
+ "revision": hypothesis.revision,
+ "created_at": timestamp_value(hypothesis.created_at),
+ "updated_at": timestamp_value(hypothesis.updated_at),
+ })
+ }
+
+ fn hypothesis_detail_concise_value(
+ store: &ProjectStore,
+ detail: &fidget_spinner_store_sqlite::HypothesisDetail,
+ operation: &str,
+ ) -> Result<Value, FaultRecord> {
+ let frontier = store
+ .read_frontier(&detail.record.frontier_id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(json!({
+ "record": {
+ "slug": detail.record.slug,
+ "archived": detail.record.archived,
+ "title": detail.record.title,
+ "summary": detail.record.summary,
+ "tags": detail.record.tags,
+ "revision": detail.record.revision,
+ "updated_at": timestamp_value(detail.record.updated_at),
+ },
+ "frontier": {
+ "slug": frontier.slug,
+ "label": frontier.label,
+ "status": frontier.status,
+ },
+ "parents": detail.parents.len(),
+ "children": detail.children.len(),
+ "open_experiments": detail
+ .open_experiments
+ .iter()
+ .map(experiment_summary_value)
+ .collect::<Vec<_>>(),
+ "latest_closed_experiment": detail
+ .closed_experiments
+ .first()
+ .map(experiment_summary_value),
+ "artifact_count": detail.artifacts.len(),
+ }))
+ }
+
+ fn hypothesis_detail_full_value(
+ store: &ProjectStore,
+ detail: &fidget_spinner_store_sqlite::HypothesisDetail,
+ operation: &str,
+ ) -> Result<Value, FaultRecord> {
+ let frontier = store
+ .read_frontier(&detail.record.frontier_id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(json!({
+ "record": hypothesis_record_value(&detail.record),
+ "frontier": {
+ "slug": frontier.slug,
+ "label": frontier.label,
+ "status": frontier.status,
+ },
+ "parents": detail.parents.iter().map(vertex_summary_value).collect::<Vec<_>>(),
+ "children": detail.children.iter().map(vertex_summary_value).collect::<Vec<_>>(),
+ "open_experiments": detail
+ .open_experiments
+ .iter()
+ .map(experiment_summary_value)
+ .collect::<Vec<_>>(),
+ "closed_experiments": detail
+ .closed_experiments
+ .iter()
+ .map(experiment_summary_value)
+ .collect::<Vec<_>>(),
+ "artifacts": detail.artifacts.iter().map(artifact_summary_value).collect::<Vec<_>>(),
+ }))
+ }
+
+ fn experiment_summary_value(
+ experiment: &fidget_spinner_store_sqlite::ExperimentSummary,
+ ) -> Value {
+ json!({
+ "slug": experiment.slug,
+ "archived": experiment.archived,
+ "title": experiment.title,
+ "summary": experiment.summary,
+ "tags": experiment.tags,
+ "status": experiment.status,
+ "verdict": experiment.verdict,
+ "primary_metric": experiment
+ .primary_metric
+ .as_ref()
+ .map(metric_observation_summary_value),
+ "updated_at": timestamp_value(experiment.updated_at),
+ "closed_at": experiment.closed_at.map(timestamp_value),
+ })
+ }
+
+ fn experiment_record_value(experiment: &fidget_spinner_core::ExperimentRecord) -> Value {
+ json!({
+ "slug": experiment.slug,
+ "archived": experiment.archived,
+ "title": experiment.title,
+ "summary": experiment.summary,
+ "tags": experiment.tags,
+ "status": experiment.status,
+ "outcome": experiment.outcome.as_ref().map(experiment_outcome_value),
+ "revision": experiment.revision,
+ "created_at": timestamp_value(experiment.created_at),
+ "updated_at": timestamp_value(experiment.updated_at),
+ })
+ }
+
+ fn experiment_detail_concise_value(
+ store: &ProjectStore,
+ detail: &fidget_spinner_store_sqlite::ExperimentDetail,
+ operation: &str,
+ ) -> Result<Value, FaultRecord> {
+ let frontier = store
+ .read_frontier(&detail.record.frontier_id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(json!({
+ "record": {
+ "slug": detail.record.slug,
+ "archived": detail.record.archived,
+ "title": detail.record.title,
+ "summary": detail.record.summary,
+ "tags": detail.record.tags,
+ "status": detail.record.status,
+ "verdict": detail.record.outcome.as_ref().map(|outcome| outcome.verdict),
+ "revision": detail.record.revision,
+ "updated_at": timestamp_value(detail.record.updated_at),
+ },
+ "frontier": {
+ "slug": frontier.slug,
+ "label": frontier.label,
+ "status": frontier.status,
+ },
+ "owning_hypothesis": hypothesis_summary_value(&detail.owning_hypothesis),
+ "parents": detail.parents.len(),
+ "children": detail.children.len(),
+ "artifact_count": detail.artifacts.len(),
+ "outcome": detail.record.outcome.as_ref().map(experiment_outcome_value),
+ }))
+ }
+
+ fn experiment_detail_full_value(
+ store: &ProjectStore,
+ detail: &fidget_spinner_store_sqlite::ExperimentDetail,
+ operation: &str,
+ ) -> Result<Value, FaultRecord> {
+ let frontier = store
+ .read_frontier(&detail.record.frontier_id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(json!({
+ "record": experiment_record_value(&detail.record),
+ "frontier": {
+ "slug": frontier.slug,
+ "label": frontier.label,
+ "status": frontier.status,
+ },
+ "owning_hypothesis": hypothesis_summary_value(&detail.owning_hypothesis),
+ "parents": detail.parents.iter().map(vertex_summary_value).collect::<Vec<_>>(),
+ "children": detail.children.iter().map(vertex_summary_value).collect::<Vec<_>>(),
+ "artifacts": detail.artifacts.iter().map(artifact_summary_value).collect::<Vec<_>>(),
+ }))
+ }
+
+ fn artifact_summary_value(artifact: &fidget_spinner_store_sqlite::ArtifactSummary) -> Value {
+ json!({
+ "slug": artifact.slug,
+ "kind": artifact.kind,
+ "label": artifact.label,
+ "summary": artifact.summary,
+ "locator": artifact.locator,
+ "media_type": artifact.media_type,
+ "updated_at": timestamp_value(artifact.updated_at),
+ })
+ }
+
+ fn artifact_record_value(artifact: &fidget_spinner_core::ArtifactRecord) -> Value {
+ json!({
+ "slug": artifact.slug,
+ "kind": artifact.kind,
+ "label": artifact.label,
+ "summary": artifact.summary,
+ "locator": artifact.locator,
+ "media_type": artifact.media_type,
+ "revision": artifact.revision,
+ "created_at": timestamp_value(artifact.created_at),
+ "updated_at": timestamp_value(artifact.updated_at),
+ })
+ }
+
+ fn artifact_detail_concise_value(
+ detail: &fidget_spinner_store_sqlite::ArtifactDetail,
+ ) -> Value {
+ json!({
+ "record": {
+ "slug": detail.record.slug,
+ "kind": detail.record.kind,
+ "label": detail.record.label,
+ "summary": detail.record.summary,
+ "locator": detail.record.locator,
+ "media_type": detail.record.media_type,
+ "revision": detail.record.revision,
+ "updated_at": timestamp_value(detail.record.updated_at),
+ },
+ "attachment_count": detail.attachments.len(),
+ })
+ }
+
+ fn artifact_detail_full_value(
+ store: &ProjectStore,
+ detail: &fidget_spinner_store_sqlite::ArtifactDetail,
+ operation: &str,
+ ) -> Result<Value, FaultRecord> {
+ let attachments = detail
+ .attachments
+ .iter()
+ .copied()
+ .map(|attachment| attachment_target_value(store, attachment, operation))
+ .collect::<Result<Vec<_>, FaultRecord>>()?;
+ Ok(json!({
+ "record": artifact_record_value(&detail.record),
+ "attachments": attachments,
+ }))
+ }
+
+ fn hypothesis_current_state_value(
+ state: &fidget_spinner_store_sqlite::HypothesisCurrentState,
+ ) -> Value {
+ json!({
+ "hypothesis": hypothesis_summary_value(&state.hypothesis),
+ "open_experiments": state
+ .open_experiments
+ .iter()
+ .map(experiment_summary_value)
+ .collect::<Vec<_>>(),
+ "latest_closed_experiment": state
+ .latest_closed_experiment
+ .as_ref()
+ .map(experiment_summary_value),
+ })
+ }
+
+ fn metric_key_summary_value(metric: &MetricKeySummary) -> Value {
+ json!({
+ "key": metric.key,
+ "unit": metric.unit,
+ "objective": metric.objective,
+ "visibility": metric.visibility,
+ "description": metric.description,
+ "reference_count": metric.reference_count,
+ })
+ }
+
+ fn metric_best_entry_value(entry: &MetricBestEntry) -> Value {
+ json!({
+ "experiment": experiment_summary_value(&entry.experiment),
+ "hypothesis": hypothesis_summary_value(&entry.hypothesis),
+ "value": entry.value,
+ "dimensions": dimension_map_value(&entry.dimensions),
+ })
+ }
+
+ fn metric_observation_summary_value(
+ metric: &fidget_spinner_store_sqlite::MetricObservationSummary,
+ ) -> Value {
+ json!({
+ "key": metric.key,
+ "value": metric.value,
+ "unit": metric.unit,
+ "objective": metric.objective,
+ })
+ }
+
+ fn experiment_outcome_value(outcome: &fidget_spinner_core::ExperimentOutcome) -> Value {
+ json!({
+ "backend": outcome.backend,
+ "command": command_recipe_value(&outcome.command),
+ "dimensions": dimension_map_value(&outcome.dimensions),
+ "primary_metric": metric_value_value(&outcome.primary_metric),
+ "supporting_metrics": outcome
+ .supporting_metrics
+ .iter()
+ .map(metric_value_value)
+ .collect::<Vec<_>>(),
+ "verdict": outcome.verdict,
+ "rationale": outcome.rationale,
+ "analysis": outcome.analysis.as_ref().map(experiment_analysis_value),
+ "closed_at": timestamp_value(outcome.closed_at),
+ })
+ }
+
+ fn experiment_analysis_value(analysis: &ExperimentAnalysis) -> Value {
+ json!({
+ "summary": analysis.summary,
+ "body": analysis.body,
+ })
+ }
+
+ fn metric_value_value(metric: &fidget_spinner_core::MetricValue) -> Value {
+ json!({
+ "key": metric.key,
+ "value": metric.value,
+ })
+ }
+
+ fn command_recipe_value(command: &CommandRecipe) -> Value {
+ json!({
+ "argv": command.argv,
+ "working_directory": command.working_directory,
+ "env": command.env,
+ })
+ }
+
+ fn dimension_map_value(dimensions: &BTreeMap<NonEmptyText, RunDimensionValue>) -> Value {
+ let mut object = Map::new();
+ for (key, value) in dimensions {
+ let _ = object.insert(key.to_string(), run_dimension_value(value));
+ }
+ Value::Object(object)
+ }
+
+ fn run_dimension_value(value: &RunDimensionValue) -> Value {
+ match value {
+ RunDimensionValue::String(value) => Value::String(value.to_string()),
+ RunDimensionValue::Numeric(value) => json!(value),
+ RunDimensionValue::Boolean(value) => json!(value),
+ RunDimensionValue::Timestamp(value) => Value::String(value.to_string()),
+ }
+ }
+
+ fn vertex_summary_value(vertex: &VertexSummary) -> Value {
+ json!({
+ "kind": vertex.vertex.kind().as_str(),
+ "slug": vertex.slug,
+ "archived": vertex.archived,
+ "title": vertex.title,
+ "summary": vertex.summary,
+ "updated_at": timestamp_value(vertex.updated_at),
+ })
+ }
+
+ fn attachment_target_value(
+ store: &ProjectStore,
+ attachment: AttachmentTargetRef,
+ operation: &str,
+ ) -> Result<Value, FaultRecord> {
+ match attachment {
+ AttachmentTargetRef::Frontier(id) => {
+ let frontier = store
+ .read_frontier(&id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(json!({
+ "kind": "frontier",
+ "slug": frontier.slug,
+ "label": frontier.label,
+ "status": frontier.status,
+ }))
+ }
+ AttachmentTargetRef::Hypothesis(id) => {
+ let hypothesis = store
+ .read_hypothesis(&id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(json!({
+ "kind": "hypothesis",
+ "slug": hypothesis.record.slug,
+ "title": hypothesis.record.title,
+ "summary": hypothesis.record.summary,
+ }))
+ }
+ AttachmentTargetRef::Experiment(id) => {
+ let experiment = store
+ .read_experiment(&id.to_string())
+ .map_err(store_fault(operation))?;
+ Ok(json!({
+ "kind": "experiment",
+ "slug": experiment.record.slug,
+ "title": experiment.record.title,
+ "summary": experiment.record.summary,
+ }))
+ }
+ }
+ }
+
+ fn timestamp_value(timestamp: time::OffsetDateTime) -> String {
+ timestamp
+ .format(&time::format_description::well_known::Rfc3339)
+ .unwrap_or_else(|_| timestamp.unix_timestamp().to_string())
+ }
+}