diff options
49 files changed, 15488 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..71757ce --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +/target +/__pycache__ diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..577733e --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,1865 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "adequate-rust-mcp" +version = "1.0.0" +dependencies = [ + "libmcp", + "notify", + "ra-mcp-domain", + "ra-mcp-engine", + "rmcp", + "schemars", + "serde", + "serde_json", + "serial_test", + "tempfile", + "tokio", + "toml", + "tracing", + "tracing-subscriber", + "url", +] + +[[package]] +name = "aho-corasick" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" +dependencies = [ + "memchr", +] + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "assert_matches" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" + +[[package]] +name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" + +[[package]] +name = "bumpalo" +version = "3.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "cc" +version = "1.2.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aebf35691d1bfb0ac386a69bac2fde4dd276fb618cf8bf4f5318fe285e821bb2" +dependencies = [ + "find-msvc-tools", + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "chrono" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-link", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "darling" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25ae13da2f202d56bd7f91c25fba009e7717a1e4a1cc98a76d844b65ae912e9d" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9865a50f7c335f53564bb694ef660825eb8610e0a53d3e11bf1b0d3df31e03b0" +dependencies = [ + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3984ec7bd6cfa798e62b4a642426a5be0e68f9401cfc2a01e3fa9ea2fcdb8d" +dependencies = [ + "darling_core", + "quote", + "syn", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "dyn-clone" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] 
+name = "find-msvc-tools" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" + +[[package]] +name = "fluent-uri" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17c704e9dbe1ddd863da1e6ff3567795087b1eb201ce80d8fa81162e1516500d" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fsevent-sys" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" +dependencies = [ + "libc", +] + +[[package]] +name = "futures" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = "futures-executor" +version = "0.3.32" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" + +[[package]] +name = "futures-task" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" + +[[package]] +name = "futures-util" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "slab", +] + +[[package]] +name = "getrandom" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139ef39800118c7683f2fd3c98c1b23c09ae076556b435f8e9064ae108aaeeec" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", + "wasip3", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash", +] + +[[package]] +name = 
"hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "iana-time-zone" +version = "0.1.65" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "icu_collections" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "serde", + "serde_core", +] + +[[package]] +name = "inotify" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" +dependencies = [ + "bitflags 2.11.0", + "inotify-sys", + "libc", +] + +[[package]] +name = "inotify-sys" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" +dependencies = [ + "libc", +] + +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "js-sys" +version = "0.3.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14dc6f6450b3f6d4ed5b16327f38fed626d375a886159ca555bd7822c0c3a5a6" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "kqueue" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" +dependencies = [ + "kqueue-sys", + "libc", +] + +[[package]] +name = "kqueue-sys" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" +dependencies = [ + "bitflags 1.3.2", + "libc", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + +[[package]] +name = "libc" +version = "0.2.182" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" + +[[package]] +name = "libmcp" +version = "1.1.0" +source = "git+https://git.swarm.moe/libmcp.git?rev=478b0bc47fade5864f4f397de7ea519beddab749#478b0bc47fade5864f4f397de7ea519beddab749" +dependencies = [ + "schemars", + "serde", + "serde_json", + "thiserror", + "tokio", + "url", +] + +[[package]] +name = "linux-raw-sys" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" + +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "lock_api" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "lsp-types" +version = "0.97.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53353550a17c04ac46c585feb189c2db82154fc84b79c7a66c96c2c644f66071" +dependencies = [ + "bitflags 1.3.2", + "fluent-uri", + "serde", + "serde_json", + "serde_repr", +] + +[[package]] +name = "matchers" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "memchr" +version = "2.8.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "mio" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" +dependencies = [ + "libc", + "log", + "wasi", + "windows-sys 0.61.2", +] + +[[package]] +name = "notify" +version = "8.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3" +dependencies = [ + "bitflags 2.11.0", + "fsevent-sys", + "inotify", + "kqueue", + "libc", + "log", + "mio", + "notify-types", + "walkdir", + "windows-sys 0.60.2", +] + +[[package]] +name = "notify-types" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42b8cfee0e339a0337359f3c88165702ac6e600dc01c0cc9579a92d62b08477a" +dependencies = [ + "bitflags 2.11.0", +] + +[[package]] +name = "nu-ansi-term" +version = "0.50.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "parking_lot" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" 
+version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-link", +] + +[[package]] +name = "pastey" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b867cad97c0791bbd3aaa6472142568c6c9e8f71937e98379f584cfb0cf35bec" + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "ra-mcp-domain" +version = "1.0.0" +dependencies = [ + "assert_matches", + "serde", + "thiserror", +] + +[[package]] +name = "ra-mcp-engine" +version = "1.0.0" +dependencies = [ + "lsp-types", + "ra-mcp-domain", + "serde", + "serde_json", + "serial_test", + "tempfile", + "thiserror", + "tokio", + "tracing", + "url", +] + +[[package]] +name = "redox_syscall" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags 2.11.0", +] + +[[package]] +name = "ref-cast" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "regex-automata" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a" + +[[package]] +name = "rmcp" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc4c9c94680f75470ee8083a0667988b5d7b5beb70b9f998a8e51de7c682ce60" +dependencies = [ + "async-trait", + "base64", + "chrono", + "futures", + "pastey", + "pin-project-lite", + "rmcp-macros", + "schemars", + "serde", + "serde_json", + 
"thiserror", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "rmcp-macros" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90c23c8f26cae4da838fbc3eadfaecf2d549d97c04b558e7bd90526a9c28b42a" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "serde_json", + "syn", +] + +[[package]] +name = "rustix" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" +dependencies = [ + "bitflags 2.11.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "scc" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46e6f046b7fef48e2660c57ed794263155d713de679057f2d0c169bfc6e756cc" +dependencies = [ + "sdd", +] + +[[package]] +name = "schemars" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2b42f36aa1cd011945615b92222f6bf73c599a102a300334cd7f8dbeec726cc" +dependencies = [ + "chrono", + "dyn-clone", + "ref-cast", + "schemars_derive", + "serde", + "serde_json", +] + +[[package]] +name = "schemars_derive" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d115b50f4aaeea07e79c1912f645c7513d81715d0420f8bc77a18c6260b307f" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "sdd" +version = "3.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "490dcfcbfef26be6800d11870ff2df8774fa6e86d047e3e8c8a76b25655e41ca" + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_derive_internals" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "serde_repr" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + +[[package]] +name = "serial_test" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "911bd979bf1070a3f3aa7b691a3b3e9968f339ceeec89e08c280a8a22207a32f" +dependencies = [ + "futures-executor", + "futures-util", + "log", + "once_cell", + "parking_lot", + "scc", + "serial_test_derive", +] + +[[package]] +name = "serial_test_derive" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a7d91949b85b0d2fb687445e448b40d322b6b3e4af6b44a29b21d9a5f33e6d9" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" +dependencies = [ + "errno", + "libc", +] + +[[package]] +name = "slab" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "socket2" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86f4aa3ad99f2088c990dfa82d367e19cb29268ed67c574d10d0a4bfe71f07e0" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tempfile" +version = "3.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82a72c767771b47409d2345987fda8628641887d5466101319899796367354a0" +dependencies = [ + "fastrand", + "getrandom", + "once_cell", + "rustix", + "windows-sys 0.61.2", +] + +[[package]] +name = "thiserror" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" 
+dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tokio" +version = "1.49.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" +dependencies = [ + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.61.2", +] + +[[package]] +name = "tokio-macros" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-util" +version = "0.7.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + +[[package]] 
+name = "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "toml_write", + "winnow", +] + +[[package]] +name = "toml_write" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + +[[package]] +name = "tracing" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" +dependencies = [ + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex-automata", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "unicode-ident" 
+version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasip2" +version = "1.0.1+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +dependencies = [ + "wit-bindgen 0.46.0", +] + +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ 
+ "wit-bindgen 0.51.0", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.113" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60722a937f594b7fde9adb894d7c092fc1bb6612897c46368d18e7a20208eff2" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.113" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fac8c6395094b6b91c4af293f4c79371c163f9a6f56184d2c9a85f5a95f3950" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.113" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab3fabce6159dc20728033842636887e4877688ae94382766e00b180abac9d60" +dependencies = [ + "bumpalo", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.113" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de0e091bdb824da87dc01d967388880d017a0a9bc4f3bdc0d86ee9f9336e3bb5" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags 2.11.0", + "hashbrown 0.15.5", + 
"indexmap", + "semver", +] + +[[package]] +name = "winapi-util" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "windows-core" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.59.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" + +[[package]] +name = "winnow" +version = "0.7.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" +dependencies = [ + "memchr", +] + +[[package]] +name = "wit-bindgen" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" + +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags 2.11.0", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = 
"0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..5a9cf67 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,130 @@ +[workspace] +members = [ + "crates/adequate-rust-mcp", + "crates/ra-mcp-domain", + "crates/ra-mcp-engine", +] +resolver = "3" + +[workspace.package] +categories = ["development-tools", "command-line-utilities"] +description = "Crash-resilient rust-analyzer MCP with a durable host, disposable worker, and porcelain-first analysis tools." 
+edition = "2024" +keywords = ["mcp", "rust-analyzer", "lsp", "ai", "tooling"] +license = "Apache-2.0" +readme = "README.md" +repository = "https://git.swarm.moe/adequate-rust-mcp.git" +rust-version = "1.94" +version = "1.0.0" + +[workspace.dependencies] +assert_matches = "1.5.0" +bytes = "1.10.1" +futures = "0.3.31" +libmcp = { git = "https://git.swarm.moe/libmcp.git", rev = "478b0bc47fade5864f4f397de7ea519beddab749" } +lsp-types = "0.97.0" +notify = "8.2.0" +rmcp = { version = "0.16.0", features = ["server", "macros", "transport-io"] } +schemars = "1.0.4" +serde = { version = "1.0.219", features = ["derive"] } +serde_json = "1.0.143" +serial_test = "3.2.0" +tempfile = "3.22.0" +thiserror = "2.0.16" +tokio = { version = "1.47.1", features = ["full"] } +tokio-util = { version = "0.7.16", features = ["codec"] } +tracing = "0.1.41" +tracing-subscriber = { version = "0.3.20", features = ["env-filter"] } +toml = "0.8.23" +url = "2.5.7" +uuid = { version = "1.18.1", features = ["v4", "serde"] } + +[workspace.lints.rust] +elided_lifetimes_in_paths = "deny" +missing_docs = "deny" +unsafe_code = "deny" +unused_crate_dependencies = "warn" +unused_lifetimes = "deny" +unused_qualifications = "deny" +unused_results = "deny" + +[workspace.lints.clippy] +# Strict baseline. +all = { level = "deny", priority = -2 } +pedantic = { level = "deny", priority = -1 } +cargo = { level = "warn", priority = -3 } + +# Hard bans. +dbg_macro = "deny" +panic = "deny" +todo = "deny" +unimplemented = "deny" +unwrap_used = "deny" + +# Carve-out: test fixtures rely on concise expect messages. +expect_used = "allow" + +# Carve-out: dependency graph churn is external to this repository. +multiple_crate_versions = "allow" + +# Carve-out: rustdoc policy is intentionally lightweight here. +missing_errors_doc = "allow" +missing_panics_doc = "allow" + +# Carve-out: threshold/style lints that create churn without correctness wins. 
+items_after_statements = "allow" +many_single_char_names = "allow" +module_name_repetitions = "allow" +similar_names = "allow" +struct_field_names = "allow" +too_many_arguments = "allow" +too_many_lines = "allow" +unnested_or_patterns = "allow" +match_same_arms = "allow" + +# Carve-out: API-shape micro-optimizations are not worth readability cost. +needless_pass_by_value = "allow" +ref_option = "allow" +trivially_copy_pass_by_ref = "allow" +unused_async = "allow" + +# Carve-out: numeric ergonomics and formatting are intentionally pragmatic. +cast_lossless = "allow" +cast_possible_truncation = "allow" +cast_possible_wrap = "allow" +cast_precision_loss = "allow" +cast_sign_loss = "allow" +float_cmp = "allow" +implicit_hasher = "allow" +manual_let_else = "allow" +map_unwrap_or = "allow" +uninlined_format_args = "allow" + +# Carve-out: local scaffolding patterns preferred in this codebase. +ignored_unit_patterns = "allow" +must_use_candidate = "allow" +no_effect_underscore_binding = "allow" +redundant_closure_for_method_calls = "allow" +return_self_not_must_use = "allow" +used_underscore_binding = "allow" + +[workspace.metadata.adequate-rust-mcp] +format_command = ["cargo", "fmt", "--all"] +clippy_command = [ + "cargo", + "clippy", + "--workspace", + "--all-targets", + "--all-features", + "--message-format=json", +] +fix_command = [ + "cargo", + "clippy", + "--fix", + "--workspace", + "--all-targets", + "--all-features", + "--allow-dirty", + "--allow-staged", +] diff --git a/README.md b/README.md new file mode 100644 index 0000000..640056e --- /dev/null +++ b/README.md @@ -0,0 +1,221 @@ +# adequate-rust-mcp + +Industrial-grade, self-healing MCP server for `rust-analyzer`. + +## Dependency Note + +This workspace currently consumes `libmcp` from the public swarm Git surface: + +- `https://git.swarm.moe/libmcp.git` + +Cargo will fetch that dependency automatically on first build. 
+ +## Crates + +- `ra-mcp-domain`: strict domain types and lifecycle algebra +- `ra-mcp-engine`: resilient rust-analyzer transport + supervisor +- `adequate-rust-mcp`: MCP stdio server exposing analysis tools + +## Guarantees + +- automatic recovery from broken pipes and dead workers +- bounded request/startup deadlines +- typed boundaries for lifecycle, faults, and source positions +- fault-injection integration tests for restart behavior + +## Runtime Modes + +- default (`adequate-rust-mcp`): stable host process (Codex-facing stdio endpoint) +- worker (`adequate-rust-mcp --worker`): actual MCP tool server process + +The host supervises worker generations, drains pending requests with explicit JSON-RPC +errors on worker loss, and automatically respawns. + +## Quick Start + +From the workspace root: + +```bash +cargo run -p adequate-rust-mcp +``` + +To launch the disposable worker directly: + +```bash +cargo run -p adequate-rust-mcp -- --worker +``` + +## Live Reload + +Host mode watches the worker binary path for updates. On Unix it self-reexecs into +the new binary and restores live session state before spawning the next worker generation; +on other platforms it falls back to worker-only restart. It replays `initialize` and +reconstructs `notifications/initialized` if needed so replacement workers come up fully live. +In-flight requests are replayed across host/worker restarts with bounded at-least-once semantics, +so transient rollout and recovery churn is hidden from clients by default. 
+ +Environment knobs: + +- `ADEQUATE_MCP_WORKER_BINARY`: worker binary path to execute/watch (default: current executable) +- `ADEQUATE_MCP_HOST_RELOAD_DEBOUNCE_MS`: file-change debounce before reload (default: `250`) +- `ADEQUATE_MCP_HOST_RESPAWN_FLOOR_MS`: initial respawn backoff floor (default: `100`) +- `ADEQUATE_MCP_HOST_RESPAWN_CEILING_MS`: respawn backoff ceiling (default: `3000`) +- `ADEQUATE_MCP_HOST_QUEUE_CAPACITY`: max queued inbound frames while recovering (default: `512`) +- `ADEQUATE_MCP_HOST_MAX_REPLAY_ATTEMPTS`: replay budget per in-flight request before surfacing an error (default: `8`) + +## Tool Surface + +In addition to typed core tools (`hover`, `definition`, `references`, `rename_symbol`, +`diagnostics`, `clippy_diagnostics`, `fix_everything`, `health_snapshot`, `telemetry_snapshot`), the server exposes +`advanced_lsp_request` for +editing-heavy and advanced rust-analyzer capabilities: + +- completions +- completion/resolve +- code actions + resolve +- code lens + resolve +- prepare rename +- rename +- execute command +- formatting (document/range/on-type) +- symbols + workspace symbol resolve +- document links + resolve +- colors + color presentation +- linked editing range, inline value, moniker +- document/workspace diagnostics +- selection/folding/inlay hints + inlay resolve +- semantic tokens (full/range/full-delta), call hierarchy, type hierarchy + +`hover`, `definition`, `references`, `health_snapshot`, and `telemetry_snapshot` +also default to line-oriented porcelain text with `render=json` opt-in. 
+ +`diagnostics` and `clippy_diagnostics` split detail from presentation: + +- `mode`: `compact` (default), `full`, or `summary` +- `render`: `porcelain` (default line-oriented text) or `json` +- default responses are model-friendly porcelain text in `content` +- `render=json` exposes the structured payload and schema for programmatic consumers +- empty `definition`/`references`/`hover` results may include an indexing note during early rust-analyzer warm-up when the empty result could be a false negative +- transient early-startup `unlinked-file` / `not part of any crate` diagnostics are retried during the same warm-up window before surfacing to clients + +## One-Stop Autofix + +`fix_everything` is the do-the-obvious thing button for agent loops. +It runs, in order: + +- workspace formatter command (default: `cargo fmt --all`) +- workspace clippy fix command (default: `cargo clippy --fix --workspace --all-targets --all-features --allow-dirty --allow-staged`) + +The output is structured and model-friendly: + +- `success`: whether all steps exited with status `0` +- `workspace_root`: resolved workspace root path +- `steps`: per-step command argv, exit code, and bounded stdout/stderr excerpts + +## Input Normalization + +User-facing tool inputs are intentionally permissive and normalized at the boundary: + +- `file_path` accepts absolute paths, `file://` URIs, and relative paths (resolved against workspace root) +- position indices accept numeric strings and integer-like floats +- `0` indices are normalized to `1` for one-indexed fields +- alias spellings are accepted for common fields (`filePath`, `lineNumber`, `character`, `newName`, etc.) 
+- `advanced_lsp_request.method` accepts snake_case, camelCase, kebab-case, and full LSP method paths +- `diagnostics.mode` accepts `compact` (default), `full` (aliases: `raw`, `verbose`), or `summary` +- `diagnostics.render` accepts `porcelain` (default; aliases: `text`, `plain`, `plain_text`) or `json` (alias: `structured`) +- `diagnostics.max_items` and `diagnostics.max_message_chars` accept numeric strings and integer-like floats +- `diagnostics` and `clippy_diagnostics` accept either a single file (`file_path`) or many files (`file_paths`/`files`/`paths`) +- `clippy_diagnostics` accepts the same render controls as `diagnostics` +- multi-file diagnostics preserve requested file order and return one fused diagnostics list +- `render=json` compact diagnostics use descriptive field names (`severity`, `file_path`, `start_line`, etc.) and descriptive counters (`error_count`, `total_count`, ...) + +## Clippy Gate Integration + +`clippy_diagnostics` runs `cargo clippy` and filters results to the requested file. +Strictness lives in workspace lint configuration: + +- `[workspace.lints.clippy]` in `Cargo.toml` is the canonical policy (`all` + `pedantic` with explicit carve-outs) +- `./check.py` runs `cargo clippy` without inline lint flags, so commit-gate behavior is sourced from `Cargo.toml` + +The MCP worker executes: + +- `cargo clippy --workspace --all-targets --all-features --message-format=json` + +To override command shape (for example, alternate target selection), configure: + +```toml +[workspace.metadata.adequate-rust-mcp] +format_command = ["cargo", "fmt", "--all"] +clippy_command = [ + "cargo", + "clippy", + "--workspace", + "--all-targets", + "--all-features", + "--message-format=json", +] +fix_command = [ + "cargo", + "clippy", + "--fix", + "--workspace", + "--all-targets", + "--all-features", + "--allow-dirty", + "--allow-staged", +] +``` + +## Telemetry + +Use `telemetry_snapshot` as first-line triage when the system slows down or starts flapping. 
+It reports: + +- lifecycle state + generation +- uptime +- consecutive failures and restart count +- total request counters (success, response error, transport fault, retries) +- per-method counters and latency aggregates (`last`, `avg`, `max`) +- last observed method-level error text +- latest restart-triggering fault + +Useful choke signatures: + +- rising `transport_fault_count` + `restart_count`: rust-analyzer process instability or I/O link failure +- rising `response_error_count` with low restart count: semantic/query misuse or LSP-level request issues +- high `avg_latency_ms`/`max_latency_ms` in one method: method-specific hot spot in large workspaces +- non-zero `consecutive_failures` while state is `recovering`: active degradation loop + +## Host JSONL Telemetry (XDG State) + +The host process also emits append-only JSONL telemetry for path-level heat/error analysis. + +- default path: `$XDG_STATE_HOME/adequate-rust-mcp/telemetry.jsonl` +- fallback path: `$HOME/.local/state/adequate-rust-mcp/telemetry.jsonl` +- snapshot cadence: `ADEQUATE_MCP_TELEMETRY_SNAPSHOT_EVERY` (default: `100`) + +Each tool call writes a `tool_call` event with: + +- `repo_root` +- `request_id` +- `tool_name` and optional `lsp_method` +- `path_hint` (best-effort normalized path extraction) +- `latency_ms`, `replay_attempts` +- `outcome` (`ok`/`error`) +- optional `error_code`, `error_kind`, `error_message` + +Every N calls, and once on shutdown, the host writes `hot_paths_snapshot` with: + +- `hottest_paths` (highest request volume) +- `slowest_paths` (highest average latency) +- `error_count` per path + +## QA Checklist + +For a lean live MCP smoke pass against this repo, run: + +```bash +python3 qa_checklist.py +``` + +The literal step list lives in `docs/qa-checklist.md`. 
diff --git a/check.py b/check.py new file mode 100755 index 0000000..2b33dbf --- /dev/null +++ b/check.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import subprocess +import tomllib +from pathlib import Path + +ROOT = Path(__file__).resolve().parent +WORKSPACE_MANIFEST = ROOT / "Cargo.toml" +DEFAULT_CLIPPY = ["cargo", "clippy", "--workspace", "--all-targets", "--all-features"] + + +def run(argv: list[str]) -> None: + print("+", " ".join(argv), flush=True) + proc = subprocess.run(argv, cwd=ROOT) + if proc.returncode != 0: + raise SystemExit(proc.returncode) + + +def normalized_clippy_command() -> list[str]: + workspace = tomllib.loads(WORKSPACE_MANIFEST.read_text(encoding="utf-8")) + metadata = ( + workspace.get("workspace", {}) + .get("metadata", {}) + .get("adequate-rust-mcp", {}) + ) + configured = metadata.get("clippy_command") + if not ( + isinstance(configured, list) + and configured + and all(isinstance(part, str) for part in configured) + ): + return DEFAULT_CLIPPY + + # Human check output does not need machine JSON framing. 
+ command: list[str] = [] + skip_next = False + for index, part in enumerate(configured): + if skip_next: + skip_next = False + continue + if part.startswith("--message-format="): + continue + if part == "--message-format" and index + 1 < len(configured): + skip_next = True + continue + command.append(part) + return command or DEFAULT_CLIPPY + + +def main() -> None: + run(["cargo", "fmt", "--all", "--check"]) + run(normalized_clippy_command()) + run(["cargo", "test", "--workspace", "--all-targets", "--all-features"]) + + +if __name__ == "__main__": + try: + main() + except KeyboardInterrupt: + raise SystemExit(130) diff --git a/crates/adequate-rust-mcp/.gitignore b/crates/adequate-rust-mcp/.gitignore new file mode 100644 index 0000000..ea8c4bf --- /dev/null +++ b/crates/adequate-rust-mcp/.gitignore @@ -0,0 +1 @@ +/target diff --git a/crates/adequate-rust-mcp/Cargo.toml b/crates/adequate-rust-mcp/Cargo.toml new file mode 100644 index 0000000..9702887 --- /dev/null +++ b/crates/adequate-rust-mcp/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "adequate-rust-mcp" +categories.workspace = true +description = "Durable host/worker MCP server for rust-analyzer with replay-aware recovery and porcelain-first tool output." 
+edition.workspace = true +keywords.workspace = true +license.workspace = true +readme.workspace = true +repository.workspace = true +rust-version.workspace = true +version.workspace = true + +[dependencies] +libmcp.workspace = true +ra-mcp-domain = { path = "../ra-mcp-domain" } +ra-mcp-engine = { path = "../ra-mcp-engine" } +notify.workspace = true +rmcp.workspace = true +schemars.workspace = true +serde.workspace = true +serde_json.workspace = true +tokio.workspace = true +toml.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true +url.workspace = true + +[dev-dependencies] +serial_test.workspace = true +tempfile.workspace = true + +[lints] +workspace = true diff --git a/crates/adequate-rust-mcp/src/host/binary.rs b/crates/adequate-rust-mcp/src/host/binary.rs new file mode 100644 index 0000000..0e35d64 --- /dev/null +++ b/crates/adequate-rust-mcp/src/host/binary.rs @@ -0,0 +1,54 @@ +use notify::{Event, EventKind}; +use std::{ + io, + path::Path, + time::{Duration, SystemTime}, +}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(super) struct BinaryFingerprint { + length: u64, + modified_nanos_since_epoch: u128, +} + +impl BinaryFingerprint { + pub(super) fn capture(path: &Path) -> io::Result<Self> { + let metadata = std::fs::metadata(path)?; + let modified = metadata.modified().unwrap_or(SystemTime::UNIX_EPOCH); + let nanos = modified + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap_or(Duration::ZERO) + .as_nanos(); + Ok(Self { + length: metadata.len(), + modified_nanos_since_epoch: nanos, + }) + } +} + +pub(super) fn should_consider_watch_event(event: &Event) -> bool { + matches!( + event.kind, + EventKind::Create(_) + | EventKind::Modify(_) + | EventKind::Remove(_) + | EventKind::Any + | EventKind::Other + ) +} + +pub(super) fn event_targets_binary( + event: &Event, + worker_binary: &Path, + worker_binary_canonical: Option<&Path>, +) -> bool { + if event.paths.is_empty() { + return false; + } + event.paths.iter().any(|path| { + path 
== worker_binary + || std::fs::canonicalize(path).ok().is_some_and(|candidate| { + worker_binary_canonical.is_some_and(|known| known == candidate) + }) + }) +} diff --git a/crates/adequate-rust-mcp/src/host/config.rs b/crates/adequate-rust-mcp/src/host/config.rs new file mode 100644 index 0000000..d00c60e --- /dev/null +++ b/crates/adequate-rust-mcp/src/host/config.rs @@ -0,0 +1,195 @@ +use std::{cmp::min, io, path::PathBuf, time::Duration}; + +const DEFAULT_QUEUE_CAPACITY: usize = 512; +pub(super) const EVENT_TICK_INTERVAL: Duration = Duration::from_millis(50); +const DEFAULT_RELOAD_DEBOUNCE: Duration = Duration::from_millis(250); +const DEFAULT_RESPAWN_FLOOR: Duration = Duration::from_millis(100); +const DEFAULT_RESPAWN_CEILING: Duration = Duration::from_secs(3); +const DEFAULT_MAX_REPLAY_ATTEMPTS: u8 = 8; +const DEFAULT_TELEMETRY_SNAPSHOT_EVERY: u64 = 100; + +#[derive(Debug, Clone)] +pub(super) struct HostConfig { + pub(super) worker_binary: PathBuf, + pub(super) workspace_root: PathBuf, + pub(super) telemetry_path: PathBuf, + pub(super) telemetry_snapshot_every: u64, + pub(super) queue_capacity: usize, + pub(super) reload_debounce: Duration, + pub(super) respawn_floor: Duration, + pub(super) respawn_ceiling: Duration, + pub(super) max_replay_attempts: u8, +} + +impl HostConfig { + pub(super) fn from_env() -> io::Result<Self> { + let worker_binary = read_worker_binary_from_env()?; + let workspace_root = read_workspace_root_from_env()?; + let telemetry_path = resolve_telemetry_jsonl_path()?; + let telemetry_snapshot_every = read_u64_env( + "ADEQUATE_MCP_TELEMETRY_SNAPSHOT_EVERY", + DEFAULT_TELEMETRY_SNAPSHOT_EVERY, + ); + let queue_capacity = + read_usize_env("ADEQUATE_MCP_HOST_QUEUE_CAPACITY", DEFAULT_QUEUE_CAPACITY); + let reload_debounce = read_duration_env( + "ADEQUATE_MCP_HOST_RELOAD_DEBOUNCE_MS", + DEFAULT_RELOAD_DEBOUNCE, + ); + let respawn_floor = + read_duration_env("ADEQUATE_MCP_HOST_RESPAWN_FLOOR_MS", DEFAULT_RESPAWN_FLOOR); + let respawn_ceiling = 
read_duration_env( + "ADEQUATE_MCP_HOST_RESPAWN_CEILING_MS", + DEFAULT_RESPAWN_CEILING, + ); + let max_replay_attempts = read_u8_env( + "ADEQUATE_MCP_HOST_MAX_REPLAY_ATTEMPTS", + DEFAULT_MAX_REPLAY_ATTEMPTS, + ); + Ok(Self { + worker_binary, + workspace_root, + telemetry_path, + telemetry_snapshot_every, + queue_capacity, + reload_debounce, + respawn_floor, + respawn_ceiling, + max_replay_attempts, + }) + } +} + +#[derive(Debug, Clone)] +pub(super) struct RespawnBackoff { + floor: Duration, + ceiling: Duration, + next_delay: Duration, +} + +impl RespawnBackoff { + pub(super) fn new(floor: Duration, ceiling: Duration) -> Self { + Self { + floor, + ceiling, + next_delay: floor, + } + } + + pub(super) fn reset(&mut self) { + self.next_delay = self.floor; + } + + pub(super) fn consume_delay(&mut self) -> Duration { + let current = self.next_delay; + let doubled = current.checked_mul(2).unwrap_or(self.ceiling); + self.next_delay = min(doubled, self.ceiling); + current + } +} + +fn read_worker_binary_from_env() -> io::Result<PathBuf> { + let from_env = std::env::var_os("ADEQUATE_MCP_WORKER_BINARY"); + match from_env { + Some(value) if !value.is_empty() => Ok(PathBuf::from(value)), + Some(_) | None => std::env::current_exe(), + } +} + +fn read_workspace_root_from_env() -> io::Result<PathBuf> { + let from_env = std::env::var_os("ADEQUATE_MCP_WORKSPACE_ROOT"); + match from_env { + Some(value) if !value.is_empty() => Ok(PathBuf::from(value)), + Some(_) | None => std::env::current_dir(), + } +} + +fn resolve_telemetry_jsonl_path() -> io::Result<PathBuf> { + let state_home = telemetry_state_home( + std::env::var_os("XDG_STATE_HOME").map(PathBuf::from), + std::env::var_os("HOME").map(PathBuf::from), + ); + let Some(state_home) = state_home else { + return Err(io::Error::new( + io::ErrorKind::NotFound, + "cannot resolve XDG state home for telemetry log", + )); + }; + Ok(state_home.join("adequate-rust-mcp").join("telemetry.jsonl")) +} + +fn telemetry_state_home(xdg_state_home: 
Option<PathBuf>, home: Option<PathBuf>) -> Option<PathBuf> { + xdg_state_home.or_else(|| home.map(|home| home.join(".local").join("state"))) +} + +fn read_duration_env(name: &str, default: Duration) -> Duration { + let raw = std::env::var(name); + let Ok(raw) = raw else { + return default; + }; + let parsed = raw.parse::<u64>(); + let Ok(value) = parsed else { + return default; + }; + Duration::from_millis(value) +} + +fn read_usize_env(name: &str, default: usize) -> usize { + let raw = std::env::var(name); + let Ok(raw) = raw else { + return default; + }; + let parsed = raw.parse::<usize>(); + let Ok(value) = parsed else { + return default; + }; + value +} + +fn read_u8_env(name: &str, default: u8) -> u8 { + let raw = std::env::var(name); + let Ok(raw) = raw else { + return default; + }; + let parsed = raw.parse::<u8>(); + let Ok(value) = parsed else { + return default; + }; + value +} + +fn read_u64_env(name: &str, default: u64) -> u64 { + let raw = std::env::var(name); + let Ok(raw) = raw else { + return default; + }; + let parsed = raw.parse::<u64>(); + let Ok(value) = parsed else { + return default; + }; + value +} + +#[cfg(test)] +mod tests { + use super::telemetry_state_home; + use std::path::PathBuf; + + #[test] + fn telemetry_path_prefers_xdg_state_home() { + let path = telemetry_state_home( + Some(PathBuf::from("/xdg-state")), + Some(PathBuf::from("/home/main")), + ); + assert_eq!(path, Some(PathBuf::from("/xdg-state"))); + } + + #[test] + fn telemetry_path_falls_back_to_home_local_state() { + let path = telemetry_state_home(None, Some(PathBuf::from("/home/main"))); + assert_eq!( + path, + Some(PathBuf::from("/home/main").join(".local").join("state")) + ); + } +} diff --git a/crates/adequate-rust-mcp/src/host/mod.rs b/crates/adequate-rust-mcp/src/host/mod.rs new file mode 100644 index 0000000..b5209a6 --- /dev/null +++ b/crates/adequate-rust-mcp/src/host/mod.rs @@ -0,0 +1,12 @@ +//! 
Stable host process that proxies MCP frames to a hot-swappable worker process. + +mod binary; +mod config; +mod process; +mod protocol; +mod runtime; +mod telemetry; +#[cfg(test)] +mod tests; + +pub(crate) use runtime::run_host; diff --git a/crates/adequate-rust-mcp/src/host/process.rs b/crates/adequate-rust-mcp/src/host/process.rs new file mode 100644 index 0000000..60296df --- /dev/null +++ b/crates/adequate-rust-mcp/src/host/process.rs @@ -0,0 +1,253 @@ +use notify::{Event, RecommendedWatcher, RecursiveMode, Watcher}; +use std::{io, path::Path, process::Stdio}; +use tokio::{ + io::BufReader, + process::{Child, ChildStdin, ChildStdout, Command}, + sync::mpsc, + task::JoinHandle, +}; +use tracing::debug; + +use super::protocol::{FrameReadOutcome, FramedMessage, RequestId, read_frame}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub(super) struct WorkerGeneration(u64); + +impl WorkerGeneration { + pub(super) const fn get(self) -> u64 { + self.0 + } +} + +#[derive(Debug, Default)] +pub(super) struct GenerationCounter { + current: u64, +} + +impl GenerationCounter { + pub(super) fn next(&mut self) -> WorkerGeneration { + self.current = self.current.saturating_add(1); + WorkerGeneration(self.current) + } +} + +#[derive(Debug, Clone)] +pub(super) enum ActiveWorkerReadiness { + Ready, + ReplayingInitialize { + request_id: RequestId, + initialized_notification: Option<Vec<u8>>, + }, +} + +#[derive(Debug)] +pub(super) struct ActiveWorker { + pub(super) process: WorkerProcess, + pub(super) readiness: ActiveWorkerReadiness, +} + +impl ActiveWorker { + pub(super) fn generation(&self) -> WorkerGeneration { + self.process.generation + } + + pub(super) fn is_ready(&self) -> bool { + matches!(self.readiness, ActiveWorkerReadiness::Ready) + } +} + +#[derive(Debug)] +pub(super) struct WorkerProcess { + generation: WorkerGeneration, + child: Child, + pub(super) stdin: ChildStdin, + reader_task: JoinHandle<()>, +} + +impl WorkerProcess { + pub(super) async fn 
terminate(mut self) { + self.reader_task.abort(); + if self.child.id().is_some() { + let kill_result = self.child.kill().await; + if let Err(error) = kill_result { + debug!( + generation = self.generation.get(), + "failed to kill worker process cleanly: {error}" + ); + } + } + let wait_result = self.child.wait().await; + if let Err(error) = wait_result { + debug!( + generation = self.generation.get(), + "failed to wait for worker process: {error}" + ); + } + } +} + +#[derive(Debug)] +pub(super) enum HostEvent { + ClientFrame(FramedMessage), + ClientClosed, + ClientFault(io::Error), + WorkerFrame { + generation: WorkerGeneration, + frame: FramedMessage, + }, + WorkerFault { + generation: WorkerGeneration, + error: io::Error, + }, + Watcher(notify::Result<Event>), +} + +pub(super) fn spawn_binary_watcher( + worker_binary: &Path, + event_tx: mpsc::UnboundedSender<HostEvent>, +) -> io::Result<RecommendedWatcher> { + let parent = worker_binary.parent().ok_or_else(|| { + io::Error::new( + io::ErrorKind::InvalidInput, + format!( + "worker binary path has no parent directory: {}", + worker_binary.display() + ), + ) + })?; + let callback_tx = event_tx.clone(); + let watcher_result = notify::recommended_watcher(move |event| { + let sent = callback_tx.send(HostEvent::Watcher(event)); + if let Err(error) = sent { + drop(error); + } + }); + let mut watcher = watcher_result.map_err(|error| io::Error::other(error.to_string()))?; + let watch_result = watcher.watch(parent, RecursiveMode::NonRecursive); + watch_result.map_err(|error| io::Error::other(error.to_string()))?; + Ok(watcher) +} + +pub(super) async fn spawn_worker_process( + worker_binary: &Path, + generation: WorkerGeneration, + event_tx: mpsc::UnboundedSender<HostEvent>, +) -> io::Result<WorkerProcess> { + let mut command = Command::new(worker_binary); + let _configured = command + .arg("--worker") + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::inherit()); + + let spawn = command.spawn(); + let mut 
child = spawn?; + + let stdin = child.stdin.take().ok_or_else(|| { + io::Error::new( + io::ErrorKind::BrokenPipe, + "worker process is missing stdin pipe", + ) + })?; + let stdout = child.stdout.take().ok_or_else(|| { + io::Error::new( + io::ErrorKind::BrokenPipe, + "worker process is missing stdout pipe", + ) + })?; + let reader_task = spawn_worker_reader(stdout, generation, event_tx); + + Ok(WorkerProcess { + generation, + child, + stdin, + reader_task, + }) +} + +pub(super) fn spawn_client_reader(event_tx: mpsc::UnboundedSender<HostEvent>) -> JoinHandle<()> { + tokio::spawn(async move { + let stdin = tokio::io::stdin(); + let mut reader = BufReader::new(stdin); + loop { + match read_frame(&mut reader).await { + Ok(FrameReadOutcome::Frame(payload)) => match FramedMessage::parse(payload) { + Ok(frame) => { + let sent = event_tx.send(HostEvent::ClientFrame(frame)); + if let Err(error) = sent { + drop(error); + break; + } + } + Err(error) => { + let sent = event_tx.send(HostEvent::ClientFault(error)); + if let Err(send_error) = sent { + drop(send_error); + } + break; + } + }, + Ok(FrameReadOutcome::EndOfStream) => { + let sent = event_tx.send(HostEvent::ClientClosed); + if let Err(error) = sent { + drop(error); + } + break; + } + Err(error) => { + let sent = event_tx.send(HostEvent::ClientFault(error)); + if let Err(send_error) = sent { + drop(send_error); + } + break; + } + } + } + }) +} + +fn spawn_worker_reader( + stdout: ChildStdout, + generation: WorkerGeneration, + event_tx: mpsc::UnboundedSender<HostEvent>, +) -> JoinHandle<()> { + tokio::spawn(async move { + let mut reader = BufReader::new(stdout); + loop { + match read_frame(&mut reader).await { + Ok(FrameReadOutcome::Frame(payload)) => match FramedMessage::parse(payload) { + Ok(frame) => { + let sent = event_tx.send(HostEvent::WorkerFrame { generation, frame }); + if let Err(error) = sent { + drop(error); + break; + } + } + Err(error) => { + let sent = event_tx.send(HostEvent::WorkerFault { generation, 
error }); + if let Err(send_error) = sent { + drop(send_error); + } + break; + } + }, + Ok(FrameReadOutcome::EndOfStream) => { + let error = + io::Error::new(io::ErrorKind::UnexpectedEof, "worker stdout reached EOF"); + let sent = event_tx.send(HostEvent::WorkerFault { generation, error }); + if let Err(send_error) = sent { + drop(send_error); + } + break; + } + Err(error) => { + let sent = event_tx.send(HostEvent::WorkerFault { generation, error }); + if let Err(send_error) = sent { + drop(send_error); + } + break; + } + } + } + }) +} diff --git a/crates/adequate-rust-mcp/src/host/protocol.rs b/crates/adequate-rust-mcp/src/host/protocol.rs new file mode 100644 index 0000000..43742b0 --- /dev/null +++ b/crates/adequate-rust-mcp/src/host/protocol.rs @@ -0,0 +1,61 @@ +use libmcp::{ReplayContract, normalize_ascii_token}; +use serde_json::Value; + +pub(super) use libmcp::ReplayContract as EffectReplayContract; +pub(super) use libmcp::{ + FrameReadOutcome, FramedMessage, RequestId, RpcEnvelopeKind, parse_tool_call_meta, read_frame, + write_frame, +}; + +pub(super) fn classify_replay_contract( + frame: &FramedMessage, + rpc_method: &str, +) -> EffectReplayContract { + if rpc_method != "tools/call" { + return ReplayContract::Convergent; + } + + let tool_name = frame + .value + .get("params") + .and_then(Value::as_object) + .and_then(|params| params.get("name")) + .and_then(Value::as_str); + let Some(tool_name) = tool_name else { + return ReplayContract::ProbeRequired; + }; + + match normalize_ascii_token(tool_name).as_str() { + "hover" | "definition" | "references" | "renamesymbol" | "diagnostics" + | "clippydiagnostics" | "healthsnapshot" | "telemetrysnapshot" => { + ReplayContract::Convergent + } + "fixeverything" => ReplayContract::ProbeRequired, + "advancedlsprequest" => classify_advanced_lsp_replay_contract(frame), + _ => ReplayContract::ProbeRequired, + } +} + +fn classify_advanced_lsp_replay_contract(frame: &FramedMessage) -> EffectReplayContract { + let method = 
frame + .value + .get("params") + .and_then(Value::as_object) + .and_then(|params| params.get("arguments")) + .and_then(Value::as_object) + .and_then(|arguments| { + arguments + .get("method") + .or_else(|| arguments.get("lsp_method")) + .or_else(|| arguments.get("lspMethod")) + }) + .and_then(Value::as_str); + let Some(method) = method else { + return ReplayContract::ProbeRequired; + }; + + match normalize_ascii_token(method).as_str() { + "workspaceexecutecommand" => ReplayContract::ProbeRequired, + _ => ReplayContract::Convergent, + } +} diff --git a/crates/adequate-rust-mcp/src/host/runtime.rs b/crates/adequate-rust-mcp/src/host/runtime.rs new file mode 100644 index 0000000..34b45cf --- /dev/null +++ b/crates/adequate-rust-mcp/src/host/runtime.rs @@ -0,0 +1,696 @@ +use libmcp::{ + HostRejection, HostSessionKernel, HostSessionKernelSnapshot, PendingRequest, ReplayBudget, + RestoredHostSessionKernel, load_snapshot_file_from_env, remove_snapshot_file, + write_snapshot_file, +}; +use notify::{Event, RecommendedWatcher}; +use serde_json::{Value, json}; +#[cfg(unix)] +use std::os::unix::process::CommandExt; +use std::{ + fs, io, + path::{Path, PathBuf}, + process::Command, +}; +use tokio::{sync::mpsc, task::JoinHandle, time::Instant}; +use tracing::{Level, debug, info, warn}; + +use super::{ + binary::{BinaryFingerprint, event_targets_binary, should_consider_watch_event}, + config::{EVENT_TICK_INTERVAL, HostConfig, RespawnBackoff}, + process::{ + ActiveWorker, ActiveWorkerReadiness, GenerationCounter, HostEvent, WorkerGeneration, + spawn_binary_watcher, spawn_client_reader, spawn_worker_process, + }, + protocol::{ + FramedMessage, RequestId, RpcEnvelopeKind, classify_replay_contract, parse_tool_call_meta, + write_frame, + }, + telemetry::{ + TelemetryLog, ToolOutcome, classify_tool_outcome_from_response, duration_millis_u64, + host_tool_error_detail, + }, +}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum HostControl { + Continue, + Exit, +} + +const 
HOST_REEXEC_STATE_ENV: &str = "ADEQUATE_MCP_HOST_REEXEC_STATE"; + +#[cfg(unix)] +fn reexec_host_process(binary: &Path, state_path: &Path) -> io::Result<()> { + let error = Command::new(binary) + .env(HOST_REEXEC_STATE_ENV, state_path) + .exec(); + Err(error) +} + +#[cfg(not(unix))] +fn reexec_host_process(_binary: &Path, _state_path: &Path) -> io::Result<()> { + Err(io::Error::new( + io::ErrorKind::Unsupported, + "host self-reexec requires unix process exec support", + )) +} + +struct HostRuntime { + config: HostConfig, + telemetry: TelemetryLog, + events: mpsc::UnboundedReceiver<HostEvent>, + event_tx: mpsc::UnboundedSender<HostEvent>, + client_reader_task: JoinHandle<()>, + watcher: RecommendedWatcher, + client_stdout: tokio::io::Stdout, + worker: Option<ActiveWorker>, + session_kernel: HostSessionKernel, + generation_counter: GenerationCounter, + respawn_backoff: RespawnBackoff, + respawn_due: Option<Instant>, + reload_requested_at: Option<Instant>, + binary_fingerprint: Option<BinaryFingerprint>, + worker_binary_canonical: Option<PathBuf>, +} + +impl HostRuntime { + async fn run(&mut self) -> io::Result<()> { + let mut ticker = tokio::time::interval(EVENT_TICK_INTERVAL); + loop { + tokio::select! 
{ + _ = ticker.tick() => { + self.on_tick().await?; + } + maybe_event = self.events.recv() => { + let Some(event) = maybe_event else { + break; + }; + let control = self.handle_event(event).await?; + if control == HostControl::Exit { + break; + } + } + } + } + + self.shutdown().await; + Ok(()) + } + + async fn shutdown(&mut self) { + let snapshot = self.telemetry.write_hot_paths_snapshot(); + if let Err(error) = snapshot { + warn!("telemetry snapshot flush failed: {error}"); + } + self.client_reader_task.abort(); + let worker = self.worker.take(); + if let Some(worker) = worker { + worker.process.terminate().await; + } + let _keep_watcher_alive = &self.watcher; + } + + async fn handle_event(&mut self, event: HostEvent) -> io::Result<HostControl> { + match event { + HostEvent::ClientFrame(frame) => { + self.on_client_frame(frame).await?; + Ok(HostControl::Continue) + } + HostEvent::ClientClosed => Ok(HostControl::Exit), + HostEvent::ClientFault(error) => { + warn!("client transport fault: {error}"); + Ok(HostControl::Exit) + } + HostEvent::WorkerFrame { generation, frame } => { + self.on_worker_frame(generation, frame).await?; + Ok(HostControl::Continue) + } + HostEvent::WorkerFault { generation, error } => { + self.on_worker_fault(generation, error).await?; + Ok(HostControl::Continue) + } + HostEvent::Watcher(event) => { + self.on_watcher_event(event); + Ok(HostControl::Continue) + } + } + } + + async fn on_tick(&mut self) -> io::Result<()> { + if let Some(requested_at) = self.reload_requested_at { + let elapsed = Instant::now().saturating_duration_since(requested_at); + if elapsed >= self.config.reload_debounce { + self.execute_reload_if_binary_changed().await?; + } + } + + if let Some(respawn_due) = self.respawn_due + && Instant::now() >= respawn_due + { + self.respawn_due = None; + if self.worker.is_none() { + let spawn = self.spawn_worker().await; + if let Err(error) = spawn { + warn!("worker spawn failed: {error}"); + self.schedule_respawn_with_backoff(); + } 
+ } + } + Ok(()) + } + + fn schedule_respawn_with_backoff(&mut self) { + let delay = self.respawn_backoff.consume_delay(); + self.respawn_due = Some(Instant::now() + delay); + } + + fn schedule_immediate_spawn(&mut self) { + self.respawn_due = Some(Instant::now()); + } + + async fn restart_worker_immediately(&mut self) -> io::Result<()> { + self.terminate_worker().await; + self.requeue_pending_for_replay().await?; + self.schedule_immediate_spawn(); + Ok(()) + } + + async fn terminate_worker(&mut self) { + let worker = self.worker.take(); + if let Some(worker) = worker { + worker.process.terminate().await; + } + } + + async fn spawn_worker(&mut self) -> io::Result<()> { + let generation = self.generation_counter.next(); + let process = spawn_worker_process( + &self.config.worker_binary, + generation, + self.event_tx.clone(), + ) + .await?; + let mut worker = ActiveWorker { + process, + readiness: ActiveWorkerReadiness::Ready, + }; + + let replay_seed = self.session_kernel.replay_seed(); + if let Some(seed) = replay_seed { + let replay_write = + write_frame(&mut worker.process.stdin, &seed.initialize_request.payload).await; + if let Err(error) = replay_write { + worker.process.terminate().await; + return Err(error); + } + worker.readiness = ActiveWorkerReadiness::ReplayingInitialize { + request_id: seed.initialize_request.id, + initialized_notification: seed.initialized_notification, + }; + info!( + generation = generation.get(), + "spawned worker and started initialize replay" + ); + } else { + info!(generation = generation.get(), "spawned worker"); + } + + self.worker = Some(worker); + self.respawn_backoff.reset(); + self.worker_binary_canonical = fs::canonicalize(&self.config.worker_binary).ok(); + self.binary_fingerprint = BinaryFingerprint::capture(&self.config.worker_binary).ok(); + + if self.worker_accepts_client_traffic() { + self.drain_queue().await?; + } + Ok(()) + } + + fn worker_accepts_client_traffic(&self) -> bool { + 
self.worker.as_ref().is_some_and(ActiveWorker::is_ready) + } + + fn active_generation(&self) -> Option<WorkerGeneration> { + self.worker.as_ref().map(ActiveWorker::generation) + } + + fn is_active_generation(&self, generation: WorkerGeneration) -> bool { + self.active_generation() == Some(generation) + } + + fn on_watcher_event(&mut self, event: notify::Result<Event>) { + match event { + Ok(event) => { + if should_consider_watch_event(&event) + && event_targets_binary( + &event, + &self.config.worker_binary, + self.worker_binary_canonical.as_deref(), + ) + { + self.reload_requested_at = Some(Instant::now()); + } + } + Err(error) => { + warn!("watcher callback fault: {error}"); + self.reload_requested_at = Some(Instant::now()); + } + } + } + + async fn execute_reload_if_binary_changed(&mut self) -> io::Result<()> { + let fingerprint = BinaryFingerprint::capture(&self.config.worker_binary); + match fingerprint { + Ok(fingerprint) => { + let changed = self.binary_fingerprint.as_ref() != Some(&fingerprint); + if changed { + self.binary_fingerprint = Some(fingerprint); + self.worker_binary_canonical = + fs::canonicalize(&self.config.worker_binary).ok(); + self.reload_requested_at = None; + info!("detected worker binary update, rolling host forward"); + let rollout = self.roll_host_forward().await; + if let Err(error) = rollout { + warn!( + "host self-rollout failed, falling back to worker-only restart: {error}" + ); + } + } else { + self.reload_requested_at = None; + } + } + Err(error) if error.kind() == io::ErrorKind::NotFound => { + debug!("worker binary temporarily missing during reload check"); + self.reload_requested_at = Some(Instant::now()); + } + Err(error) => { + warn!("unable to fingerprint worker binary for reload: {error}"); + self.reload_requested_at = Some(Instant::now()); + } + } + Ok(()) + } + + async fn roll_host_forward(&mut self) -> io::Result<()> { + self.terminate_worker().await; + self.requeue_pending_for_replay().await?; + let snapshot = 
self.session_kernel.snapshot(); + let state_path = write_snapshot_file("adequate-rust-mcp-host-reexec", &snapshot)?; + let reexec = reexec_host_process(&self.config.worker_binary, &state_path); + if let Err(error) = remove_snapshot_file(&state_path) { + warn!("failed to clean up host reexec snapshot after failed rollout: {error}"); + } + self.schedule_immediate_spawn(); + reexec + } + + async fn on_client_frame(&mut self, frame: FramedMessage) -> io::Result<()> { + self.session_kernel.observe_client_frame(&frame); + if self.worker_accepts_client_traffic() { + let forward = self.forward_client_frame(frame).await; + if let Err(error) = forward { + self.handle_worker_failure(error, "failed forwarding client frame") + .await?; + } + return Ok(()); + } + self.enqueue_client_frame(frame).await + } + + async fn enqueue_client_frame(&mut self, frame: FramedMessage) -> io::Result<()> { + if let Err(reason) = self + .session_kernel + .queue_client_frame(frame.clone(), self.config.queue_capacity) + { + self.respond_rejected_frame(&frame, reason).await?; + return Ok(()); + } + if self.worker.is_none() && self.respawn_due.is_none() { + self.schedule_immediate_spawn(); + } + Ok(()) + } + + async fn respond_rejected_frame( + &mut self, + frame: &FramedMessage, + reason: HostRejection, + ) -> io::Result<()> { + if let RpcEnvelopeKind::Request { id, .. } = frame.classify() { + self.emit_error_response(&id, reason).await?; + self.record_host_error_for_rejected_frame(&id, frame, reason, 0); + } + Ok(()) + } + + async fn forward_client_frame(&mut self, frame: FramedMessage) -> io::Result<()> { + if let RpcEnvelopeKind::Request { ref method, .. 
} = frame.classify() { + let replay_contract = classify_replay_contract(&frame, method.as_str()); + self.session_kernel + .record_forwarded_request(&frame, replay_contract); + } + + self.write_to_worker(&frame.payload).await + } + + async fn drain_queue(&mut self) -> io::Result<()> { + loop { + if !self.worker_accepts_client_traffic() { + break; + } + let frame = self.session_kernel.pop_queued_frame(); + let Some(frame) = frame else { + break; + }; + let forward = self.forward_client_frame(frame).await; + if let Err(error) = forward { + self.handle_worker_failure(error, "failed while draining client queue") + .await?; + break; + } + } + Ok(()) + } + + async fn on_worker_frame( + &mut self, + generation: WorkerGeneration, + frame: FramedMessage, + ) -> io::Result<()> { + if !self.is_active_generation(generation) { + return Ok(()); + } + + match frame.classify() { + RpcEnvelopeKind::Response { id, has_error } => { + if self.is_replay_initialize_response(&id) { + if has_error { + warn!( + generation = generation.get(), + "initialize replay failed on hot-swapped worker" + ); + self.restart_worker_immediately().await?; + return Ok(()); + } + + if let Some(completed) = self.session_kernel.take_completed_request(&id) { + self.record_tool_completion_from_response( + &id, + &completed.request, + &frame, + has_error, + completed.replay_attempts, + ); + self.write_to_client(&frame.payload).await?; + } + + let replay_initialized = self.take_replay_initialized_notification(); + if let Some(payload) = replay_initialized { + let write = self.write_to_worker(&payload).await; + if let Err(error) = write { + self.handle_worker_failure( + error, + "failed replaying initialized notification", + ) + .await?; + return Ok(()); + } + } + + self.mark_worker_ready(); + self.drain_queue().await?; + return Ok(()); + } + + if let Some(completed) = self.session_kernel.take_completed_request(&id) { + self.record_tool_completion_from_response( + &id, + &completed.request, + &frame, + has_error, + 
completed.replay_attempts, + ); + self.write_to_client(&frame.payload).await?; + } else { + debug!( + generation = generation.get(), + "dropping response with unknown id from worker" + ); + } + } + RpcEnvelopeKind::Request { .. } + | RpcEnvelopeKind::Notification { .. } + | RpcEnvelopeKind::Unknown => { + if self.worker_accepts_client_traffic() { + self.write_to_client(&frame.payload).await?; + } + } + } + Ok(()) + } + + fn is_replay_initialize_response(&self, response_id: &RequestId) -> bool { + let readiness = self.worker.as_ref().map(|worker| &worker.readiness); + match readiness { + Some(ActiveWorkerReadiness::ReplayingInitialize { request_id, .. }) => { + request_id == response_id + } + Some(ActiveWorkerReadiness::Ready) | None => false, + } + } + + fn take_replay_initialized_notification(&mut self) -> Option<Vec<u8>> { + let readiness = self.worker.as_mut().map(|worker| &mut worker.readiness); + match readiness { + Some(ActiveWorkerReadiness::ReplayingInitialize { + initialized_notification, + .. 
+ }) => initialized_notification.take(), + Some(ActiveWorkerReadiness::Ready) | None => None, + } + } + + fn mark_worker_ready(&mut self) { + if let Some(worker) = self.worker.as_mut() { + worker.readiness = ActiveWorkerReadiness::Ready; + } + } + + async fn on_worker_fault( + &mut self, + generation: WorkerGeneration, + error: io::Error, + ) -> io::Result<()> { + if !self.is_active_generation(generation) { + return Ok(()); + } + self.handle_worker_failure(error, "worker transport fault") + .await + } + + async fn handle_worker_failure(&mut self, error: io::Error, context: &str) -> io::Result<()> { + warn!("{context}: {error}"); + self.terminate_worker().await; + self.requeue_pending_for_replay().await?; + self.schedule_respawn_with_backoff(); + Ok(()) + } + + async fn requeue_pending_for_replay(&mut self) -> io::Result<()> { + let outcome = self + .session_kernel + .requeue_pending_for_replay(ReplayBudget { + max_attempts: self.config.max_replay_attempts, + queue_capacity: self.config.queue_capacity, + }); + for rejected in outcome.rejected { + warn!( + request = ?rejected.request_id, + method = rejected.request.method, + reason = rejected.reason.message(), + "recovery dropped pending request" + ); + self.emit_error_response(&rejected.request_id, rejected.reason) + .await?; + self.record_host_error_for_pending_request( + &rejected.request_id, + &rejected.request, + rejected.reason, + rejected.next_attempt, + ); + } + Ok(()) + } + + fn record_tool_completion_from_response( + &mut self, + request_id: &RequestId, + pending: &PendingRequest, + response: &FramedMessage, + has_rpc_error: bool, + replay_attempts: u8, + ) { + let tool_meta = pending.tool_call_meta.as_ref(); + let Some(tool_meta) = tool_meta else { + return; + }; + let latency_ms = duration_millis_u64(pending.started_at.elapsed()); + let (outcome, error) = classify_tool_outcome_from_response(response, has_rpc_error); + let write = self.telemetry.record_tool_completion( + request_id, + tool_meta, + 
latency_ms, + replay_attempts, + outcome, + error, + ); + if let Err(error) = write { + warn!("telemetry write failed: {error}"); + } + } + + fn record_host_error_for_pending_request( + &mut self, + request_id: &RequestId, + pending: &PendingRequest, + reason: HostRejection, + replay_attempts: u8, + ) { + let tool_meta = pending.tool_call_meta.as_ref(); + let Some(tool_meta) = tool_meta else { + return; + }; + let latency_ms = duration_millis_u64(pending.started_at.elapsed()); + let write = self.telemetry.record_tool_completion( + request_id, + tool_meta, + latency_ms, + replay_attempts, + ToolOutcome::Error, + host_tool_error_detail(reason), + ); + if let Err(error) = write { + warn!("telemetry write failed: {error}"); + } + } + + fn record_host_error_for_rejected_frame( + &mut self, + request_id: &RequestId, + frame: &FramedMessage, + reason: HostRejection, + replay_attempts: u8, + ) { + let method = frame + .value + .get("method") + .and_then(Value::as_str) + .unwrap_or_default(); + let tool_meta = parse_tool_call_meta(frame, method); + let Some(tool_meta) = tool_meta else { + return; + }; + let write = self.telemetry.record_tool_completion( + request_id, + &tool_meta, + 0, + replay_attempts, + ToolOutcome::Error, + host_tool_error_detail(reason), + ); + if let Err(error) = write { + warn!("telemetry write failed: {error}"); + } + } + + async fn emit_error_response( + &mut self, + request_id: &RequestId, + reason: HostRejection, + ) -> io::Result<()> { + let payload = json!({ + "jsonrpc": "2.0", + "id": request_id.to_json_value(), + "error": { + "code": reason.code(), + "message": reason.message(), + }, + }); + let bytes = serde_json::to_vec(&payload).map_err(|error| { + io::Error::new( + io::ErrorKind::InvalidData, + format!("failed to serialize host error response: {error}"), + ) + })?; + self.write_to_client(&bytes).await + } + + async fn write_to_client(&mut self, payload: &[u8]) -> io::Result<()> { + write_frame(&mut self.client_stdout, payload).await + } + 
+ async fn write_to_worker(&mut self, payload: &[u8]) -> io::Result<()> { + let worker = self.worker.as_mut().ok_or_else(|| { + io::Error::new( + io::ErrorKind::BrokenPipe, + "worker unavailable while writing", + ) + })?; + write_frame(&mut worker.process.stdin, payload).await + } +} + +fn init_tracing() { + let init_result = tracing_subscriber::fmt() + .with_max_level(Level::INFO) + .with_env_filter(tracing_subscriber::EnvFilter::from_default_env()) + .with_writer(io::stderr) + .try_init(); + if let Err(error) = init_result { + eprintln!("host tracing init skipped: {error}"); + } +} + +/// Runs the stable host mode that survives worker failures and hot-swaps worker binaries. +pub(crate) async fn run_host() -> Result<(), Box<dyn std::error::Error>> { + init_tracing(); + let config = HostConfig::from_env()?; + let restored_state = + load_snapshot_file_from_env::<HostSessionKernelSnapshot>(HOST_REEXEC_STATE_ENV)? + .map(HostSessionKernelSnapshot::restore) + .transpose()?; + if restored_state.is_some() { + info!("restoring host state after self-rollout"); + } + info!(telemetry_path = %config.telemetry_path.display(), "starting host telemetry"); + let telemetry = TelemetryLog::new( + config.telemetry_path.as_path(), + config.workspace_root.as_path(), + config.telemetry_snapshot_every, + )?; + let (event_tx, event_rx) = mpsc::unbounded_channel::<HostEvent>(); + let client_reader_task = spawn_client_reader(event_tx.clone()); + let watcher = spawn_binary_watcher(&config.worker_binary, event_tx.clone())?; + let restored_state = restored_state.unwrap_or_else(RestoredHostSessionKernel::cold); + + let mut runtime = HostRuntime { + telemetry, + worker_binary_canonical: fs::canonicalize(&config.worker_binary).ok(), + binary_fingerprint: BinaryFingerprint::capture(&config.worker_binary).ok(), + respawn_backoff: RespawnBackoff::new(config.respawn_floor, config.respawn_ceiling), + config, + events: event_rx, + event_tx, + client_reader_task, + watcher, + client_stdout: 
tokio::io::stdout(), + worker: None, + session_kernel: HostSessionKernel::from_restored(restored_state), + generation_counter: GenerationCounter::default(), + respawn_due: Some(Instant::now()), + reload_requested_at: None, + }; + runtime.run().await?; + Ok(()) +} diff --git a/crates/adequate-rust-mcp/src/host/telemetry.rs b/crates/adequate-rust-mcp/src/host/telemetry.rs new file mode 100644 index 0000000..fcd3bf2 --- /dev/null +++ b/crates/adequate-rust-mcp/src/host/telemetry.rs @@ -0,0 +1,97 @@ +use serde_json::Value; + +use super::protocol::FramedMessage; + +pub(super) use libmcp::{HostRejection, TelemetryLog, ToolErrorDetail, ToolOutcome}; + +pub(super) fn host_tool_error_detail(reason: HostRejection) -> ToolErrorDetail { + ToolErrorDetail { + code: Some(reason.code()), + kind: Some("host_error".to_owned()), + message: Some(reason.message().to_owned()), + } +} + +pub(super) fn classify_tool_outcome_from_response( + response: &FramedMessage, + has_rpc_error: bool, +) -> (ToolOutcome, ToolErrorDetail) { + if has_rpc_error { + let error = response.value.get("error").and_then(Value::as_object); + let code = error + .and_then(|error| error.get("code")) + .and_then(Value::as_i64); + let message = error + .and_then(|error| error.get("message")) + .and_then(Value::as_str) + .map(str::to_owned); + let kind = error + .and_then(extract_jsonrpc_error_kind_from_data) + .or_else(|| jsonrpc_error_kind(code)); + return ( + ToolOutcome::Error, + ToolErrorDetail { + code, + kind, + message, + }, + ); + } + + let result = response.value.get("result"); + let is_tool_error = result + .and_then(|result| result.get("isError")) + .and_then(Value::as_bool) + .unwrap_or(false); + if is_tool_error { + let message = result.and_then(extract_tool_error_message); + return ( + ToolOutcome::Error, + ToolErrorDetail { + code: None, + kind: Some("tool_error_payload".to_owned()), + message, + }, + ); + } + + (ToolOutcome::Ok, ToolErrorDetail::default()) +} + +fn 
extract_jsonrpc_error_kind_from_data(error: &serde_json::Map<String, Value>) -> Option<String> { + error + .get("data") + .and_then(Value::as_object) + .and_then(|data| data.get("kind")) + .and_then(Value::as_str) + .map(str::to_owned) +} + +fn jsonrpc_error_kind(code: Option<i64>) -> Option<String> { + let code = code?; + let kind = match code { + -32700 => "parse_error", + -32600 => "invalid_request", + -32601 => "method_not_found", + -32602 => "invalid_params", + -32603 => "internal_error", + -32095 => "replay_budget_exhausted", + -32097 => "queue_overflow", + _ if (-32099..=-32000).contains(&code) => "server_error", + _ => "jsonrpc_error", + }; + Some(kind.to_owned()) +} + +fn extract_tool_error_message(result: &Value) -> Option<String> { + let content = result.get("content").and_then(Value::as_array)?; + content + .first() + .and_then(|entry| entry.get("text")) + .and_then(Value::as_str) + .map(str::to_owned) +} + +pub(super) fn duration_millis_u64(duration: std::time::Duration) -> u64 { + u64::try_from(duration.as_millis()).unwrap_or(u64::MAX) +} diff --git a/crates/adequate-rust-mcp/src/host/tests.rs b/crates/adequate-rust-mcp/src/host/tests.rs new file mode 100644 index 0000000..ded0997 --- /dev/null +++ b/crates/adequate-rust-mcp/src/host/tests.rs @@ -0,0 +1,338 @@ +use super::{ + binary::{BinaryFingerprint, event_targets_binary}, + config::RespawnBackoff, + protocol::{ + EffectReplayContract, FrameReadOutcome, FramedMessage, RequestId, RpcEnvelopeKind, + classify_replay_contract, parse_tool_call_meta, read_frame, write_frame, + }, + telemetry::{ToolOutcome, classify_tool_outcome_from_response}, +}; +use notify::{Event, EventKind}; +use serde_json::{Value, json}; +use std::time::Duration; +use tokio::io::{BufReader, duplex}; + +fn encode_json(value: &Value) -> Vec<u8> { + let serialized = serde_json::to_vec(value); + assert!( + serialized.is_ok(), + "expected JSON serialization to succeed in test fixture" + ); + serialized.unwrap_or_default() +} + +#[test] 
+fn request_id_roundtrip_preserves_number_and_text() { + let numeric = RequestId::from_json_value(&json!(42)); + assert!(numeric.is_some(), "expected numeric request id to parse"); + let Some(numeric) = numeric else { + return; + }; + assert!(matches!(numeric, RequestId::Number(ref value) if value == "42")); + assert_eq!(numeric.to_json_value(), json!(42)); + + let textual = RequestId::from_json_value(&json!("abc")); + assert!(textual.is_some(), "expected string request id to parse"); + let Some(textual) = textual else { + return; + }; + assert!(matches!(textual, RequestId::Text(ref value) if value == "abc")); + assert_eq!(textual.to_json_value(), json!("abc")); +} + +#[test] +fn framed_message_classification_distinguishes_request_response_notification() { + let request = FramedMessage::parse(encode_json( + &json!({"jsonrpc":"2.0","id":1,"method":"initialize","params":{}}), + )); + assert!(request.is_ok(), "request fixture failed to parse"); + let Ok(request) = request else { + return; + }; + assert!(matches!( + request.classify(), + RpcEnvelopeKind::Request { ref method, .. } if method == "initialize" + )); + + let response = + FramedMessage::parse(encode_json(&json!({"jsonrpc":"2.0","id":"x","result":{}}))); + assert!(response.is_ok(), "response fixture failed to parse"); + let Ok(response) = response else { + return; + }; + assert!(matches!( + response.classify(), + RpcEnvelopeKind::Response { + has_error: false, + .. 
+ } + )); + + let notification = FramedMessage::parse(encode_json( + &json!({"jsonrpc":"2.0","method":"initialized","params":{}}), + )); + assert!(notification.is_ok(), "notification fixture failed to parse"); + let Ok(notification) = notification else { + return; + }; + assert!(matches!( + notification.classify(), + RpcEnvelopeKind::Notification { ref method } if method == "initialized" + )); +} + +#[tokio::test] +async fn frame_codec_round_trip_preserves_payload() { + let (mut writer, reader) = duplex(512); + let payload = encode_json(&json!({"jsonrpc":"2.0","id":9,"result":{"ok":true}})); + let payload_for_assert = payload.clone(); + + let write_handle = tokio::spawn(async move { write_frame(&mut writer, &payload).await }); + let mut framed_reader = BufReader::new(reader); + let read = read_frame(&mut framed_reader).await; + assert!(read.is_ok(), "read_frame failed"); + let Ok(read) = read else { + return; + }; + assert!(matches!(read, FrameReadOutcome::Frame(ref frame) if frame == &payload_for_assert)); + + let joined = write_handle.await; + assert!(joined.is_ok(), "writer task join failed"); + let Ok(joined) = joined else { + return; + }; + assert!(joined.is_ok(), "write_frame failed"); +} + +#[test] +fn binary_fingerprint_changes_after_content_update() { + let temp = tempfile::NamedTempFile::new(); + assert!(temp.is_ok()); + let temp = match temp { + Ok(file) => file, + Err(_) => return, + }; + let path = temp.path().to_path_buf(); + + let write_one = std::fs::write(&path, b"alpha"); + assert!(write_one.is_ok()); + + let first = BinaryFingerprint::capture(&path); + assert!(first.is_ok()); + let first = match first { + Ok(value) => value, + Err(_) => return, + }; + + std::thread::sleep(Duration::from_millis(3)); + let write_two = std::fs::write(&path, b"beta-gamma"); + assert!(write_two.is_ok()); + + let second = BinaryFingerprint::capture(&path); + assert!( + second.is_ok(), + "expected second fingerprint capture to succeed" + ); + let Ok(second) = second 
else { + return; + }; + assert_ne!(first, second); +} + +#[test] +fn respawn_backoff_doubles_and_caps() { + let floor = Duration::from_millis(10); + let ceiling = Duration::from_millis(40); + let mut backoff = RespawnBackoff::new(floor, ceiling); + assert_eq!(backoff.consume_delay(), Duration::from_millis(10)); + assert_eq!(backoff.consume_delay(), Duration::from_millis(20)); + assert_eq!(backoff.consume_delay(), Duration::from_millis(40)); + assert_eq!(backoff.consume_delay(), Duration::from_millis(40)); + backoff.reset(); + assert_eq!(backoff.consume_delay(), Duration::from_millis(10)); +} + +#[test] +fn watcher_event_target_match_handles_exact_path() { + let temp = tempfile::NamedTempFile::new(); + assert!(temp.is_ok()); + let temp = match temp { + Ok(file) => file, + Err(_) => return, + }; + let path = temp.path().to_path_buf(); + let mut event = Event::new(EventKind::Modify(notify::event::ModifyKind::Any)); + event.paths.push(path.clone()); + assert!(event_targets_binary(&event, &path, Some(path.as_path()))); +} + +#[test] +fn watcher_event_target_ignores_empty_path_payload() { + let temp = tempfile::NamedTempFile::new(); + assert!(temp.is_ok()); + let temp = match temp { + Ok(file) => file, + Err(_) => return, + }; + let path = temp.path().to_path_buf(); + let event = Event::new(EventKind::Other); + assert!(!event_targets_binary(&event, &path, Some(path.as_path()))); +} + +#[test] +fn replay_contract_marks_safe_tools_convergent() { + let frame = FramedMessage::parse(encode_json(&json!({ + "jsonrpc":"2.0", + "id": 1, + "method": "tools/call", + "params": { + "name": "hover", + "arguments": { "file_path": "/tmp/lib.rs", "line": 1, "column": 1 } + } + }))); + assert!(frame.is_ok()); + let frame = match frame { + Ok(value) => value, + Err(_) => return, + }; + let replay = classify_replay_contract(&frame, "tools/call"); + assert_eq!(replay, EffectReplayContract::Convergent); +} + +#[test] +fn replay_contract_marks_clippy_diagnostics_convergent() { + let frame = 
FramedMessage::parse(encode_json(&json!({ + "jsonrpc":"2.0", + "id": 1, + "method": "tools/call", + "params": { + "name": "clippy_diagnostics", + "arguments": { "file_path": "/tmp/lib.rs" } + } + }))); + assert!(frame.is_ok()); + let frame = match frame { + Ok(value) => value, + Err(_) => return, + }; + let replay = classify_replay_contract(&frame, "tools/call"); + assert_eq!(replay, EffectReplayContract::Convergent); +} + +#[test] +fn replay_contract_marks_execute_command_probe_required() { + let frame = FramedMessage::parse(encode_json(&json!({ + "jsonrpc":"2.0", + "id": 1, + "method": "tools/call", + "params": { + "name": "advanced_lsp_request", + "arguments": { "method": "workspace/executeCommand", "params": {} } + } + }))); + assert!(frame.is_ok()); + let frame = match frame { + Ok(value) => value, + Err(_) => return, + }; + let replay = classify_replay_contract(&frame, "tools/call"); + assert_eq!(replay, EffectReplayContract::ProbeRequired); +} + +#[test] +fn replay_contract_marks_fix_everything_probe_required() { + let frame = FramedMessage::parse(encode_json(&json!({ + "jsonrpc":"2.0", + "id": 1, + "method": "tools/call", + "params": { + "name": "fix_everything", + "arguments": {} + } + }))); + assert!(frame.is_ok()); + let frame = match frame { + Ok(value) => value, + Err(_) => return, + }; + let replay = classify_replay_contract(&frame, "tools/call"); + assert_eq!(replay, EffectReplayContract::ProbeRequired); +} + +#[test] +fn parse_tool_call_meta_extracts_path_hint_from_stringified_params() { + let frame = FramedMessage::parse(encode_json(&json!({ + "jsonrpc":"2.0", + "id": 1, + "method": "tools/call", + "params": { + "name": "advanced_lsp_request", + "arguments": { + "method": "textDocument/definition", + "params": "{\"textDocument\":{\"uri\":\"file:///tmp/lib.rs\"},\"position\":{\"line\":6,\"character\":16}}" + } + } + }))); + assert!(frame.is_ok()); + let frame = match frame { + Ok(value) => value, + Err(_) => return, + }; + + let meta = 
parse_tool_call_meta(&frame, "tools/call"); + assert!(meta.is_some()); + let meta = match meta { + Some(value) => value, + None => return, + }; + assert_eq!(meta.lsp_method.as_deref(), Some("textDocument/definition")); + assert_eq!(meta.path_hint.as_deref(), Some("/tmp/lib.rs")); +} + +#[test] +fn response_error_kind_prefers_jsonrpc_data_kind() { + let frame = FramedMessage::parse(encode_json(&json!({ + "jsonrpc":"2.0", + "id": 7, + "error": { + "code": -32603, + "message": "internal error", + "data": { + "kind": "porcelain_fallback", + "hint": "retry once" + } + } + }))); + assert!(frame.is_ok()); + let frame = match frame { + Ok(value) => value, + Err(_) => return, + }; + + let (outcome, detail) = classify_tool_outcome_from_response(&frame, true); + assert!(matches!(outcome, ToolOutcome::Error)); + assert_eq!(detail.code, Some(-32603)); + assert_eq!(detail.kind.as_deref(), Some("porcelain_fallback")); +} + +#[test] +fn response_error_kind_falls_back_to_jsonrpc_code_mapping() { + let frame = FramedMessage::parse(encode_json(&json!({ + "jsonrpc":"2.0", + "id": 8, + "error": { + "code": -32602, + "message": "invalid params" + } + }))); + assert!(frame.is_ok()); + let frame = match frame { + Ok(value) => value, + Err(_) => return, + }; + + let (outcome, detail) = classify_tool_outcome_from_response(&frame, true); + assert!(matches!(outcome, ToolOutcome::Error)); + assert_eq!(detail.code, Some(-32602)); + assert_eq!(detail.kind.as_deref(), Some("invalid_params")); +} diff --git a/crates/adequate-rust-mcp/src/main.rs b/crates/adequate-rust-mcp/src/main.rs new file mode 100644 index 0000000..e42806b --- /dev/null +++ b/crates/adequate-rust-mcp/src/main.rs @@ -0,0 +1,88 @@ +//! Process-mode dispatch for adequate-rust-mcp. 
+ +mod host; +mod worker; + +#[cfg(test)] +use serial_test as _; +use std::{ffi::OsString, io}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum LaunchMode { + Host, + Worker, + Help, +} + +fn parse_launch_mode(args: impl IntoIterator<Item = OsString>) -> Result<LaunchMode, String> { + let collected = args.into_iter().collect::<Vec<_>>(); + match collected.as_slice() { + [] => Ok(LaunchMode::Host), + [flag] if flag == "--worker" => Ok(LaunchMode::Worker), + [flag] if flag == "--help" || flag == "-h" => Ok(LaunchMode::Help), + _ => Err("invalid arguments: use no args for host mode or --worker for worker mode".into()), + } +} + +fn print_usage() { + println!("adequate-rust-mcp"); + println!(); + println!("USAGE:"); + println!(" adequate-rust-mcp"); + println!(" adequate-rust-mcp --worker"); + println!(" adequate-rust-mcp --help"); + println!(); + println!("MODES:"); + println!(" host stable public MCP endpoint that supervises worker generations"); + println!(" --worker disposable worker process spawned by the host"); +} + +#[tokio::main] +async fn main() -> Result<(), Box<dyn std::error::Error>> { + let mode = parse_launch_mode(std::env::args_os().skip(1)) + .map_err(|message| io::Error::new(io::ErrorKind::InvalidInput, message))?; + match mode { + LaunchMode::Host => host::run_host().await, + LaunchMode::Worker => worker::run_worker().await, + LaunchMode::Help => { + print_usage(); + Ok(()) + } + } +} + +#[cfg(test)] +mod tests { + use super::{LaunchMode, parse_launch_mode}; + use std::ffi::OsString; + + #[test] + fn parse_launch_mode_defaults_to_host() { + let mode = parse_launch_mode(Vec::<OsString>::new()); + assert!(matches!(mode, Ok(LaunchMode::Host))); + } + + #[test] + fn parse_launch_mode_accepts_worker_flag() { + let mode = parse_launch_mode(vec![OsString::from("--worker")]); + assert!(matches!(mode, Ok(LaunchMode::Worker))); + } + + #[test] + fn parse_launch_mode_accepts_help_flag() { + let mode = parse_launch_mode(vec![OsString::from("--help")]); + 
assert!(matches!(mode, Ok(LaunchMode::Help))); + } + + #[test] + fn parse_launch_mode_rejects_unknown_flags() { + let mode = parse_launch_mode(vec![OsString::from("--wat")]); + assert!(mode.is_err()); + } + + #[test] + fn parse_launch_mode_rejects_multiple_args() { + let mode = parse_launch_mode(vec![OsString::from("--worker"), OsString::from("--extra")]); + assert!(mode.is_err()); + } +} diff --git a/crates/adequate-rust-mcp/src/worker/clippy.rs b/crates/adequate-rust-mcp/src/worker/clippy.rs new file mode 100644 index 0000000..89b9b3a --- /dev/null +++ b/crates/adequate-rust-mcp/src/worker/clippy.rs @@ -0,0 +1,196 @@ +use super::{ + CargoJsonMessageWire, DiagnosticEntry, DiagnosticLevel, DiagnosticsReport, PorcelainErrorKind, + RustcSpanWire, SourceFilePath, SourcePoint, SourceRange, porcelain_internal_error, + resolve_clippy_command_spec, resolve_workspace_root_path, run_workspace_command, +}; +use ra_mcp_domain::types::{OneIndexedColumn, OneIndexedLine}; +use rmcp::ErrorData as McpError; +use std::{ + collections::HashMap, + fs, + path::{Path, PathBuf}, +}; + +pub(super) async fn collect_clippy_diagnostics( + file_paths: Vec<SourceFilePath>, +) -> Result<DiagnosticsReport, McpError> { + let workspace_root = resolve_workspace_root_path()?; + let clippy_command = resolve_clippy_command_spec(workspace_root.as_path())?; + let rendered_command = clippy_command.rendered(); + let output = run_workspace_command( + workspace_root.as_path(), + &clippy_command, + rendered_command.as_str(), + ) + .await?; + let diagnostics = parse_clippy_json_stream( + output.standard_output.as_str(), + file_paths.as_slice(), + workspace_root.as_path(), + ); + if !output.status.success() && diagnostics.is_empty() { + return Err(porcelain_internal_error( + format!( + "clippy command failed without matching diagnostics for requested files: {}", + output.standard_error.trim() + ), + PorcelainErrorKind::ToolRuntimeFailure, + Some("run check.py locally to inspect full clippy output"), + false, 
+ )); + } + Ok(DiagnosticsReport { diagnostics }) +} + +pub(super) fn parse_clippy_json_stream( + stdout: &str, + target_file_paths: &[SourceFilePath], + workspace_root: &Path, +) -> Vec<DiagnosticEntry> { + let mut target_by_normalized = HashMap::<PathBuf, SourceFilePath>::new(); + let mut target_order = Vec::<PathBuf>::new(); + for target_path in target_file_paths { + let normalized = normalize_path_for_match(target_path.as_path()); + target_order.push(normalized.clone()); + let _existing = target_by_normalized + .entry(normalized) + .or_insert_with(|| target_path.clone()); + } + let mut diagnostics_by_path = HashMap::<PathBuf, Vec<DiagnosticEntry>>::new(); + for line in stdout.lines() { + let Ok(payload) = serde_json::from_str::<CargoJsonMessageWire>(line) else { + continue; + }; + if payload.reason != "compiler-message" { + continue; + } + let Some(message) = payload.message else { + continue; + }; + let Some((primary, matched_path, matched_target_path)) = select_matching_target_span( + message.spans.as_slice(), + &target_by_normalized, + workspace_root, + ) else { + continue; + }; + let Some(range) = clippy_span_to_source_range(matched_target_path, primary) else { + continue; + }; + diagnostics_by_path + .entry(matched_path) + .or_default() + .push(DiagnosticEntry { + range, + level: clippy_level_to_diagnostic_level(message.level.as_str()), + code: message.code.map(|code| code.code), + message: message.message, + }); + } + for diagnostics in diagnostics_by_path.values_mut() { + sort_diagnostics(diagnostics); + diagnostics.dedup(); + } + let mut fused = Vec::new(); + for normalized_target in target_order { + if let Some(diagnostics) = diagnostics_by_path.get(&normalized_target) { + fused.extend(diagnostics.iter().cloned()); + } + } + fused +} + +pub(super) fn sort_diagnostics(diagnostics: &mut [DiagnosticEntry]) { + diagnostics.sort_by(|left, right| { + let left_rank = diagnostic_level_sort_rank(left.level); + let right_rank = 
diagnostic_level_sort_rank(right.level); + let left_start = left.range.start(); + let right_start = right.range.start(); + let left_end = left.range.end(); + let right_end = right.range.end(); + left_rank + .cmp(&right_rank) + .then_with(|| left_start.line().get().cmp(&right_start.line().get())) + .then_with(|| left_start.column().get().cmp(&right_start.column().get())) + .then_with(|| left_end.line().get().cmp(&right_end.line().get())) + .then_with(|| left_end.column().get().cmp(&right_end.column().get())) + .then_with(|| left.code.cmp(&right.code)) + .then_with(|| left.message.cmp(&right.message)) + }); +} + +pub(super) fn select_matching_target_span<'a>( + spans: &'a [RustcSpanWire], + target_by_normalized: &'a HashMap<PathBuf, SourceFilePath>, + workspace_root: &Path, +) -> Option<(&'a RustcSpanWire, PathBuf, &'a SourceFilePath)> { + spans + .iter() + .find_map(|span| { + if !span.is_primary { + return None; + } + let normalized = normalize_span_file_path(span, workspace_root); + let target = target_by_normalized.get(normalized.as_path())?; + Some((span, normalized, target)) + }) + .or_else(|| { + spans.iter().find_map(|span| { + let normalized = normalize_span_file_path(span, workspace_root); + let target = target_by_normalized.get(normalized.as_path())?; + Some((span, normalized, target)) + }) + }) +} + +pub(super) fn normalize_span_file_path(span: &RustcSpanWire, workspace_root: &Path) -> PathBuf { + let raw = PathBuf::from(span.file_name.as_str()); + let absolute = if raw.is_absolute() { + raw + } else { + workspace_root.join(raw) + }; + normalize_path_for_match(absolute.as_path()) +} + +pub(super) fn normalize_path_for_match(path: &Path) -> PathBuf { + fs::canonicalize(path).unwrap_or_else(|_| path.to_path_buf()) +} + +pub(super) fn clippy_span_to_source_range( + file_path: &SourceFilePath, + span: &RustcSpanWire, +) -> Option<SourceRange> { + let start_line = OneIndexedLine::try_new(span.line_start.max(1)).ok()?; + let start_column = 
OneIndexedColumn::try_new(span.column_start.max(1)).ok()?; + let end_line = OneIndexedLine::try_new(span.line_end.max(span.line_start).max(1)).ok()?; + let mut normalized_end_column = span.column_end.max(1); + if end_line == start_line { + normalized_end_column = normalized_end_column.max(start_column.get()); + } + let end_column = OneIndexedColumn::try_new(normalized_end_column).ok()?; + SourceRange::try_new( + file_path.clone(), + SourcePoint::new(start_line, start_column), + SourcePoint::new(end_line, end_column), + ) + .ok() +} + +pub(super) fn clippy_level_to_diagnostic_level(level: &str) -> DiagnosticLevel { + match level { + "error" => DiagnosticLevel::Error, + "warning" => DiagnosticLevel::Warning, + "help" => DiagnosticLevel::Hint, + _ => DiagnosticLevel::Information, + } +} + +const fn diagnostic_level_sort_rank(level: DiagnosticLevel) -> u8 { + match level { + DiagnosticLevel::Error => 0, + DiagnosticLevel::Warning => 1, + DiagnosticLevel::Information => 2, + DiagnosticLevel::Hint => 3, + } +} diff --git a/crates/adequate-rust-mcp/src/worker/diagnostics.rs b/crates/adequate-rust-mcp/src/worker/diagnostics.rs new file mode 100644 index 0000000..034b534 --- /dev/null +++ b/crates/adequate-rust-mcp/src/worker/diagnostics.rs @@ -0,0 +1,486 @@ +use super::{ + CompactDiagnosticOutput, Content, DiagnosticEntry, DiagnosticLevel, DiagnosticLevelOutput, + DiagnosticOutput, DiagnosticsCountsOutput, DiagnosticsJsonOutput, DiagnosticsModeInput, + DiagnosticsModeOutput, DiagnosticsPathStyleInput, DiagnosticsRenderInput, DiagnosticsReport, + Fault, FaultOutput, FlattenedDiagnostics, HealthOutput, HealthStateOutput, HoverOutput, + HoverPayload, LifecycleSnapshot, LocationOutput, PorcelainErrorKind, RangeOutput, RenameOutput, + RenameReport, SourceFilePath, SourceLocation, SourcePoint, SourceRange, + porcelain_internal_error, resolve_workspace_root_path, +}; +use libmcp::collapse_inline_whitespace; +use rmcp::{ErrorData as McpError, model::CallToolResult}; +use std::{ + 
path::{Path, PathBuf}, + sync::Arc, + time::Duration, +}; + +#[derive(Debug, Clone, Copy, Default)] +pub(super) struct DiagnosticsRenderConfig { + pub(super) mode: DiagnosticsModeInput, + pub(super) render: DiagnosticsRenderInput, + pub(super) max_items: Option<usize>, + pub(super) max_message_chars: Option<usize>, + pub(super) path_style: DiagnosticsPathStyleInput, +} + +impl DiagnosticsRenderConfig { + pub(super) fn without_compact_limits(self) -> Self { + Self { + max_items: None, + max_message_chars: None, + ..self + } + } +} + +pub(super) fn render_diagnostic_path( + file_path: &SourceFilePath, + style: DiagnosticsPathStyleInput, + workspace_root: Option<&Path>, +) -> String { + let absolute = file_path.as_path(); + match style { + DiagnosticsPathStyleInput::Absolute => absolute.display().to_string(), + DiagnosticsPathStyleInput::Relative => workspace_root + .and_then(|root| absolute.strip_prefix(root).ok()) + .map(|relative| relative.display().to_string()) + .filter(|relative| !relative.is_empty()) + .unwrap_or_else(|| absolute.display().to_string()), + } +} + +pub(super) fn maybe_truncate_message(raw: String, max_message_chars: Option<usize>) -> String { + let Some(limit) = max_message_chars else { + return raw; + }; + let message_len = raw.chars().count(); + if message_len <= limit { + return raw; + } + if limit == 0 { + return String::new(); + } + if limit <= 3 { + return raw.chars().take(limit).collect(); + } + let mut truncated = raw + .chars() + .take(limit.saturating_sub(3)) + .collect::<String>(); + truncated.push_str("..."); + truncated +} + +pub(super) fn usize_to_u64_saturating(value: usize) -> u64 { + u64::try_from(value).unwrap_or(u64::MAX) +} + +pub(super) fn elapsed_millis_saturating(elapsed: Duration) -> u64 { + let millis = elapsed.as_millis(); + if millis > u128::from(u64::MAX) { + u64::MAX + } else { + millis as u64 + } +} + +pub(super) fn diagnostics_total_count(entries: &[DiagnosticEntry]) -> u64 { + usize_to_u64_saturating(entries.len()) 
+} + +impl DiagnosticsCountsOutput { + pub(super) fn from_entries(entries: &[DiagnosticEntry]) -> Self { + let (mut e, mut w, mut i, mut h) = (0_u64, 0_u64, 0_u64, 0_u64); + for DiagnosticEntry { level, .. } in entries { + match level { + DiagnosticLevel::Error => e = e.saturating_add(1), + DiagnosticLevel::Warning => w = w.saturating_add(1), + DiagnosticLevel::Information => i = i.saturating_add(1), + DiagnosticLevel::Hint => h = h.saturating_add(1), + } + } + Self { + error_count: e, + warning_count: w, + information_count: i, + hint_count: h, + total_count: diagnostics_total_count(entries), + } + } +} + +impl From<DiagnosticsModeInput> for DiagnosticsModeOutput { + fn from(value: DiagnosticsModeInput) -> Self { + match value { + DiagnosticsModeInput::Compact => Self::Compact, + DiagnosticsModeInput::Full => Self::Full, + DiagnosticsModeInput::Summary => Self::Summary, + } + } +} + +impl CompactDiagnosticOutput { + pub(super) fn from_entry( + value: DiagnosticEntry, + render_config: DiagnosticsRenderConfig, + workspace_root: Option<&Path>, + ) -> Self { + let DiagnosticEntry { + range, + level, + code, + message, + } = value; + let file_path = range.file_path().clone(); + let start = range.start(); + let end = range.end(); + Self { + severity: DiagnosticLevelOutput::from(level), + file_path: render_diagnostic_path(&file_path, render_config.path_style, workspace_root), + start_line: start.line().get(), + start_column: start.column().get(), + end_line: end.line().get(), + end_column: end.column().get(), + code, + message: maybe_truncate_message(message, render_config.max_message_chars), + } + } +} + +pub(super) fn flatten_diagnostics( + diagnostics: Vec<DiagnosticEntry>, + render_config: DiagnosticsRenderConfig, +) -> FlattenedDiagnostics { + let counts = DiagnosticsCountsOutput::from_entries(diagnostics.as_slice()); + let total_count = diagnostics.len(); + let visible_count = render_config + .max_items + .map_or(total_count, |limit| limit.min(total_count)); + let 
overflow_count = total_count.saturating_sub(visible_count); + let workspace_root = diagnostics_workspace_root(render_config.path_style); + let items = diagnostics + .into_iter() + .take(visible_count) + .map(|entry| { + CompactDiagnosticOutput::from_entry(entry, render_config, workspace_root.as_deref()) + }) + .collect(); + FlattenedDiagnostics { + counts, + items, + overflow_count, + } +} + +impl DiagnosticsJsonOutput { + pub(super) fn from_report( + report: DiagnosticsReport, + render_config: DiagnosticsRenderConfig, + ) -> Self { + let DiagnosticsReport { diagnostics } = report; + let mode = DiagnosticsModeOutput::from(render_config.mode); + match render_config.mode { + DiagnosticsModeInput::Summary => Self { + mode, + counts: DiagnosticsCountsOutput::from_entries(diagnostics.as_slice()), + truncated: false, + overflow_count: 0, + items: Vec::new(), + diagnostics: None, + }, + DiagnosticsModeInput::Full => Self { + mode, + counts: DiagnosticsCountsOutput::from_entries(diagnostics.as_slice()), + truncated: false, + overflow_count: 0, + items: Vec::new(), + diagnostics: Some( + diagnostics + .into_iter() + .map(DiagnosticOutput::from) + .collect(), + ), + }, + DiagnosticsModeInput::Compact => { + let FlattenedDiagnostics { + counts, + items, + overflow_count, + } = flatten_diagnostics(diagnostics, render_config); + Self { + mode, + counts, + truncated: overflow_count > 0, + overflow_count: usize_to_u64_saturating(overflow_count), + items, + diagnostics: None, + } + } + } + } +} + +pub(super) fn diagnostics_json_output_schema() -> Arc<rmcp::model::JsonObject> { + rmcp::handler::server::tool::schema_for_output::<DiagnosticsJsonOutput>().unwrap_or_else( + |error| { + tracing::error!( + ?error, + "failed to build diagnostics JSON output schema; falling back to empty schema" + ); + Arc::default() + }, + ) +} + +pub(super) fn diagnostics_call_tool_result( + report: DiagnosticsReport, + render_config: DiagnosticsRenderConfig, +) -> Result<CallToolResult, McpError> { + 
match render_config.render { + DiagnosticsRenderInput::Porcelain => Ok(CallToolResult::success(vec![Content::text( + render_diagnostics_porcelain(report, render_config), + )])), + DiagnosticsRenderInput::Json => { + let output = DiagnosticsJsonOutput::from_report(report, render_config); + let value = serde_json::to_value(output).map_err(|error| { + porcelain_internal_error( + format!("failed to serialize diagnostics JSON output: {error}"), + PorcelainErrorKind::InternalFailure, + Some("retry once; if it persists, inspect worker logs"), + false, + ) + })?; + Ok(CallToolResult::structured(value)) + } + } +} + +pub(super) fn render_diagnostics_porcelain( + report: DiagnosticsReport, + render_config: DiagnosticsRenderConfig, +) -> String { + let DiagnosticsReport { diagnostics } = report; + match render_config.mode { + DiagnosticsModeInput::Summary => porcelain_counts_line( + DiagnosticsCountsOutput::from_entries(diagnostics.as_slice()), + ), + DiagnosticsModeInput::Compact | DiagnosticsModeInput::Full => { + let flattening_config = match render_config.mode { + DiagnosticsModeInput::Compact => render_config, + DiagnosticsModeInput::Full => render_config.without_compact_limits(), + DiagnosticsModeInput::Summary => unreachable!(), + }; + let FlattenedDiagnostics { + counts, + items, + overflow_count, + } = flatten_diagnostics(diagnostics, flattening_config); + let mut lines = vec![porcelain_counts_line(counts)]; + if items.is_empty() { + return lines.join("\n"); + } + lines.push(String::new()); + lines.extend(render_porcelain_diagnostic_groups(items)); + if overflow_count > 0 { + lines.push(String::new()); + lines.push(format!( + "... 
{overflow_count} more diagnostics omitted; rerun with a larger max_items or render=json" + )); + } + lines.join("\n") + } + } +} + +pub(super) fn diagnostics_workspace_root(path_style: DiagnosticsPathStyleInput) -> Option<PathBuf> { + matches!(path_style, DiagnosticsPathStyleInput::Relative) + .then(resolve_workspace_root_path) + .and_then(Result::ok) +} + +pub(super) fn render_porcelain_diagnostic_groups( + entries: Vec<CompactDiagnosticOutput>, +) -> Vec<String> { + let mut groups = Vec::<(String, Vec<CompactDiagnosticOutput>)>::new(); + for entry in entries { + if let Some((group_file_path, group_entries)) = groups.last_mut() + && *group_file_path == entry.file_path + { + group_entries.push(entry); + continue; + } + groups.push((entry.file_path.clone(), vec![entry])); + } + let single_file = groups.len() == 1; + let mut lines = Vec::new(); + for (index, (file_path, group_entries)) in groups.into_iter().enumerate() { + if !single_file { + if index > 0 { + lines.push(String::new()); + } + lines.push(file_path); + } + for entry in group_entries { + lines.push(render_porcelain_diagnostic_line(entry)); + } + } + lines +} + +pub(super) fn render_porcelain_diagnostic_line(entry: CompactDiagnosticOutput) -> String { + let CompactDiagnosticOutput { + severity, + file_path: _file_path, + start_line, + start_column, + end_line, + end_column, + code, + message, + } = entry; + let code = code.map_or(String::new(), |code| format!(" [{code}]")); + let message = porcelain_message(message.as_str()); + format!( + "{} {}:{}-{}:{}{} {}", + diagnostic_level_label(severity), + start_line, + start_column, + end_line, + end_column, + code, + message + ) +} + +pub(super) fn porcelain_counts_line(counts: DiagnosticsCountsOutput) -> String { + let DiagnosticsCountsOutput { + error_count, + warning_count, + information_count, + hint_count, + total_count, + } = counts; + format!( + "{error_count} errors, {warning_count} warnings, {information_count} information, {hint_count} hints 
({total_count} total)" + ) +} + +pub(super) fn diagnostic_level_label(level: DiagnosticLevelOutput) -> &'static str { + match level { + DiagnosticLevelOutput::Error => "error", + DiagnosticLevelOutput::Warning => "warning", + DiagnosticLevelOutput::Information => "information", + DiagnosticLevelOutput::Hint => "hint", + } +} + +pub(super) fn porcelain_message(raw: &str) -> String { + collapse_inline_whitespace(raw) +} + +pub(super) fn location_output(file_path: &SourceFilePath, point: SourcePoint) -> LocationOutput { + LocationOutput { + file_path: file_path.as_path().display().to_string(), + line: point.line().get(), + column: point.column().get(), + } +} + +impl From<SourceLocation> for LocationOutput { + fn from(value: SourceLocation) -> Self { + location_output(value.file_path(), value.point()) + } +} + +impl From<SourceRange> for RangeOutput { + fn from(value: SourceRange) -> Self { + Self { + file_path: value.file_path().as_path().display().to_string(), + start: location_output(value.file_path(), value.start()), + end: location_output(value.file_path(), value.end()), + } + } +} + +impl From<HoverPayload> for HoverOutput { + fn from(value: HoverPayload) -> Self { + Self { + rendered: value.rendered, + range: value.range.map(RangeOutput::from), + uncertain_empty: false, + note: None, + } + } +} + +impl From<DiagnosticLevel> for DiagnosticLevelOutput { + fn from(value: DiagnosticLevel) -> Self { + match value { + DiagnosticLevel::Error => Self::Error, + DiagnosticLevel::Warning => Self::Warning, + DiagnosticLevel::Information => Self::Information, + DiagnosticLevel::Hint => Self::Hint, + } + } +} + +impl From<DiagnosticEntry> for DiagnosticOutput { + fn from(value: DiagnosticEntry) -> Self { + Self { + range: RangeOutput::from(value.range), + level: DiagnosticLevelOutput::from(value.level), + code: value.code, + message: value.message, + } + } +} + +impl From<RenameReport> for RenameOutput { + fn from(value: RenameReport) -> Self { + Self { + files_touched: 
value.files_touched, + edits_applied: value.edits_applied, + } + } +} + +impl From<Fault> for FaultOutput { + fn from(value: Fault) -> Self { + Self { + class: format!("{:?}", value.class), + code: format!("{:?}", value.code), + detail: value.detail.message, + } + } +} + +impl From<LifecycleSnapshot> for HealthOutput { + fn from(value: LifecycleSnapshot) -> Self { + match value { + LifecycleSnapshot::Cold { generation } => Self { + state: HealthStateOutput::Cold, + generation: generation.get(), + last_fault: None, + }, + LifecycleSnapshot::Starting { generation } => Self { + state: HealthStateOutput::Starting, + generation: generation.get(), + last_fault: None, + }, + LifecycleSnapshot::Ready { generation } => Self { + state: HealthStateOutput::Ready, + generation: generation.get(), + last_fault: None, + }, + LifecycleSnapshot::Recovering { + generation, + last_fault, + } => Self { + state: HealthStateOutput::Recovering, + generation: generation.get(), + last_fault: Some(FaultOutput::from(last_fault)), + }, + } + } +} diff --git a/crates/adequate-rust-mcp/src/worker/errors.rs b/crates/adequate-rust-mcp/src/worker/errors.rs new file mode 100644 index 0000000..e51dc62 --- /dev/null +++ b/crates/adequate-rust-mcp/src/worker/errors.rs @@ -0,0 +1,150 @@ +use super::{EngineError, McpError}; +use serde::Serialize; +use serde_json::{Value, json}; + +#[derive(Debug, Clone, Copy, Serialize)] +#[serde(rename_all = "snake_case")] +pub(super) enum PorcelainErrorKind { + InvalidInput, + PathNotFound, + PathNotFile, + PositionOutOfRange, + TransientRetryable, + ToolRuntimeFailure, + InternalFailure, + PorcelainFallback, +} + +pub(super) fn porcelain_error_data( + kind: PorcelainErrorKind, + hint: Option<&'static str>, + retryable: bool, +) -> Value { + let mut data = json!({ + "kind": kind, + }); + if let Some(hint) = hint { + data["hint"] = json!(hint); + } + if retryable { + data["retryable"] = json!(true); + } + data +} + +pub(super) fn porcelain_invalid_params( + message: 
impl Into<String>,
    kind: PorcelainErrorKind,
    hint: Option<&'static str>,
) -> McpError {
    McpError::invalid_params(
        message.into(),
        Some(porcelain_error_data(kind, hint, false)),
    )
}

/// Build an `internal_error` MCP error carrying structured porcelain metadata
/// (`kind`, optional `hint`, optional `retryable` flag).
pub(super) fn porcelain_internal_error(
    message: impl Into<String>,
    kind: PorcelainErrorKind,
    hint: Option<&'static str>,
    retryable: bool,
) -> McpError {
    McpError::internal_error(
        message.into(),
        Some(porcelain_error_data(kind, hint, retryable)),
    )
}

/// Heuristic: does this LSP error message indicate the requested position is
/// outside the file bounds? Matched case-insensitively against known
/// rust-analyzer / line-index error phrasings.
pub(super) fn is_position_out_of_range_lsp_error(message: &str) -> bool {
    let normalized = message.to_ascii_lowercase();
    [
        "invalid offset",
        "line index length",
        "linecol {",
        "position is out of range",
    ]
    .iter()
    .any(|needle| normalized.contains(needle))
}

/// Heuristic: is this LSP failure transient (cancellation or concurrent
/// content change) and therefore worth surfacing as retryable?
pub(super) fn is_transient_lsp_error(code: i64, message: &str) -> bool {
    // LSP reserved error codes: -32800 RequestCancelled, -32801 ContentModified,
    // -32802 ServerCancelled. -32800 was previously only caught via the
    // message-text fallback below; match it by code as well for consistency.
    if matches!(code, -32800 | -32801 | -32802) {
        return true;
    }
    let normalized = message.to_ascii_lowercase();
    [
        "content modified",
        "document changed",
        "server cancelled",
        "request cancelled",
        "request canceled",
    ]
    .iter()
    .any(|needle| normalized.contains(needle))
}

/// Translate an engine-level failure into a porcelain MCP error with a
/// stable `kind`, a short actionable hint, and a retryable flag where the
/// failure is known to be transient.
pub(super) fn map_engine_error(error: EngineError) -> McpError {
    match error {
        EngineError::Io(io_error) => match io_error.kind() {
            std::io::ErrorKind::NotFound => porcelain_invalid_params(
                "requested source file does not exist",
                PorcelainErrorKind::PathNotFound,
                Some("verify the file path or URI"),
            ),
            std::io::ErrorKind::InvalidInput => porcelain_invalid_params(
                "invalid source file input",
                PorcelainErrorKind::InvalidInput,
                Some("use an absolute path, file URI, or workspace-relative path"),
            ),
            // Any other I/O failure: generic fallback, not retryable.
            _ => porcelain_internal_error(
                "source file operation failed",
                PorcelainErrorKind::PorcelainFallback,
                Some("retry once; if it persists, inspect worker logs"),
                false,
            ),
        },
        EngineError::Invariant(_) => porcelain_internal_error(
            "internal invariant check failed while handling request",
PorcelainErrorKind::InternalFailure, + Some("retry once; if it persists, inspect worker logs"), + false, + ), + EngineError::InvalidPayload { .. } => porcelain_invalid_params( + "received unexpected response payload from rust-analyzer", + PorcelainErrorKind::InvalidInput, + Some("retry once; if it persists, refresh rust-analyzer state"), + ), + EngineError::InvalidFileUrl => porcelain_invalid_params( + "source path cannot be represented as a local file URL", + PorcelainErrorKind::InvalidInput, + Some("use a local file path or file:// URI"), + ), + EngineError::Fault(_) => porcelain_internal_error( + "rust-analyzer worker reported a transport/process fault", + PorcelainErrorKind::TransientRetryable, + Some("retry after worker recovery"), + true, + ), + EngineError::LspResponse(error) => { + let code = error.code; + let message = error.message.as_str(); + if is_position_out_of_range_lsp_error(message) { + porcelain_invalid_params( + "requested line/column is outside the file bounds", + PorcelainErrorKind::PositionOutOfRange, + Some("pick a position inside the current file contents"), + ) + } else if is_transient_lsp_error(code, message) { + porcelain_internal_error( + "request could not complete because rust-analyzer cancelled or invalidated it", + PorcelainErrorKind::TransientRetryable, + Some("retry after rust-analyzer settles"), + true, + ) + } else { + porcelain_internal_error( + format!("rust-analyzer request failed (code={code})"), + PorcelainErrorKind::PorcelainFallback, + Some("retry once; if it persists, inspect worker logs"), + false, + ) + } + } + EngineError::InvalidRequest { .. 
} => porcelain_invalid_params( + "failed to encode request payload for rust-analyzer", + PorcelainErrorKind::InvalidInput, + Some("inspect tool arguments and retry"), + ), + } +} diff --git a/crates/adequate-rust-mcp/src/worker/input.rs b/crates/adequate-rust-mcp/src/worker/input.rs new file mode 100644 index 0000000..93245a1 --- /dev/null +++ b/crates/adequate-rust-mcp/src/worker/input.rs @@ -0,0 +1,452 @@ +use super::{ + AbsolutePathInput, AdvancedLspMethod, DiagnosticFileTargetsRaw, DiagnosticsInput, + DiagnosticsRenderConfig, OneIndexedInput, OneIndexedRawInput, PorcelainErrorKind, + SnapshotRenderInput, SourceFilePath, SourcePoint, SourcePosition, SymbolPositionInput, + SymbolQueryInput, porcelain_invalid_params, resolve_workspace_root_path, +}; +use libmcp::{ + PathNormalizeError, normalize_ascii_token as normalize_user_method_token, normalize_local_path, + parse_human_unsigned_u64, saturating_u64_to_usize, +}; +use ra_mcp_domain::types::{OneIndexedColumn, OneIndexedLine}; +use rmcp::ErrorData as McpError; +use serde::{Deserialize, Deserializer}; +use serde_json::Value; + +impl AbsolutePathInput { + pub(super) fn into_source_file_path(self) -> Result<SourceFilePath, McpError> { + let raw = self.0.trim(); + if raw.is_empty() { + return Err(porcelain_invalid_params( + "source file path must be non-empty", + PorcelainErrorKind::InvalidInput, + Some("pass file_path as an absolute path, file URI, or workspace-relative path"), + )); + } + + let workspace_root = resolve_workspace_root_path()?; + let absolute_path = normalize_local_path(raw, Some(workspace_root.as_path())).map_err( + |error| match error { + PathNormalizeError::Empty => porcelain_invalid_params( + "source file path must be non-empty", + PorcelainErrorKind::InvalidInput, + Some( + "pass file_path as an absolute path, file URI, or workspace-relative path", + ), + ), + PathNormalizeError::InvalidFileUri => porcelain_invalid_params( + "file URI is invalid", + PorcelainErrorKind::InvalidInput, + 
Some("ensure file_path is a valid file:// URI"),
                ),
                PathNormalizeError::NonLocalFileUri => porcelain_invalid_params(
                    "file URI must resolve to a local path",
                    PorcelainErrorKind::InvalidInput,
                    Some("use a local file:// URI"),
                ),
            },
        )?;

        SourceFilePath::try_new(absolute_path).map_err(|_| {
            porcelain_invalid_params(
                "source file path is invalid",
                PorcelainErrorKind::InvalidInput,
                Some("use a normalized local source file path"),
            )
        })
    }
}

impl OneIndexedInput {
    /// Clamp a raw index so that 0 (a common slip from zero-indexed callers)
    /// is treated as the first line/column.
    pub(super) const fn normalized_for_one_indexed(self) -> u64 {
        if self.0 == 0 { 1 } else { self.0 }
    }
}

impl<'de> Deserialize<'de> for OneIndexedInput {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Accept plain unsigned numbers, integer-valued finite floats, and
        // human-friendly numeric strings; reject everything else.
        let raw = OneIndexedRawInput::deserialize(deserializer)?;
        let parsed = match raw {
            OneIndexedRawInput::Unsigned(value) => Some(value),
            OneIndexedRawInput::Float(value) => {
                let integral = value.is_finite() && value >= 0.0 && value.fract() == 0.0;
                (integral && value <= u64::MAX as f64).then(|| value as u64)
            }
            OneIndexedRawInput::Text(text) => parse_human_unsigned_u64(text.as_str()),
        };

        match parsed {
            Some(value) => Ok(Self(value)),
            None => Err(serde::de::Error::custom(
                "expected non-negative integer index (number, integer-like float, or numeric string)",
            )),
        }
    }
}

impl SymbolPositionInput {
    /// Convert user-facing one-indexed line/column input into a validated
    /// `SourcePosition`, surfacing porcelain errors for out-of-bounds values.
    pub(super) fn into_source_position(self) -> Result<SourcePosition, McpError> {
        let line_error = || {
            porcelain_invalid_params(
                "line index is out of supported bounds",
                PorcelainErrorKind::InvalidInput,
                Some("line must be >= 1"),
            )
        };
        let column_error = || {
            porcelain_invalid_params(
                "column index is out of supported bounds",
                PorcelainErrorKind::InvalidInput,
                Some("column must be >= 1"),
            )
        };
        let line = OneIndexedLine::try_new(self.line.normalized_for_one_indexed())
            .map_err(|_| line_error())?;
        let column = OneIndexedColumn::try_new(self.column.normalized_for_one_indexed())
            .map_err(|_| column_error())?;
Ok(SourcePosition::new( + self.file_path.into_source_file_path()?, + SourcePoint::new(line, column), + )) + } +} + +impl SymbolQueryInput { + pub(super) fn into_request( + self, + ) -> Result<(SourcePosition, super::CommonRenderConfig), McpError> { + let Self { + position, + render, + path_style, + } = self; + let position = position.into_source_position()?; + Ok(( + position, + super::CommonRenderConfig::from_user_input(render, path_style), + )) + } +} + +impl SnapshotRenderInput { + pub(super) fn render(self) -> super::CommonRenderInput { + self.render.unwrap_or(super::CommonRenderInput::Porcelain) + } +} + +impl DiagnosticsInput { + pub(super) fn into_request( + self, + ) -> Result<(Vec<SourceFilePath>, DiagnosticsRenderConfig), McpError> { + let Self { + file_paths, + mode, + render, + max_items, + max_message_chars, + path_style, + } = self; + if file_paths.is_empty() { + return Err(porcelain_invalid_params( + "at least one file path is required", + PorcelainErrorKind::InvalidInput, + Some("pass file_path or file_paths"), + )); + } + let file_paths = file_paths + .into_iter() + .map(AbsolutePathInput::into_source_file_path) + .collect::<Result<Vec<_>, McpError>>()?; + validate_requested_file_paths(file_paths.as_slice())?; + let defaults = DiagnosticsRenderConfig::default(); + let render_config = DiagnosticsRenderConfig { + mode: mode.unwrap_or(defaults.mode), + render: render.unwrap_or(defaults.render), + max_items: max_items.map(saturating_u64_to_usize), + max_message_chars: max_message_chars.map(saturating_u64_to_usize), + path_style, + }; + Ok((file_paths, render_config)) + } +} + +impl AdvancedLspMethod { + const ALL: [Self; 45] = [ + Self::Hover, + Self::Definition, + Self::References, + Self::Declaration, + Self::TypeDefinition, + Self::Implementation, + Self::Completion, + Self::CompletionResolve, + Self::SignatureHelp, + Self::DocumentSymbol, + Self::WorkspaceSymbol, + Self::WorkspaceSymbolResolve, + Self::PrepareRename, + Self::Rename, + 
Self::CodeAction, + Self::CodeActionResolve, + Self::CodeLens, + Self::CodeLensResolve, + Self::ExecuteCommand, + Self::Formatting, + Self::RangeFormatting, + Self::OnTypeFormatting, + Self::DocumentHighlight, + Self::DocumentLink, + Self::DocumentLinkResolve, + Self::DocumentColor, + Self::ColorPresentation, + Self::LinkedEditingRange, + Self::InlayHint, + Self::InlayHintResolve, + Self::FoldingRange, + Self::SelectionRange, + Self::DocumentDiagnostic, + Self::WorkspaceDiagnostic, + Self::SemanticTokensFull, + Self::SemanticTokensFullDelta, + Self::SemanticTokensRange, + Self::Moniker, + Self::InlineValue, + Self::TypeHierarchyPrepare, + Self::TypeHierarchySupertypes, + Self::TypeHierarchySubtypes, + Self::CallHierarchyPrepare, + Self::CallHierarchyIncomingCalls, + Self::CallHierarchyOutgoingCalls, + ]; + + const fn canonical_input_name(&self) -> &'static str { + match self { + Self::Hover => "hover", + Self::Definition => "definition", + Self::References => "references", + Self::Declaration => "declaration", + Self::TypeDefinition => "type_definition", + Self::Implementation => "implementation", + Self::Completion => "completion", + Self::CompletionResolve => "completion_resolve", + Self::SignatureHelp => "signature_help", + Self::DocumentSymbol => "document_symbol", + Self::WorkspaceSymbol => "workspace_symbol", + Self::WorkspaceSymbolResolve => "workspace_symbol_resolve", + Self::PrepareRename => "prepare_rename", + Self::Rename => "rename", + Self::CodeAction => "code_action", + Self::CodeActionResolve => "code_action_resolve", + Self::CodeLens => "code_lens", + Self::CodeLensResolve => "code_lens_resolve", + Self::ExecuteCommand => "execute_command", + Self::Formatting => "formatting", + Self::RangeFormatting => "range_formatting", + Self::OnTypeFormatting => "on_type_formatting", + Self::DocumentHighlight => "document_highlight", + Self::DocumentLink => "document_link", + Self::DocumentLinkResolve => "document_link_resolve", + Self::DocumentColor => 
"document_color", + Self::ColorPresentation => "color_presentation", + Self::LinkedEditingRange => "linked_editing_range", + Self::InlayHint => "inlay_hint", + Self::InlayHintResolve => "inlay_hint_resolve", + Self::FoldingRange => "folding_range", + Self::SelectionRange => "selection_range", + Self::DocumentDiagnostic => "document_diagnostic", + Self::WorkspaceDiagnostic => "workspace_diagnostic", + Self::SemanticTokensFull => "semantic_tokens_full", + Self::SemanticTokensFullDelta => "semantic_tokens_full_delta", + Self::SemanticTokensRange => "semantic_tokens_range", + Self::Moniker => "moniker", + Self::InlineValue => "inline_value", + Self::TypeHierarchyPrepare => "type_hierarchy_prepare", + Self::TypeHierarchySupertypes => "type_hierarchy_supertypes", + Self::TypeHierarchySubtypes => "type_hierarchy_subtypes", + Self::CallHierarchyPrepare => "call_hierarchy_prepare", + Self::CallHierarchyIncomingCalls => "call_hierarchy_incoming_calls", + Self::CallHierarchyOutgoingCalls => "call_hierarchy_outgoing_calls", + } + } + + fn from_user_input(raw: &str) -> Option<Self> { + let normalized = normalize_user_method_token(raw); + if normalized.is_empty() { + return None; + } + Self::ALL.into_iter().find(|candidate| { + normalized == normalize_user_method_token(candidate.canonical_input_name()) + || normalized == normalize_user_method_token(candidate.as_lsp_method()) + }) + } + + pub(super) const fn as_lsp_method(&self) -> &'static str { + match self { + Self::Hover => "textDocument/hover", + Self::Definition => "textDocument/definition", + Self::References => "textDocument/references", + Self::Declaration => "textDocument/declaration", + Self::TypeDefinition => "textDocument/typeDefinition", + Self::Implementation => "textDocument/implementation", + Self::Completion => "textDocument/completion", + Self::CompletionResolve => "completionItem/resolve", + Self::SignatureHelp => "textDocument/signatureHelp", + Self::DocumentSymbol => "textDocument/documentSymbol", + 
Self::WorkspaceSymbol => "workspace/symbol", + Self::WorkspaceSymbolResolve => "workspaceSymbol/resolve", + Self::PrepareRename => "textDocument/prepareRename", + Self::Rename => "textDocument/rename", + Self::CodeAction => "textDocument/codeAction", + Self::CodeActionResolve => "codeAction/resolve", + Self::CodeLens => "textDocument/codeLens", + Self::CodeLensResolve => "codeLens/resolve", + Self::ExecuteCommand => "workspace/executeCommand", + Self::Formatting => "textDocument/formatting", + Self::RangeFormatting => "textDocument/rangeFormatting", + Self::OnTypeFormatting => "textDocument/onTypeFormatting", + Self::DocumentHighlight => "textDocument/documentHighlight", + Self::DocumentLink => "textDocument/documentLink", + Self::DocumentLinkResolve => "documentLink/resolve", + Self::DocumentColor => "textDocument/documentColor", + Self::ColorPresentation => "textDocument/colorPresentation", + Self::LinkedEditingRange => "textDocument/linkedEditingRange", + Self::InlayHint => "textDocument/inlayHint", + Self::InlayHintResolve => "inlayHint/resolve", + Self::FoldingRange => "textDocument/foldingRange", + Self::SelectionRange => "textDocument/selectionRange", + Self::DocumentDiagnostic => "textDocument/diagnostic", + Self::WorkspaceDiagnostic => "workspace/diagnostic", + Self::SemanticTokensFull => "textDocument/semanticTokens/full", + Self::SemanticTokensFullDelta => "textDocument/semanticTokens/full/delta", + Self::SemanticTokensRange => "textDocument/semanticTokens/range", + Self::Moniker => "textDocument/moniker", + Self::InlineValue => "textDocument/inlineValue", + Self::TypeHierarchyPrepare => "textDocument/prepareTypeHierarchy", + Self::TypeHierarchySupertypes => "typeHierarchy/supertypes", + Self::TypeHierarchySubtypes => "typeHierarchy/subtypes", + Self::CallHierarchyPrepare => "textDocument/prepareCallHierarchy", + Self::CallHierarchyIncomingCalls => "callHierarchy/incomingCalls", + Self::CallHierarchyOutgoingCalls => "callHierarchy/outgoingCalls", + } + } 
+} + +impl<'de> Deserialize<'de> for AdvancedLspMethod { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + let raw = String::deserialize(deserializer)?; + Self::from_user_input(raw.as_str()).ok_or_else(|| { + serde::de::Error::custom(format!( + "unsupported advanced method `{raw}`; use tools/list for canonical method names (snake_case, camelCase, kebab-case, and full LSP paths are accepted)" + )) + }) + } +} + +pub(super) fn deserialize_advanced_lsp_params<'de, D>(deserializer: D) -> Result<Value, D::Error> +where + D: Deserializer<'de>, +{ + let raw = Value::deserialize(deserializer)?; + normalize_advanced_lsp_params(raw).map_err(serde::de::Error::custom) +} + +pub(super) fn deserialize_diagnostic_file_targets<'de, D>( + deserializer: D, +) -> Result<Vec<AbsolutePathInput>, D::Error> +where + D: Deserializer<'de>, +{ + let raw = Option::<DiagnosticFileTargetsRaw>::deserialize(deserializer)?; + let parsed = match raw { + None => Vec::new(), + Some(DiagnosticFileTargetsRaw::Single(path)) => vec![AbsolutePathInput(path)], + Some(DiagnosticFileTargetsRaw::Many(paths)) => { + paths.into_iter().map(AbsolutePathInput).collect::<Vec<_>>() + } + }; + Ok(parsed) +} + +pub(super) fn deserialize_optional_human_unsigned_u64<'de, D>( + deserializer: D, +) -> Result<Option<u64>, D::Error> +where + D: Deserializer<'de>, +{ + let raw = Option::<OneIndexedRawInput>::deserialize(deserializer)?; + let parsed = raw.map(|value| match value { + OneIndexedRawInput::Unsigned(number) => Some(number), + OneIndexedRawInput::Float(number) => { + if number.is_finite() && number >= 0.0 && number.fract() == 0.0 { + let max = u64::MAX as f64; + if number <= max { + Some(number as u64) + } else { + None + } + } else { + None + } + } + OneIndexedRawInput::Text(text) => parse_human_unsigned_u64(text.as_str()), + }); + if parsed.is_some() && parsed.flatten().is_none() { + return Err(serde::de::Error::custom( + "expected non-negative integer limit 
(number, integer-like float, or numeric string)", + )); + } + Ok(parsed.flatten()) +} + +pub(super) fn validate_requested_file_paths(file_paths: &[SourceFilePath]) -> Result<(), McpError> { + for file_path in file_paths { + let raw_path = file_path.as_path(); + if !raw_path.exists() { + return Err(porcelain_invalid_params( + format!("requested file path does not exist: {}", raw_path.display()), + PorcelainErrorKind::PathNotFound, + Some("verify the requested file path"), + )); + } + if !raw_path.is_file() { + return Err(porcelain_invalid_params( + format!("requested path is not a file: {}", raw_path.display()), + PorcelainErrorKind::PathNotFile, + Some("diagnostics require source file paths"), + )); + } + } + Ok(()) +} + +pub(super) fn normalize_advanced_lsp_params(raw: Value) -> Result<Value, String> { + match raw { + Value::String(text) => { + let trimmed = text.trim(); + if trimmed.is_empty() { + return Ok(Value::String(text)); + } + let starts_like_json = trimmed + .as_bytes() + .first() + .is_some_and(|first| matches!(*first, b'{' | b'[')); + if !starts_like_json { + return Ok(Value::String(text)); + } + serde_json::from_str::<Value>(trimmed) + .map_err(|error| format!("invalid JSON payload string: {error}")) + } + other => Ok(other), + } +} diff --git a/crates/adequate-rust-mcp/src/worker/mod.rs b/crates/adequate-rust-mcp/src/worker/mod.rs new file mode 100644 index 0000000..ea3c2ab --- /dev/null +++ b/crates/adequate-rust-mcp/src/worker/mod.rs @@ -0,0 +1,73 @@ +//! Worker MCP server exposing robust rust-analyzer tools. 
+ +mod clippy; +mod diagnostics; +mod errors; +mod input; +mod porcelain; +mod schema; +mod server; +mod telemetry; +#[cfg(test)] +mod tests; +mod workspace; + +pub(crate) use server::run_worker; + +use clippy::collect_clippy_diagnostics; +#[cfg(test)] +use clippy::parse_clippy_json_stream; +use diagnostics::{ + DiagnosticsRenderConfig, diagnostics_call_tool_result, diagnostics_json_output_schema, + elapsed_millis_saturating, +}; +use errors::{ + PorcelainErrorKind, map_engine_error, porcelain_internal_error, porcelain_invalid_params, +}; +use input::{ + deserialize_advanced_lsp_params, deserialize_diagnostic_file_targets, + deserialize_optional_human_unsigned_u64, +}; +use porcelain::{ + CommonRenderConfig, EMPTY_RESULT_MAX_RETRIES, EMPTY_RESULT_RETRY_DELAY_MS, + call_tool_result_with_render, hover_output_from_payload, json_output_schema, + locations_output_from_sources, render_definition_porcelain, render_health_porcelain, + render_hover_porcelain, render_references_porcelain, render_telemetry_porcelain, + should_retry_empty_symbol_result, +}; +use schema::{ + AbsolutePathInput, AdvancedLspMethod, AdvancedLspRequestInput, AdvancedLspResponseOutput, + CargoJsonMessageWire, CommonRenderInput, CompactDiagnosticOutput, DiagnosticFileTargetsRaw, + DiagnosticLevelOutput, DiagnosticOutput, DiagnosticsCountsOutput, DiagnosticsInput, + DiagnosticsJsonOutput, DiagnosticsModeInput, DiagnosticsModeOutput, DiagnosticsPathStyleInput, + DiagnosticsRenderInput, FaultOutput, FixEverythingInput, FixEverythingOutput, FixStepOutput, + FlattenedDiagnostics, HealthOutput, HealthStateOutput, HoverOutput, LocationOutput, + LocationsOutput, MethodTelemetryOutput, OneIndexedInput, OneIndexedRawInput, PathStyleInput, + RangeOutput, RenameInput, RenameOutput, RustcSpanWire, SnapshotRenderInput, + SymbolPositionInput, SymbolQueryInput, TelemetryOutput, TelemetryTotalsOutput, +}; +use telemetry::ToolTelemetryState; +#[cfg(test)] +use workspace::{ + read_workspace_tool_command, 
read_workspace_tool_metadata, resolve_workspace_fix_command_specs, +}; +use workspace::{ + resolve_clippy_command_spec, resolve_workspace_root_path, run_workspace_command, + run_workspace_fix_everything, +}; + +use ra_mcp_domain::{ + fault::Fault, + lifecycle::LifecycleSnapshot, + types::{ + SourceFilePath, SourceLocation, SourcePoint, SourcePosition, SourceRange, WorkspaceRoot, + }, +}; +use ra_mcp_engine::{ + DiagnosticEntry, DiagnosticLevel, DiagnosticsReport, Engine, EngineConfig, EngineError, + HoverPayload, MethodTelemetrySnapshot, RenameReport, TelemetrySnapshot, TelemetryTotals, +}; +use rmcp::{ + ErrorData as McpError, Json, ServerHandler, ServiceExt, + model::{Content, ServerInfo}, +}; diff --git a/crates/adequate-rust-mcp/src/worker/porcelain.rs b/crates/adequate-rust-mcp/src/worker/porcelain.rs new file mode 100644 index 0000000..9efbd15 --- /dev/null +++ b/crates/adequate-rust-mcp/src/worker/porcelain.rs @@ -0,0 +1,488 @@ +use super::{ + CommonRenderInput, FaultOutput, HealthOutput, HoverOutput, HoverPayload, LocationOutput, + LocationsOutput, McpError, PathStyleInput, PorcelainErrorKind, RangeOutput, SourceFilePath, + SourceLocation, SourcePoint, SourceRange, TelemetryOutput, porcelain_internal_error, + resolve_workspace_root_path, +}; +use libmcp::{RenderMode, collapse_inline_whitespace}; +use rmcp::model::{CallToolResult, Content}; +use schemars::JsonSchema; +use serde::Serialize; +use std::{ + fs, + path::{Path, PathBuf}, + sync::Arc, +}; + +const PORCELAIN_LOCATION_LIMIT: usize = 8; +const PORCELAIN_SNIPPET_CHAR_LIMIT: usize = 120; +pub(super) const EMPTY_RESULT_WARMUP_WINDOW_MS: u64 = 5_000; +pub(super) const EMPTY_RESULT_RETRY_DELAY_MS: u64 = 500; +pub(super) const EMPTY_RESULT_MAX_RETRIES: u8 = 4; +pub(super) const UNCERTAIN_EMPTY_NOTE: &str = + "result may be incomplete during indexing; retry in a few seconds"; + +pub(super) type CommonRenderConfig = libmcp::RenderConfig; + +pub(super) fn json_output_schema<T>() -> Arc<rmcp::model::JsonObject> 
+where + T: JsonSchema + 'static, +{ + rmcp::handler::server::tool::schema_for_output::<T>().unwrap_or_else(|error| { + tracing::error!( + ?error, + "failed to build JSON output schema; falling back to empty schema" + ); + Arc::default() + }) +} + +pub(super) fn call_tool_result_with_render<T>( + value: T, + render: CommonRenderInput, + porcelain: String, +) -> Result<CallToolResult, McpError> +where + T: Serialize, +{ + match render { + RenderMode::Porcelain => Ok(CallToolResult::success(vec![Content::text(porcelain)])), + RenderMode::Json => { + let value = serde_json::to_value(value).map_err(|error| { + porcelain_internal_error( + format!("failed to serialize JSON output: {error}"), + PorcelainErrorKind::InternalFailure, + Some("retry once; if it persists, inspect worker logs"), + false, + ) + })?; + Ok(CallToolResult::structured(value)) + } + } +} + +pub(super) fn should_retry_empty_symbol_result(uptime_ms: u64) -> bool { + uptime_ms < EMPTY_RESULT_WARMUP_WINDOW_MS +} + +pub(super) fn uncertain_empty_note(uncertain_empty: bool) -> Option<String> { + uncertain_empty.then(|| UNCERTAIN_EMPTY_NOTE.to_owned()) +} + +pub(super) fn locations_output_from_sources( + locations: Vec<SourceLocation>, + render_config: CommonRenderConfig, + uncertain_empty: bool, +) -> LocationsOutput { + LocationsOutput { + locations: locations + .into_iter() + .map(|value| location_output(value.file_path(), value.point(), render_config)) + .collect(), + uncertain_empty, + note: uncertain_empty_note(uncertain_empty), + } +} + +pub(super) fn hover_output_from_payload( + payload: HoverPayload, + render_config: CommonRenderConfig, + uncertain_empty: bool, +) -> HoverOutput { + HoverOutput { + rendered: payload.rendered, + range: payload + .range + .map(|value| range_output(value, render_config)), + uncertain_empty, + note: uncertain_empty_note(uncertain_empty), + } +} + +pub(super) fn render_hover_porcelain( + payload: &HoverPayload, + render_config: CommonRenderConfig, + uncertain_empty: 
bool,
) -> String {
    let summary = payload
        .rendered
        .as_deref()
        .and_then(hover_summary_line)
        .unwrap_or_else(|| "no hover information".to_owned());
    let mut lines = vec![summary];
    if let Some(range) = payload.range.as_ref() {
        lines.push(render_range_brief(range, render_config));
    }
    if uncertain_empty {
        lines.push(format!("note: {UNCERTAIN_EMPTY_NOTE}"));
    }
    lines.join("\n")
}

/// Porcelain rendering for `definition` results (shared location-list format).
pub(super) fn render_definition_porcelain(
    locations: &[SourceLocation],
    render_config: CommonRenderConfig,
    uncertain_empty: bool,
) -> String {
    render_location_list_porcelain("definition", locations, render_config, uncertain_empty)
}

/// Porcelain rendering for `references` results (shared location-list format).
pub(super) fn render_references_porcelain(
    locations: &[SourceLocation],
    render_config: CommonRenderConfig,
    uncertain_empty: bool,
) -> String {
    render_location_list_porcelain("reference", locations, render_config, uncertain_empty)
}

/// One-line health summary: `<state> gen=<n>`, plus the last fault if any.
pub(super) fn render_health_porcelain(output: &HealthOutput) -> String {
    let HealthOutput {
        state,
        generation,
        last_fault,
    } = output;
    let mut rendered = format!("{} gen={generation}", render_health_state(state.clone()));
    if let Some(fault) = last_fault {
        rendered.push_str(&format!(" last_fault={}", render_fault_brief(fault)));
    }
    rendered
}

/// Multi-line telemetry summary: header, totals, the three slowest busy
/// methods, and the last fault if any.
pub(super) fn render_telemetry_porcelain(output: &TelemetryOutput) -> String {
    let TelemetryOutput {
        uptime_ms,
        state,
        generation,
        consecutive_failures,
        restart_count,
        totals,
        methods,
        last_fault,
    } = output;

    let mut out = Vec::new();
    out.push(format!(
        "{} gen={} uptime={} restarts={} failures={}",
        render_health_state(state.clone()),
        generation,
        render_duration_ms(*uptime_ms),
        restart_count,
        consecutive_failures
    ));
    out.push(format!(
        "totals requests={} ok={} response_errors={} transport_faults={} retries={}",
        totals.request_count,
        totals.success_count,
        totals.response_error_count,
        totals.transport_fault_count,
        totals.retry_count
    ));

    // Rank by average latency (desc), then traffic (desc), then name (asc).
    let mut ranked = methods.clone();
    ranked.sort_by(|a, b| {
        b.avg_latency_ms
            .cmp(&a.avg_latency_ms)
            .then_with(|| b.request_count.cmp(&a.request_count))
            .then_with(|| a.method.cmp(&b.method))
    });
    let top: Vec<_> = ranked
        .into_iter()
        .filter(|m| m.request_count > 0)
        .take(3)
        .collect();
    if !top.is_empty() {
        out.push("slowest".to_owned());
        for m in top {
            out.push(format!(
                "- {} avg={} max={} n={}",
                m.method,
                render_duration_ms(m.avg_latency_ms),
                render_duration_ms(m.max_latency_ms),
                m.request_count
            ));
        }
    }
    if let Some(fault) = last_fault {
        out.push(format!("last_fault {}", render_fault_brief(fault)));
    }
    out.join("\n")
}

/// Convert a source point into the JSON output shape (path rendered per style).
pub(super) fn location_output(
    file_path: &SourceFilePath,
    point: SourcePoint,
    render_config: CommonRenderConfig,
) -> LocationOutput {
    LocationOutput {
        file_path: render_source_path(file_path, render_config.path_style),
        line: point.line().get(),
        column: point.column().get(),
    }
}

/// Convert a source range into the JSON output shape.
pub(super) fn range_output(value: SourceRange, render_config: CommonRenderConfig) -> RangeOutput {
    RangeOutput {
        file_path: render_source_path(value.file_path(), render_config.path_style),
        start: location_output(value.file_path(), value.start(), render_config),
        end: location_output(value.file_path(), value.end(), render_config),
    }
}

fn render_location_list_porcelain(
    noun: &str,
    locations: &[SourceLocation],
    render_config: CommonRenderConfig,
    uncertain_empty: bool,
) -> String {
    let count = locations.len();
    let header = format!("{count} {}{}", noun, if count == 1 { "" } else { "s" });
    if count == 0 {
        if uncertain_empty {
            return format!("{header}\nnote: {UNCERTAIN_EMPTY_NOTE}");
        }
        return header;
    }

    let workspace_root = common_workspace_root(render_config.path_style);
    let mut sorted = locations.to_vec();
    sorted.sort_by(|left, right| {
        location_sort_key(left, workspace_root.as_deref())
            .cmp(&location_sort_key(right, workspace_root.as_deref()))
    });

    let visible = sorted
        .into_iter()
        .take(PORCELAIN_LOCATION_LIMIT)
        .collect::<Vec<_>>();
    let omitted = count.saturating_sub(visible.len());

    let single_file = visible
        .first()
        .map(|first| {
            visible
                .iter()
                .all(|location| location.file_path() == first.file_path())
        })
        .unwrap_or(false);

    let mut lines = vec![header, String::new()];
    if single_file {
        lines.extend(
            visible.into_iter().map(|location| {
                format!("- {}", render_location_entry(location, render_config, true))
            }),
        );
    } else {
        let mut current_file: Option<String> = None;
        for location in visible {
            let file = render_source_path(location.file_path(), render_config.path_style);
            if current_file.as_deref() != Some(file.as_str()) {
                if current_file.is_some() {
                    lines.push(String::new());
                }
                lines.push(file.clone());
                current_file = Some(file);
            }
            lines.push(format!(
                "- {}",
                render_location_entry(location, render_config, false)
            ));
        }
    }
    if omitted > 0 {
        lines.push(String::new());
        lines.push(format!("... 
+{omitted} more")); + } + lines.join("\n") +} + +fn render_location_entry( + location: SourceLocation, + render_config: CommonRenderConfig, + omit_path: bool, +) -> String { + let point = location.point(); + let prefix = if omit_path { + format!("{}:{}", point.line().get(), point.column().get()) + } else { + format!( + "{}:{}:{}", + render_source_path(location.file_path(), render_config.path_style), + point.line().get(), + point.column().get() + ) + }; + let snippet = source_line_snippet(location.file_path(), point.line().get()) + .map(truncate_compact) + .unwrap_or_else(|| render_source_path(location.file_path(), render_config.path_style)); + format!("{prefix} {snippet}") +} + +fn render_range_brief(range: &SourceRange, render_config: CommonRenderConfig) -> String { + format!( + "{}:{}:{}-{}:{}", + render_source_path(range.file_path(), render_config.path_style), + range.start().line().get(), + range.start().column().get(), + range.end().line().get(), + range.end().column().get() + ) +} + +fn render_health_state(state: super::HealthStateOutput) -> &'static str { + match state { + super::HealthStateOutput::Cold => "cold", + super::HealthStateOutput::Starting => "starting", + super::HealthStateOutput::Ready => "ready", + super::HealthStateOutput::Recovering => "recovering", + } +} + +fn render_fault_brief(fault: &FaultOutput) -> String { + format!( + "{}:{} {}", + fault.class.to_ascii_lowercase(), + fault.code.to_ascii_lowercase(), + squash_whitespace(fault.detail.as_str()) + ) +} + +fn render_duration_ms(duration_ms: u64) -> String { + if duration_ms < 1_000 { + return format!("{duration_ms}ms"); + } + let seconds = duration_ms / 1_000; + let millis = duration_ms % 1_000; + if seconds < 60 { + return format!("{seconds}.{millis:03}s"); + } + let minutes = seconds / 60; + let remainder_seconds = seconds % 60; + format!("{minutes}m{remainder_seconds:02}s") +} + +fn hover_summary_line(rendered: &str) -> Option<String> { + let lines = rendered + .lines() + 
.map(str::trim) + .filter(|line| !line.is_empty()) + .filter(|line| !matches!(*line, "```" | "```rust" | "---")) + .collect::<Vec<_>>(); + if lines.is_empty() { + return None; + } + let best = lines + .iter() + .copied() + .find(|line| looks_like_signature(line)) + .unwrap_or(lines[0]); + Some(truncate_compact(best.to_owned())) +} + +fn looks_like_signature(line: &str) -> bool { + [ + "fn ", "struct ", "enum ", "trait ", "type ", "const ", "let ", "impl ", "mod ", "pub ", + ] + .iter() + .any(|token| line.contains(token)) + || line.contains(": ") +} + +fn source_line_snippet(file_path: &SourceFilePath, line: u64) -> Option<String> { + let line_index = usize::try_from(line.saturating_sub(1)).ok()?; + let text = fs::read_to_string(file_path.as_path()).ok()?; + let raw = text.lines().nth(line_index)?; + let squashed = squash_whitespace(raw); + if squashed.is_empty() { + return None; + } + Some(squashed) +} + +fn truncate_compact(raw: String) -> String { + let char_count = raw.chars().count(); + if char_count <= PORCELAIN_SNIPPET_CHAR_LIMIT { + return raw; + } + raw.chars() + .take(PORCELAIN_SNIPPET_CHAR_LIMIT.saturating_sub(3)) + .chain("...".chars()) + .collect() +} + +fn squash_whitespace(raw: &str) -> String { + collapse_inline_whitespace(raw) +} + +fn common_workspace_root(path_style: PathStyleInput) -> Option<PathBuf> { + matches!(path_style, PathStyleInput::Relative) + .then(resolve_workspace_root_path) + .and_then(Result::ok) +} + +fn location_sort_key( + location: &SourceLocation, + workspace_root: Option<&Path>, +) -> (u8, String, u64, u64) { + let file_path = location.file_path(); + let rendered = + render_source_path_with_root(file_path, PathStyleInput::Relative, workspace_root); + ( + path_priority(file_path.as_path(), workspace_root), + rendered, + location.point().line().get(), + location.point().column().get(), + ) +} + +fn render_source_path(file_path: &SourceFilePath, style: PathStyleInput) -> String { + let workspace_root = 
common_workspace_root(style); + render_source_path_with_root(file_path, style, workspace_root.as_deref()) +} + +fn render_source_path_with_root( + file_path: &SourceFilePath, + style: PathStyleInput, + workspace_root: Option<&Path>, +) -> String { + let absolute = file_path.as_path(); + match style { + PathStyleInput::Absolute => absolute.display().to_string(), + PathStyleInput::Relative => workspace_root + .and_then(|root| absolute.strip_prefix(root).ok()) + .map(|relative| relative.display().to_string()) + .filter(|relative| !relative.is_empty()) + .unwrap_or_else(|| shorten_external_path(absolute)), + } +} + +fn path_priority(path: &Path, workspace_root: Option<&Path>) -> u8 { + if workspace_root + .and_then(|root| path.strip_prefix(root).ok()) + .is_some() + { + return 0; + } + let text = path.display().to_string(); + if text.contains("/.cargo/registry/src/") { + return 1; + } + if text.contains("/rust/library/") || text.contains("/rustc/") { + return 2; + } + 3 +} + +fn shorten_external_path(path: &Path) -> String { + let text = path.display().to_string(); + if let Some((_, suffix)) = text.split_once("/.cargo/registry/src/") { + return format!("[cargo]/{suffix}"); + } + if let Some((_, suffix)) = text.split_once("/rust/library/") { + return format!("[rust]/library/{suffix}"); + } + if let Some((_, suffix)) = text.split_once("/rustc/") { + return format!("[rustc]/{suffix}"); + } + text +} diff --git a/crates/adequate-rust-mcp/src/worker/schema.rs b/crates/adequate-rust-mcp/src/worker/schema.rs new file mode 100644 index 0000000..7ea8067 --- /dev/null +++ b/crates/adequate-rust-mcp/src/worker/schema.rs @@ -0,0 +1,496 @@ +use super::{ + deserialize_advanced_lsp_params, deserialize_diagnostic_file_targets, + deserialize_optional_human_unsigned_u64, +}; +use libmcp::{PathStyle, RenderMode}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +#[derive(Debug, Clone, Deserialize, JsonSchema)] +#[serde(transparent)] +pub(super) 
struct AbsolutePathInput(pub(super) String); + +#[derive(Debug, Clone, JsonSchema)] +#[serde(transparent)] +pub(super) struct OneIndexedInput(pub(super) u64); + +#[derive(Debug, Clone, Deserialize, JsonSchema)] +pub(super) struct SymbolPositionInput { + #[schemars( + description = "Absolute path, file URI, or project-relative path. Also accepts aliases: filePath, path, uri." + )] + #[serde(alias = "filePath", alias = "path", alias = "uri")] + pub(super) file_path: AbsolutePathInput, + #[schemars(description = "One-indexed line. Also accepts aliases: lineNumber, line_number.")] + #[serde(alias = "lineNumber", alias = "line_number")] + pub(super) line: OneIndexedInput, + #[schemars(description = "One-indexed column. Also accepts aliases: character, char, col.")] + #[serde(alias = "character", alias = "char", alias = "col")] + pub(super) column: OneIndexedInput, +} + +#[derive(Debug, Clone, Deserialize, JsonSchema)] +#[serde(deny_unknown_fields)] +pub(super) struct SymbolQueryInput { + #[serde(flatten)] + pub(super) position: SymbolPositionInput, + #[schemars( + description = "Output rendering. Values: porcelain or json. Defaults to porcelain. Also accepts aliases: output, output_format, outputFormat, render_mode, renderMode.", + extend("examples" = ["porcelain", "json"]) + )] + #[serde( + default, + alias = "output", + alias = "output_format", + alias = "outputFormat", + alias = "render_mode", + alias = "renderMode" + )] + pub(super) render: Option<CommonRenderInput>, + #[schemars( + description = "Path rendering style. Values: absolute or relative. Defaults to relative in porcelain mode and absolute in json mode. 
Also accepts aliases: pathStyle, path_style and value alias rel.", + extend("examples" = ["absolute", "relative"]) + )] + #[serde(default, alias = "pathStyle", alias = "path_style")] + pub(super) path_style: Option<PathStyleInput>, +} + +#[derive(Debug, Clone, Deserialize, JsonSchema)] +#[serde(deny_unknown_fields)] +pub(super) struct DiagnosticsInput { + #[schemars( + description = "One or many target files. Also accepts aliases: file_path, filePath, path, uri, filePaths, files, paths, uris." + )] + #[serde( + default, + alias = "file_path", + alias = "filePath", + alias = "path", + alias = "uri", + alias = "file_paths", + alias = "filePaths", + alias = "paths", + alias = "uris", + alias = "files", + deserialize_with = "deserialize_diagnostic_file_targets" + )] + pub(super) file_paths: Vec<AbsolutePathInput>, + #[schemars( + description = "Detail mode. Values: compact, full, summary. Accepts full aliases: raw, verbose, counts.", + extend("examples" = ["compact", "full", "summary"]) + )] + #[serde(default)] + pub(super) mode: Option<DiagnosticsModeInput>, + #[schemars( + description = "Output rendering. Values: porcelain or json. Defaults to porcelain. Also accepts aliases: output, output_format, outputFormat, render_mode, renderMode.", + extend("examples" = ["porcelain", "json"]) + )] + #[serde( + default, + alias = "output", + alias = "output_format", + alias = "outputFormat", + alias = "render_mode", + alias = "renderMode" + )] + pub(super) render: Option<DiagnosticsRenderInput>, + #[schemars( + description = "Optional compact item limit. Also accepts aliases: limit, maxItems." + )] + #[serde( + default, + alias = "limit", + alias = "maxItems", + deserialize_with = "deserialize_optional_human_unsigned_u64" + )] + pub(super) max_items: Option<u64>, + #[schemars( + description = "Optional per-message character limit in compact mode. Also accepts alias: maxMessageChars." 
+ )] + #[serde( + default, + alias = "maxMessageChars", + alias = "max_message_chars", + deserialize_with = "deserialize_optional_human_unsigned_u64" + )] + pub(super) max_message_chars: Option<u64>, + #[schemars( + description = "Path rendering style. Values: absolute or relative. Also accepts aliases: pathStyle, path_style and value alias rel.", + extend("examples" = ["absolute", "relative"]) + )] + #[serde(default, alias = "pathStyle", alias = "path_style")] + pub(super) path_style: DiagnosticsPathStyleInput, +} + +#[derive(Debug, Clone, Copy, Deserialize, JsonSchema, Default)] +#[serde(rename_all = "snake_case")] +pub(super) enum DiagnosticsModeInput { + #[default] + Compact, + #[serde(alias = "raw", alias = "verbose")] + Full, + #[serde(alias = "counts")] + Summary, +} + +pub(super) type DiagnosticsRenderInput = RenderMode; +pub(super) type CommonRenderInput = RenderMode; + +#[derive(Debug, Clone, Copy, Deserialize, JsonSchema, Default)] +#[serde(rename_all = "snake_case")] +pub(super) enum DiagnosticsPathStyleInput { + #[default] + Absolute, + #[serde(alias = "rel")] + Relative, +} + +pub(super) type PathStyleInput = PathStyle; + +#[derive(Debug, Clone, Deserialize, JsonSchema)] +pub(super) struct RenameInput { + #[schemars( + description = "Absolute path, file URI, or project-relative path. Also accepts aliases: filePath, path, uri." + )] + #[serde(alias = "filePath", alias = "path", alias = "uri")] + pub(super) file_path: AbsolutePathInput, + #[schemars(description = "One-indexed line. Also accepts aliases: lineNumber, line_number.")] + #[serde(alias = "lineNumber", alias = "line_number")] + pub(super) line: OneIndexedInput, + #[schemars(description = "One-indexed column. Also accepts aliases: character, char, col.")] + #[serde(alias = "character", alias = "char", alias = "col")] + pub(super) column: OneIndexedInput, + #[schemars( + description = "Replacement symbol name. Also accepts aliases: newName, name, rename_to, renameTo." 
+ )] + #[serde( + alias = "newName", + alias = "name", + alias = "rename_to", + alias = "renameTo" + )] + pub(super) new_name: String, +} + +#[derive(Debug, Clone, Deserialize, JsonSchema)] +pub(super) struct AdvancedLspRequestInput { + #[schemars( + description = "Method accepts snake_case, camelCase, kebab-case, or full LSP method paths for supported operations." + )] + #[serde(alias = "lsp_method", alias = "lspMethod")] + pub(super) method: AdvancedLspMethod, + #[schemars(description = "LSP params payload. Also accepts aliases: arguments, payload.")] + #[serde( + alias = "arguments", + alias = "payload", + deserialize_with = "deserialize_advanced_lsp_params" + )] + pub(super) params: Value, +} + +#[derive(Debug, Clone, Deserialize, JsonSchema, Default)] +pub(super) struct FixEverythingInput {} + +#[derive(Debug, Clone, Deserialize, JsonSchema, Default)] +#[serde(deny_unknown_fields)] +pub(super) struct SnapshotRenderInput { + #[schemars( + description = "Output rendering. Values: porcelain or json. Defaults to porcelain. Also accepts aliases: output, output_format, outputFormat, render_mode, renderMode.", + extend("examples" = ["porcelain", "json"]) + )] + #[serde( + default, + alias = "output", + alias = "output_format", + alias = "outputFormat", + alias = "render_mode", + alias = "renderMode" + )] + pub(super) render: Option<CommonRenderInput>, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, JsonSchema)] +#[serde(rename_all = "snake_case")] +#[schemars( + description = "Canonical snake_case method names. Input also accepts camelCase, kebab-case, and full LSP method paths." 
+)] +pub(super) enum AdvancedLspMethod { + Hover, + Definition, + References, + Declaration, + TypeDefinition, + Implementation, + Completion, + CompletionResolve, + SignatureHelp, + DocumentSymbol, + WorkspaceSymbol, + WorkspaceSymbolResolve, + PrepareRename, + Rename, + CodeAction, + CodeActionResolve, + CodeLens, + CodeLensResolve, + ExecuteCommand, + Formatting, + RangeFormatting, + OnTypeFormatting, + DocumentHighlight, + DocumentLink, + DocumentLinkResolve, + DocumentColor, + ColorPresentation, + LinkedEditingRange, + InlayHint, + InlayHintResolve, + FoldingRange, + SelectionRange, + DocumentDiagnostic, + WorkspaceDiagnostic, + SemanticTokensFull, + SemanticTokensFullDelta, + SemanticTokensRange, + Moniker, + InlineValue, + TypeHierarchyPrepare, + TypeHierarchySupertypes, + TypeHierarchySubtypes, + CallHierarchyPrepare, + CallHierarchyIncomingCalls, + CallHierarchyOutgoingCalls, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +pub(super) struct LocationOutput { + pub(super) file_path: String, + pub(super) line: u64, + pub(super) column: u64, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +pub(super) struct LocationsOutput { + pub(super) locations: Vec<LocationOutput>, + pub(super) uncertain_empty: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub(super) note: Option<String>, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +pub(super) struct RangeOutput { + pub(super) file_path: String, + pub(super) start: LocationOutput, + pub(super) end: LocationOutput, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +pub(super) struct HoverOutput { + pub(super) rendered: Option<String>, + pub(super) range: Option<RangeOutput>, + pub(super) uncertain_empty: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub(super) note: Option<String>, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +pub(super) struct DiagnosticsJsonOutput { + pub(super) mode: DiagnosticsModeOutput, + pub(super) counts: DiagnosticsCountsOutput, + 
pub(super) truncated: bool, + pub(super) overflow_count: u64, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub(super) items: Vec<CompactDiagnosticOutput>, + #[serde(skip_serializing_if = "Option::is_none")] + pub(super) diagnostics: Option<Vec<DiagnosticOutput>>, +} + +#[derive(Debug, Clone, Copy, Serialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub(super) enum DiagnosticsModeOutput { + Compact, + Full, + Summary, +} + +#[derive(Debug, Clone, Copy, Serialize, JsonSchema)] +pub(super) struct DiagnosticsCountsOutput { + pub(super) error_count: u64, + pub(super) warning_count: u64, + pub(super) information_count: u64, + pub(super) hint_count: u64, + pub(super) total_count: u64, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +pub(super) struct CompactDiagnosticOutput { + pub(super) severity: DiagnosticLevelOutput, + pub(super) file_path: String, + pub(super) start_line: u64, + pub(super) start_column: u64, + pub(super) end_line: u64, + pub(super) end_column: u64, + #[serde(skip_serializing_if = "Option::is_none")] + pub(super) code: Option<String>, + pub(super) message: String, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +pub(super) struct DiagnosticOutput { + pub(super) range: RangeOutput, + pub(super) level: DiagnosticLevelOutput, + pub(super) code: Option<String>, + pub(super) message: String, +} + +#[derive(Debug, Clone)] +pub(super) struct FlattenedDiagnostics { + pub(super) counts: DiagnosticsCountsOutput, + pub(super) items: Vec<CompactDiagnosticOutput>, + pub(super) overflow_count: usize, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub(super) enum DiagnosticLevelOutput { + Error, + Warning, + Information, + Hint, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +pub(super) struct RenameOutput { + pub(super) files_touched: u64, + pub(super) edits_applied: u64, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +pub(super) struct 
AdvancedLspResponseOutput { + pub(super) result: Value, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +pub(super) struct FixEverythingOutput { + pub(super) success: bool, + pub(super) workspace_root: String, + pub(super) steps: Vec<FixStepOutput>, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +pub(super) struct FixStepOutput { + pub(super) step_name: String, + pub(super) command: Vec<String>, + pub(super) success: bool, + pub(super) exit_code: Option<i32>, + #[serde(skip_serializing_if = "Option::is_none")] + pub(super) standard_output_excerpt: Option<String>, + #[serde(skip_serializing_if = "Option::is_none")] + pub(super) standard_error_excerpt: Option<String>, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +pub(super) struct HealthOutput { + pub(super) state: HealthStateOutput, + pub(super) generation: u64, + pub(super) last_fault: Option<FaultOutput>, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub(super) enum HealthStateOutput { + Cold, + Starting, + Ready, + Recovering, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +pub(super) struct FaultOutput { + pub(super) class: String, + pub(super) code: String, + pub(super) detail: String, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +pub(super) struct TelemetryOutput { + pub(super) uptime_ms: u64, + pub(super) state: HealthStateOutput, + pub(super) generation: u64, + pub(super) consecutive_failures: u32, + pub(super) restart_count: u64, + pub(super) totals: TelemetryTotalsOutput, + pub(super) methods: Vec<MethodTelemetryOutput>, + pub(super) last_fault: Option<FaultOutput>, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +pub(super) struct TelemetryTotalsOutput { + pub(super) request_count: u64, + pub(super) success_count: u64, + pub(super) response_error_count: u64, + pub(super) transport_fault_count: u64, + pub(super) retry_count: u64, +} + +#[derive(Debug, Clone, Serialize, JsonSchema)] +pub(super) struct MethodTelemetryOutput { 
    pub(super) method: String,
    pub(super) request_count: u64,
    pub(super) success_count: u64,
    pub(super) response_error_count: u64,
    pub(super) transport_fault_count: u64,
    pub(super) retry_count: u64,
    // Latency fields are milliseconds; `last_latency_ms` stays `None` until a
    // first request completes.
    pub(super) last_latency_ms: Option<u64>,
    pub(super) max_latency_ms: u64,
    pub(super) avg_latency_ms: u64,
    pub(super) last_error: Option<String>,
}

// Raw wire shape for a one-indexed position value. Untagged: serde tries the
// variants in declaration order — u64 first, then f64, then string.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
pub(super) enum OneIndexedRawInput {
    Unsigned(u64),
    Float(f64),
    Text(String),
}

// Accepts either a single path string or a list of path strings for the
// diagnostics file-target input (see `deserialize_diagnostic_file_targets`).
#[derive(Debug, Deserialize)]
#[serde(untagged)]
pub(super) enum DiagnosticFileTargetsRaw {
    Single(String),
    Many(Vec<String>),
}

// Minimal subset of a `cargo --message-format=json` record: the `reason`
// discriminator plus the embedded rustc diagnostic, when present.
#[derive(Debug, Deserialize)]
pub(super) struct CargoJsonMessageWire {
    pub(super) reason: String,
    #[serde(default)]
    pub(super) message: Option<RustcDiagnosticWire>,
}

// Minimal subset of a rustc JSON diagnostic: message text, level string,
// optional lint/error code, and source spans.
#[derive(Debug, Deserialize)]
pub(super) struct RustcDiagnosticWire {
    pub(super) message: String,
    pub(super) level: String,
    #[serde(default)]
    pub(super) code: Option<RustcCodeWire>,
    #[serde(default)]
    pub(super) spans: Vec<RustcSpanWire>,
}

#[derive(Debug, Deserialize)]
pub(super) struct RustcCodeWire {
    pub(super) code: String,
}

// rustc span coordinates; `is_primary` marks the span the diagnostic points
// at. NOTE(review): rustc emits one-indexed line/column values — confirm
// downstream conversion expects that.
#[derive(Debug, Deserialize)]
pub(super) struct RustcSpanWire {
    pub(super) file_name: String,
    pub(super) line_start: u64,
    pub(super) line_end: u64,
    pub(super) column_start: u64,
    pub(super) column_end: u64,
    pub(super) is_primary: bool,
}
diff --git a/crates/adequate-rust-mcp/src/worker/server.rs b/crates/adequate-rust-mcp/src/worker/server.rs
new file mode 100644
index 0000000..322f37a
--- /dev/null
+++ b/crates/adequate-rust-mcp/src/worker/server.rs
@@ -0,0 +1,744 @@
+use super::{
+    AdvancedLspRequestInput, AdvancedLspResponseOutput, DiagnosticsInput, EMPTY_RESULT_MAX_RETRIES,
+    EMPTY_RESULT_RETRY_DELAY_MS, Engine, EngineConfig, FixEverythingInput, FixEverythingOutput,
+    HealthOutput, HoverOutput, HoverPayload, Json, LocationsOutput, McpError,
+    MethodTelemetryOutput, PorcelainErrorKind,
RenameInput, RenameOutput, ServerHandler, + ServerInfo, ServiceExt, SnapshotRenderInput, SymbolPositionInput, SymbolQueryInput, + TelemetryOutput, ToolTelemetryState, WorkspaceRoot, call_tool_result_with_render, + collect_clippy_diagnostics, diagnostics_call_tool_result, diagnostics_json_output_schema, + elapsed_millis_saturating, hover_output_from_payload, json_output_schema, + locations_output_from_sources, map_engine_error, porcelain_internal_error, + porcelain_invalid_params, render_definition_porcelain, render_health_porcelain, + render_hover_porcelain, render_references_porcelain, render_telemetry_porcelain, + run_workspace_fix_everything, should_retry_empty_symbol_result, +}; +use ra_mcp_engine::BackoffPolicy; +use rmcp::model::CallToolResult; +use rmcp::{tool, tool_handler, tool_router, transport::stdio}; +use serde::Deserialize; +use serde_json::Value; +use std::{ + collections::HashMap, + future::Future, + path::{Path, PathBuf}, + pin::Pin, + process::Command, + str::FromStr, + sync::{Arc, Mutex as StdMutex}, + time::{Duration, Instant}, +}; +use tokio::{sync::Mutex as AsyncMutex, time::sleep}; +use tracing::Level; + +type SymbolQueryFuture<'a, T> = + Pin<Box<dyn Future<Output = Result<T, ra_mcp_engine::EngineError>> + Send + 'a>>; + +#[derive(Clone)] +struct EngineRegistry { + template_config: EngineConfig, + active_workspace_root: Arc<AsyncMutex<WorkspaceRoot>>, + engines: Arc<AsyncMutex<HashMap<WorkspaceRoot, Arc<Engine>>>>, +} + +impl EngineRegistry { + fn new(config: EngineConfig) -> Self { + let workspace_root = config.workspace_root.clone(); + let engine = Arc::new(Engine::new(config.clone())); + let mut engines = HashMap::new(); + let previous = engines.insert(workspace_root.clone(), engine); + debug_assert!(previous.is_none()); + Self { + template_config: config, + active_workspace_root: Arc::new(AsyncMutex::new(workspace_root)), + engines: Arc::new(AsyncMutex::new(engines)), + } + } + + async fn active_engine(&self) -> Result<Arc<Engine>, 
McpError> { + let workspace_root = self.active_workspace_root.lock().await.clone(); + self.engine_for_workspace_root(workspace_root).await + } + + async fn engine_for_position( + &self, + position: &super::SourcePosition, + ) -> Result<Arc<Engine>, McpError> { + self.engine_for_file_path(position.file_path()).await + } + + async fn engine_for_file_path( + &self, + file_path: &super::SourceFilePath, + ) -> Result<Arc<Engine>, McpError> { + let workspace_root = self.workspace_root_for_file(file_path).await?; + self.engine_for_workspace_root(workspace_root).await + } + + async fn workspace_root_for_file( + &self, + file_path: &super::SourceFilePath, + ) -> Result<WorkspaceRoot, McpError> { + let cached = { + let engines = self.engines.lock().await; + best_cached_workspace_root(engines.keys(), file_path.as_path()) + }; + if let Some(root) = cached.as_ref() + && !needs_workspace_root_discovery(file_path.as_path(), root.as_path()) + { + return Ok(root.clone()); + } + Ok(discover_workspace_root( + file_path.as_path(), + &self.template_config.workspace_root, + )) + } + + async fn engine_for_workspace_root( + &self, + workspace_root: WorkspaceRoot, + ) -> Result<Arc<Engine>, McpError> { + let engine = { + let mut engines = self.engines.lock().await; + if let Some(existing) = engines.get(&workspace_root) { + existing.clone() + } else { + let mut config = self.template_config.clone(); + config.workspace_root = workspace_root.clone(); + let created = Arc::new(Engine::new(config)); + let previous = engines.insert(workspace_root.clone(), created.clone()); + debug_assert!(previous.is_none()); + created + } + }; + *self.active_workspace_root.lock().await = workspace_root; + Ok(engine) + } +} + +#[derive(Clone)] +struct DiagnosticQuery { + engine: Arc<Engine>, + file_path: super::SourceFilePath, +} + +#[derive(Clone)] +struct AdequateRustMcpServer { + engines: EngineRegistry, + tool_telemetry: Arc<StdMutex<ToolTelemetryState>>, + tool_router: 
rmcp::handler::server::tool::ToolRouter<Self>, +} + +#[tool_router] +impl AdequateRustMcpServer { + fn new(config: EngineConfig) -> Self { + Self { + engines: EngineRegistry::new(config), + tool_telemetry: Arc::new(StdMutex::new(ToolTelemetryState::default())), + tool_router: Self::tool_router(), + } + } + + fn with_tool_telemetry<R>(&self, mutate: impl FnOnce(&mut ToolTelemetryState) -> R) -> R { + let mut guard = match self.tool_telemetry.lock() { + Ok(state) => state, + Err(poisoned) => poisoned.into_inner(), + }; + mutate(&mut guard) + } + + async fn plan_diagnostic_queries( + &self, + file_paths: Vec<super::SourceFilePath>, + ) -> Result<Vec<DiagnosticQuery>, McpError> { + let mut queries = Vec::with_capacity(file_paths.len()); + for file_path in file_paths { + let engine = self.engines.engine_for_file_path(&file_path).await?; + queries.push(DiagnosticQuery { engine, file_path }); + } + Ok(queries) + } + + fn record_clippy_telemetry_success(&self, elapsed: Duration) { + self.with_tool_telemetry(|state| { + state + .clippy_diagnostics + .record_success(elapsed_millis_saturating(elapsed)); + }); + } + + fn record_clippy_telemetry_error(&self, elapsed: Duration, error: String) { + self.with_tool_telemetry(|state| { + state + .clippy_diagnostics + .record_error(elapsed_millis_saturating(elapsed), error); + }); + } + + fn clippy_telemetry_snapshot(&self) -> Option<MethodTelemetryOutput> { + self.with_tool_telemetry(|state| { + state + .clippy_diagnostics + .to_snapshot("tool/clippy_diagnostics") + }) + } + + fn record_fix_everything_telemetry_success(&self, elapsed: Duration) { + self.with_tool_telemetry(|state| { + state + .fix_everything + .record_success(elapsed_millis_saturating(elapsed)); + }); + } + + fn record_fix_everything_telemetry_error(&self, elapsed: Duration, error: String) { + self.with_tool_telemetry(|state| { + state + .fix_everything + .record_error(elapsed_millis_saturating(elapsed), error); + }); + } + + fn 
fix_everything_telemetry_snapshot(&self) -> Option<MethodTelemetryOutput> { + self.with_tool_telemetry(|state| state.fix_everything.to_snapshot("tool/fix_everything")) + } + + async fn collect_diagnostics_with_warmup_retry( + &self, + file_paths: Vec<super::SourceFilePath>, + ) -> Result<super::DiagnosticsReport, McpError> { + let queries = self.plan_diagnostic_queries(file_paths).await?; + let mut retries = 0_u8; + loop { + let mut aggregated = super::DiagnosticsReport { + diagnostics: Vec::new(), + }; + let mut should_retry_all = !queries.is_empty(); + for query in &queries { + let report = query + .engine + .diagnostics(query.file_path.clone()) + .await + .map_err(map_engine_error)?; + let uptime_ms = query.engine.telemetry_snapshot().await.uptime_ms; + if !should_retry_unlinked_diagnostics(&report, uptime_ms) { + should_retry_all = false; + } + aggregated.diagnostics.extend(report.diagnostics); + } + if !should_retry_all || retries >= EMPTY_RESULT_MAX_RETRIES { + return Ok(aggregated); + } + retries = retries.saturating_add(1); + sleep(Duration::from_millis(EMPTY_RESULT_RETRY_DELAY_MS)).await; + } + } + + async fn resolve_symbol_query<T>( + &self, + position: super::SourcePosition, + is_empty: impl Fn(&T) -> bool, + fetch: impl for<'a> Fn(&'a Engine, super::SourcePosition) -> SymbolQueryFuture<'a, T>, + ) -> Result<(T, bool), McpError> { + let engine = self.engines.engine_for_position(&position).await?; + let mut retries = 0_u8; + let mut retried_during_warmup = false; + + loop { + let result = fetch(engine.as_ref(), position.clone()) + .await + .map_err(map_engine_error)?; + if !is_empty(&result) { + return Ok((result, false)); + } + + let still_warming = + should_retry_empty_symbol_result(engine.telemetry_snapshot().await.uptime_ms); + if !still_warming || retries >= EMPTY_RESULT_MAX_RETRIES { + return Ok((result, retried_during_warmup || still_warming)); + } + + retried_during_warmup = true; + retries = retries.saturating_add(1); + 
sleep(Duration::from_millis(EMPTY_RESULT_RETRY_DELAY_MS)).await; + } + } + + #[tool( + description = "Read hover information at a source position. Defaults to render=porcelain; use render=json for structured output.", + output_schema = json_output_schema::<HoverOutput>() + )] + async fn hover( + &self, + params: rmcp::handler::server::wrapper::Parameters<SymbolQueryInput>, + ) -> Result<CallToolResult, McpError> { + let (position, render_config) = params.0.into_request()?; + let (payload, uncertain_empty) = self + .resolve_symbol_query( + position, + |hover: &HoverPayload| hover.rendered.is_none() && hover.range.is_none(), + |engine, position| Box::pin(engine.hover(position)), + ) + .await?; + let output = hover_output_from_payload(payload.clone(), render_config, uncertain_empty); + let porcelain = render_hover_porcelain(&payload, render_config, uncertain_empty); + call_tool_result_with_render(output, render_config.render, porcelain) + } + + #[tool( + description = "Resolve symbol definition locations at a source position. Defaults to render=porcelain; use render=json for structured output.", + output_schema = json_output_schema::<LocationsOutput>() + )] + async fn definition( + &self, + params: rmcp::handler::server::wrapper::Parameters<SymbolQueryInput>, + ) -> Result<CallToolResult, McpError> { + let (position, render_config) = params.0.into_request()?; + let (locations, uncertain_empty) = self + .resolve_symbol_query(position, Vec::is_empty, |engine, position| { + Box::pin(engine.definition(position)) + }) + .await?; + let output = + locations_output_from_sources(locations.clone(), render_config, uncertain_empty); + let porcelain = render_definition_porcelain(&locations, render_config, uncertain_empty); + call_tool_result_with_render(output, render_config.render, porcelain) + } + + #[tool( + description = "Find all symbol references at a source position. 
Defaults to render=porcelain; use render=json for structured output.", + output_schema = json_output_schema::<LocationsOutput>() + )] + async fn references( + &self, + params: rmcp::handler::server::wrapper::Parameters<SymbolQueryInput>, + ) -> Result<CallToolResult, McpError> { + let (position, render_config) = params.0.into_request()?; + let (locations, uncertain_empty) = self + .resolve_symbol_query(position, Vec::is_empty, |engine, position| { + Box::pin(engine.references(position)) + }) + .await?; + let output = + locations_output_from_sources(locations.clone(), render_config, uncertain_empty); + let porcelain = render_references_porcelain(&locations, render_config, uncertain_empty); + call_tool_result_with_render(output, render_config.render, porcelain) + } + + #[tool(description = "Rename a symbol at a source position.")] + async fn rename_symbol( + &self, + params: rmcp::handler::server::wrapper::Parameters<RenameInput>, + ) -> Result<Json<RenameOutput>, McpError> { + let RenameInput { + file_path, + line, + column, + new_name, + } = params.0; + let position = SymbolPositionInput { + file_path, + line, + column, + } + .into_source_position()?; + let engine = self.engines.engine_for_position(&position).await?; + let report = engine + .rename_symbol(position, new_name) + .await + .map_err(map_engine_error)?; + Ok(Json(RenameOutput::from(report))) + } + + #[tool( + description = "Collect diagnostics for one or more files. 
Defaults to mode=compact, render=porcelain; use render=json for structured output.", + output_schema = diagnostics_json_output_schema() + )] + async fn diagnostics( + &self, + params: rmcp::handler::server::wrapper::Parameters<DiagnosticsInput>, + ) -> Result<CallToolResult, McpError> { + let (file_paths, render_config) = params.0.into_request()?; + let report = self + .collect_diagnostics_with_warmup_retry(file_paths) + .await?; + diagnostics_call_tool_result(report, render_config) + } + + #[tool( + description = "Run cargo clippy for one or more files and return diagnostics using workspace-configured strictness. Defaults to mode=compact, render=porcelain; use render=json for structured output.", + output_schema = diagnostics_json_output_schema() + )] + async fn clippy_diagnostics( + &self, + params: rmcp::handler::server::wrapper::Parameters<DiagnosticsInput>, + ) -> Result<CallToolResult, McpError> { + let started_at = Instant::now(); + let (file_paths, render_config) = match params.0.into_request() { + Ok(parsed) => parsed, + Err(error) => { + self.record_clippy_telemetry_error(started_at.elapsed(), error.to_string()); + return Err(error); + } + }; + let report = collect_clippy_diagnostics(file_paths).await; + let elapsed = started_at.elapsed(); + match report { + Ok(report) => { + self.record_clippy_telemetry_success(elapsed); + diagnostics_call_tool_result(report, render_config) + } + Err(error) => { + self.record_clippy_telemetry_error(elapsed, error.to_string()); + Err(error) + } + } + } + + #[tool( + description = "ONE-STOP: apply workspace autofixes (`cargo fmt --all` + `cargo clippy --fix`) using workspace-configured strictness." 
+ )] + async fn fix_everything( + &self, + _params: rmcp::handler::server::wrapper::Parameters<FixEverythingInput>, + ) -> Result<Json<FixEverythingOutput>, McpError> { + let started_at = Instant::now(); + let output = run_workspace_fix_everything().await; + let elapsed = started_at.elapsed(); + match output { + Ok(output) => { + if output.success { + self.record_fix_everything_telemetry_success(elapsed); + } else { + let failed_step = output + .steps + .iter() + .find(|step| !step.success) + .map(|step| step.step_name.clone()) + .unwrap_or_else(|| "unknown".to_owned()); + self.record_fix_everything_telemetry_error( + elapsed, + format!("workspace fix reported failure in step `{failed_step}`"), + ); + } + Ok(Json(output)) + } + Err(error) => { + self.record_fix_everything_telemetry_error(elapsed, error.to_string()); + Err(error) + } + } + } + + #[tool( + description = "Invoke advanced rust-analyzer LSP requests (completion, code actions, prepare-rename, formatting, symbols, semantic tokens, call hierarchy, etc.)." + )] + async fn advanced_lsp_request( + &self, + params: rmcp::handler::server::wrapper::Parameters<AdvancedLspRequestInput>, + ) -> Result<Json<AdvancedLspResponseOutput>, McpError> { + let AdvancedLspRequestInput { + method, + params: lsp_params, + } = params.0; + let engine = if let Some(file_path) = source_file_path_hint_from_value(&lsp_params) { + self.engines.engine_for_file_path(&file_path).await? + } else { + self.engines.active_engine().await? + }; + let response = engine + .raw_lsp_request(method.as_lsp_method(), lsp_params) + .await + .map_err(map_engine_error)?; + Ok(Json(AdvancedLspResponseOutput { result: response })) + } + + #[tool( + description = "Return process lifecycle and latest fault state. 
Defaults to render=porcelain; use render=json for structured output.", + output_schema = json_output_schema::<HealthOutput>() + )] + async fn health_snapshot( + &self, + params: rmcp::handler::server::wrapper::Parameters<SnapshotRenderInput>, + ) -> Result<CallToolResult, McpError> { + let render = params.0.render(); + let snapshot = self + .engines + .active_engine() + .await? + .lifecycle_snapshot() + .await; + let output = HealthOutput::from(snapshot); + let porcelain = render_health_porcelain(&output); + call_tool_result_with_render(output, render, porcelain) + } + + #[tool( + description = "Return aggregate request/recovery telemetry for this worker process. Defaults to render=porcelain; use render=json for structured output.", + output_schema = json_output_schema::<TelemetryOutput>() + )] + async fn telemetry_snapshot( + &self, + params: rmcp::handler::server::wrapper::Parameters<SnapshotRenderInput>, + ) -> Result<CallToolResult, McpError> { + let render = params.0.render(); + let snapshot = self + .engines + .active_engine() + .await? + .telemetry_snapshot() + .await; + let mut tool_snapshots = Vec::new(); + if let Some(clippy_tool) = self.clippy_telemetry_snapshot() { + tool_snapshots.push(clippy_tool); + } + if let Some(fix_tool) = self.fix_everything_telemetry_snapshot() { + tool_snapshots.push(fix_tool); + } + let output = TelemetryOutput::from_snapshots(snapshot, tool_snapshots); + let porcelain = render_telemetry_porcelain(&output); + call_tool_result_with_render(output, render, porcelain) + } +} + +#[tool_handler] +impl ServerHandler for AdequateRustMcpServer { + fn get_info(&self) -> ServerInfo { + ServerInfo { + instructions: Some( + "Robust rust-analyzer MCP server with aggressive self-healing and restart semantics." 
+ .into(), + ), + capabilities: rmcp::model::ServerCapabilities::builder() + .enable_tools() + .build(), + server_info: rmcp::model::Implementation { + name: "adequate-rust-mcp".into(), + version: env!("CARGO_PKG_VERSION").into(), + ..Default::default() + }, + ..Default::default() + } + } +} + +fn read_duration_env(name: &str, default: Duration) -> Duration { + let raw = std::env::var(name); + let Ok(raw) = raw else { + return default; + }; + let parsed = u64::from_str(&raw); + let Ok(value) = parsed else { + return default; + }; + Duration::from_millis(value) +} + +fn read_workspace_root() -> Result<WorkspaceRoot, McpError> { + let from_env = std::env::var("ADEQUATE_MCP_WORKSPACE_ROOT") + .ok() + .map(PathBuf::from); + let raw_root = match from_env { + Some(path) => path, + None => std::env::current_dir().map_err(|_| { + porcelain_internal_error( + "failed to determine current working directory", + PorcelainErrorKind::InternalFailure, + Some("set ADEQUATE_MCP_WORKSPACE_ROOT explicitly"), + false, + ) + })?, + }; + WorkspaceRoot::try_new(raw_root).map_err(|_| { + porcelain_invalid_params( + "workspace root is invalid", + PorcelainErrorKind::InvalidInput, + Some("set ADEQUATE_MCP_WORKSPACE_ROOT to an existing directory"), + ) + }) +} + +fn build_engine_config() -> Result<EngineConfig, McpError> { + let workspace_root = read_workspace_root()?; + let binary = std::env::var("ADEQUATE_MCP_RA_BINARY") + .ok() + .filter(|value| !value.is_empty()) + .map(PathBuf::from) + .unwrap_or_else(|| PathBuf::from("rust-analyzer")); + let startup_timeout = + read_duration_env("ADEQUATE_MCP_STARTUP_TIMEOUT_MS", Duration::from_secs(20)); + let request_timeout = + read_duration_env("ADEQUATE_MCP_REQUEST_TIMEOUT_MS", Duration::from_secs(12)); + let floor = read_duration_env("ADEQUATE_MCP_BACKOFF_FLOOR_MS", Duration::from_millis(50)); + let ceiling = read_duration_env("ADEQUATE_MCP_BACKOFF_CEILING_MS", Duration::from_secs(2)); + let backoff_policy = BackoffPolicy::try_new(floor, 
ceiling).map_err(|_| { + porcelain_invalid_params( + "backoff timing configuration is invalid", + PorcelainErrorKind::InvalidInput, + Some("ensure backoff floor is <= ceiling and both are positive"), + ) + })?; + EngineConfig::try_new( + workspace_root, + binary, + Vec::new(), + Vec::new(), + startup_timeout, + request_timeout, + backoff_policy, + ) + .map_err(|_| { + porcelain_invalid_params( + "engine configuration is invalid", + PorcelainErrorKind::InvalidInput, + Some("verify workspace root and timeout/backoff environment values"), + ) + }) +} + +/// Runs the worker-mode MCP server. +pub(crate) async fn run_worker() -> Result<(), Box<dyn std::error::Error>> { + let init_result = tracing_subscriber::fmt() + .with_max_level(Level::INFO) + .with_env_filter(tracing_subscriber::EnvFilter::from_default_env()) + .with_writer(std::io::stderr) + .try_init(); + if let Err(error) = init_result { + eprintln!("worker tracing init skipped: {error}"); + } + + let config = build_engine_config()?; + let server = AdequateRustMcpServer::new(config); + let service = server.serve(stdio()).await?; + let quit_reason = service.waiting().await?; + tracing::info!("server terminated: {quit_reason:?}"); + Ok(()) +} + +#[derive(Debug, Deserialize)] +struct CargoMetadataWorkspaceRoot { + workspace_root: PathBuf, +} + +fn best_cached_workspace_root<'a>( + roots: impl Iterator<Item = &'a WorkspaceRoot>, + file_path: &Path, +) -> Option<WorkspaceRoot> { + roots + .filter(|root| file_path.starts_with(root.as_path())) + .max_by_key(|root| root.as_path().components().count()) + .cloned() +} + +fn discover_workspace_root(file_path: &Path, fallback_root: &WorkspaceRoot) -> WorkspaceRoot { + if file_path.starts_with(fallback_root.as_path()) + && !needs_workspace_root_discovery(file_path, fallback_root.as_path()) + { + return fallback_root.clone(); + } + let Some(start_dir) = file_path.parent() else { + return fallback_root.clone(); + }; + cargo_metadata_workspace_root(start_dir, file_path) + 
.or_else(|| git_workspace_root(start_dir, file_path)) + .unwrap_or_else(|| fallback_root.clone()) +} + +fn needs_workspace_root_discovery(file_path: &Path, workspace_root: &Path) -> bool { + file_path + .strip_prefix(workspace_root) + .ok() + .and_then(|relative| relative.components().next()) + .is_some_and(|component| component.as_os_str() == ".worktrees") +} + +fn cargo_metadata_workspace_root(start_dir: &Path, file_path: &Path) -> Option<WorkspaceRoot> { + let output = Command::new("cargo") + .arg("metadata") + .arg("--format-version") + .arg("1") + .arg("--no-deps") + .current_dir(start_dir) + .output() + .ok()?; + if !output.status.success() { + return None; + } + let parsed = serde_json::from_slice::<CargoMetadataWorkspaceRoot>(&output.stdout).ok()?; + workspace_root_candidate(parsed.workspace_root, file_path) +} + +fn git_workspace_root(start_dir: &Path, file_path: &Path) -> Option<WorkspaceRoot> { + let output = Command::new("git") + .arg("rev-parse") + .arg("--show-toplevel") + .current_dir(start_dir) + .output() + .ok()?; + if !output.status.success() { + return None; + } + let raw_root = String::from_utf8(output.stdout).ok()?; + workspace_root_candidate(PathBuf::from(raw_root.trim()), file_path) +} + +fn workspace_root_candidate(candidate: PathBuf, file_path: &Path) -> Option<WorkspaceRoot> { + let workspace_root = WorkspaceRoot::try_new(candidate).ok()?; + file_path + .starts_with(workspace_root.as_path()) + .then_some(workspace_root) +} + +fn source_file_path_hint_from_value(value: &Value) -> Option<super::SourceFilePath> { + match value { + Value::String(raw) => source_file_path_hint_from_str(raw), + Value::Object(object) => { + for key in ["uri", "file_path", "filePath", "path"] { + let raw = object.get(key).and_then(Value::as_str); + if let Some(raw) = raw + && let Some(file_path) = source_file_path_hint_from_str(raw) + { + return Some(file_path); + } + } + object.values().find_map(source_file_path_hint_from_value) + } + Value::Array(items) => 
items.iter().find_map(source_file_path_hint_from_value), + Value::Null | Value::Bool(_) | Value::Number(_) => None, + } +} + +fn source_file_path_hint_from_str(raw: &str) -> Option<super::SourceFilePath> { + let trimmed = raw.trim(); + if trimmed.starts_with("file://") { + let file_url = url::Url::parse(trimmed).ok()?; + let path = file_url.to_file_path().ok()?; + return super::SourceFilePath::try_new(path).ok(); + } + let path = PathBuf::from(trimmed); + path.is_absolute() + .then(|| super::SourceFilePath::try_new(path).ok()) + .flatten() +} + +fn should_retry_unlinked_diagnostics(report: &super::DiagnosticsReport, uptime_ms: u64) -> bool { + should_retry_empty_symbol_result(uptime_ms) + && !report.diagnostics.is_empty() + && report + .diagnostics + .iter() + .all(diagnostic_looks_like_unlinked_file) +} + +fn diagnostic_looks_like_unlinked_file(diagnostic: &super::DiagnosticEntry) -> bool { + diagnostic.code.as_deref().is_some_and(|code| { + code.eq_ignore_ascii_case("unlinked-file") || code.eq_ignore_ascii_case("unlinked_file") + }) || { + let message = diagnostic.message.to_ascii_lowercase(); + message.contains("not part of any crate") + || message.contains("not part of a crate") + || message.contains("not included in any crates") + || message.contains("not included in the crate graph") + || message.contains("not included in crate graph") + || message.contains("can't offer ide services") + } +} diff --git a/crates/adequate-rust-mcp/src/worker/telemetry.rs b/crates/adequate-rust-mcp/src/worker/telemetry.rs new file mode 100644 index 0000000..92c7371 --- /dev/null +++ b/crates/adequate-rust-mcp/src/worker/telemetry.rs @@ -0,0 +1,152 @@ +use super::{ + FaultOutput, HealthStateOutput, LifecycleSnapshot, MethodTelemetryOutput, + MethodTelemetrySnapshot, TelemetryOutput, TelemetrySnapshot, TelemetryTotals, + TelemetryTotalsOutput, +}; + +#[derive(Debug, Default)] +pub(super) struct ToolTelemetryState { + pub(super) clippy_diagnostics: MethodTelemetryAccumulator, + 
pub(super) fix_everything: MethodTelemetryAccumulator, +} + +#[derive(Debug, Clone, Default)] +pub(super) struct MethodTelemetryAccumulator { + pub(super) request_count: u64, + pub(super) success_count: u64, + pub(super) response_error_count: u64, + pub(super) transport_fault_count: u64, + pub(super) retry_count: u64, + pub(super) total_latency_ms: u128, + pub(super) last_latency_ms: Option<u64>, + pub(super) max_latency_ms: u64, + pub(super) last_error: Option<String>, +} + +impl MethodTelemetryAccumulator { + pub(super) fn record_success(&mut self, latency_ms: u64) { + self.request_count = self.request_count.saturating_add(1); + self.success_count = self.success_count.saturating_add(1); + self.total_latency_ms = self.total_latency_ms.saturating_add(u128::from(latency_ms)); + self.last_latency_ms = Some(latency_ms); + self.max_latency_ms = self.max_latency_ms.max(latency_ms); + self.last_error = None; + } + + pub(super) fn record_error(&mut self, latency_ms: u64, error: String) { + self.request_count = self.request_count.saturating_add(1); + self.response_error_count = self.response_error_count.saturating_add(1); + self.total_latency_ms = self.total_latency_ms.saturating_add(u128::from(latency_ms)); + self.last_latency_ms = Some(latency_ms); + self.max_latency_ms = self.max_latency_ms.max(latency_ms); + self.last_error = Some(error); + } + + pub(super) fn to_snapshot(&self, method: &str) -> Option<MethodTelemetryOutput> { + if self.request_count == 0 { + return None; + } + let avg_latency_ms = if self.request_count == 0 { + 0 + } else { + let average = self.total_latency_ms / u128::from(self.request_count); + u64::try_from(average).unwrap_or(u64::MAX) + }; + Some(MethodTelemetryOutput { + method: method.to_owned(), + request_count: self.request_count, + success_count: self.success_count, + response_error_count: self.response_error_count, + transport_fault_count: self.transport_fault_count, + retry_count: self.retry_count, + last_latency_ms: self.last_latency_ms, + 
max_latency_ms: self.max_latency_ms, + avg_latency_ms, + last_error: self.last_error.clone(), + }) + } +} + +impl From<TelemetryTotals> for TelemetryTotalsOutput { + fn from(value: TelemetryTotals) -> Self { + Self { + request_count: value.request_count, + success_count: value.success_count, + response_error_count: value.response_error_count, + transport_fault_count: value.transport_fault_count, + retry_count: value.retry_count, + } + } +} + +impl From<MethodTelemetrySnapshot> for MethodTelemetryOutput { + fn from(value: MethodTelemetrySnapshot) -> Self { + Self { + method: value.method, + request_count: value.request_count, + success_count: value.success_count, + response_error_count: value.response_error_count, + transport_fault_count: value.transport_fault_count, + retry_count: value.retry_count, + last_latency_ms: value.last_latency_ms, + max_latency_ms: value.max_latency_ms, + avg_latency_ms: value.avg_latency_ms, + last_error: value.last_error, + } + } +} + +impl TelemetryOutput { + pub(super) fn from_snapshots( + value: TelemetrySnapshot, + extra_tools: Vec<MethodTelemetryOutput>, + ) -> Self { + let TelemetrySnapshot { + uptime_ms, + lifecycle, + consecutive_failures, + restart_count, + totals, + methods, + last_fault, + } = value; + let (state, generation) = match lifecycle { + LifecycleSnapshot::Cold { generation } => (HealthStateOutput::Cold, generation.get()), + LifecycleSnapshot::Starting { generation } => { + (HealthStateOutput::Starting, generation.get()) + } + LifecycleSnapshot::Ready { generation } => (HealthStateOutput::Ready, generation.get()), + LifecycleSnapshot::Recovering { generation, .. 
} => { + (HealthStateOutput::Recovering, generation.get()) + } + }; + let mut totals = TelemetryTotalsOutput::from(totals); + let mut methods = methods + .into_iter() + .map(MethodTelemetryOutput::from) + .collect::<Vec<_>>(); + for tool in extra_tools { + totals.request_count = totals.request_count.saturating_add(tool.request_count); + totals.success_count = totals.success_count.saturating_add(tool.success_count); + totals.response_error_count = totals + .response_error_count + .saturating_add(tool.response_error_count); + totals.transport_fault_count = totals + .transport_fault_count + .saturating_add(tool.transport_fault_count); + totals.retry_count = totals.retry_count.saturating_add(tool.retry_count); + methods.push(tool); + } + methods.sort_by(|left, right| left.method.cmp(&right.method)); + Self { + uptime_ms, + state, + generation, + consecutive_failures, + restart_count, + totals, + methods, + last_fault: last_fault.map(FaultOutput::from), + } + } +} diff --git a/crates/adequate-rust-mcp/src/worker/tests.rs b/crates/adequate-rust-mcp/src/worker/tests.rs new file mode 100644 index 0000000..1ccd18a --- /dev/null +++ b/crates/adequate-rust-mcp/src/worker/tests.rs @@ -0,0 +1,809 @@ +use super::{ + AbsolutePathInput, AdvancedLspMethod, AdvancedLspRequestInput, CommonRenderConfig, + CommonRenderInput, DiagnosticsInput, DiagnosticsJsonOutput, DiagnosticsModeInput, + DiagnosticsPathStyleInput, DiagnosticsRenderConfig, DiagnosticsRenderInput, HoverPayload, + OneIndexedInput, PathStyleInput, SnapshotRenderInput, SymbolQueryInput, + parse_clippy_json_stream, read_workspace_tool_command, read_workspace_tool_metadata, + render_definition_porcelain, render_hover_porcelain, resolve_workspace_fix_command_specs, +}; +use ra_mcp_domain::types::{ + OneIndexedColumn, OneIndexedLine, SourceFilePath, SourcePoint, SourceRange, +}; +use ra_mcp_engine::{DiagnosticEntry, DiagnosticLevel, DiagnosticsReport}; +use serde_json::json; +use std::collections::HashSet; +use 
std::path::PathBuf; +use url::Url; + +#[test] +fn advanced_lsp_method_mapping_is_unique() { + let methods = vec![ + AdvancedLspMethod::Hover, + AdvancedLspMethod::Definition, + AdvancedLspMethod::References, + AdvancedLspMethod::Declaration, + AdvancedLspMethod::TypeDefinition, + AdvancedLspMethod::Implementation, + AdvancedLspMethod::Completion, + AdvancedLspMethod::CompletionResolve, + AdvancedLspMethod::SignatureHelp, + AdvancedLspMethod::DocumentSymbol, + AdvancedLspMethod::WorkspaceSymbol, + AdvancedLspMethod::WorkspaceSymbolResolve, + AdvancedLspMethod::PrepareRename, + AdvancedLspMethod::Rename, + AdvancedLspMethod::CodeAction, + AdvancedLspMethod::CodeActionResolve, + AdvancedLspMethod::CodeLens, + AdvancedLspMethod::CodeLensResolve, + AdvancedLspMethod::ExecuteCommand, + AdvancedLspMethod::Formatting, + AdvancedLspMethod::RangeFormatting, + AdvancedLspMethod::OnTypeFormatting, + AdvancedLspMethod::DocumentHighlight, + AdvancedLspMethod::DocumentLink, + AdvancedLspMethod::DocumentLinkResolve, + AdvancedLspMethod::DocumentColor, + AdvancedLspMethod::ColorPresentation, + AdvancedLspMethod::LinkedEditingRange, + AdvancedLspMethod::InlayHint, + AdvancedLspMethod::InlayHintResolve, + AdvancedLspMethod::FoldingRange, + AdvancedLspMethod::SelectionRange, + AdvancedLspMethod::DocumentDiagnostic, + AdvancedLspMethod::WorkspaceDiagnostic, + AdvancedLspMethod::SemanticTokensFull, + AdvancedLspMethod::SemanticTokensFullDelta, + AdvancedLspMethod::SemanticTokensRange, + AdvancedLspMethod::Moniker, + AdvancedLspMethod::InlineValue, + AdvancedLspMethod::TypeHierarchyPrepare, + AdvancedLspMethod::TypeHierarchySupertypes, + AdvancedLspMethod::TypeHierarchySubtypes, + AdvancedLspMethod::CallHierarchyPrepare, + AdvancedLspMethod::CallHierarchyIncomingCalls, + AdvancedLspMethod::CallHierarchyOutgoingCalls, + ]; + + let mapped = methods + .iter() + .map(AdvancedLspMethod::as_lsp_method) + .collect::<Vec<_>>(); + let unique = mapped.iter().copied().collect::<HashSet<_>>(); + 
assert_eq!(mapped.len(), unique.len()); + assert!(mapped.contains(&"textDocument/prepareRename")); + assert!(mapped.contains(&"workspace/executeCommand")); + assert!(mapped.contains(&"textDocument/completion")); + assert!(mapped.contains(&"textDocument/codeAction")); + assert!(mapped.contains(&"textDocument/references")); + assert!(mapped.contains(&"textDocument/rename")); + assert!(mapped.contains(&"textDocument/diagnostic")); + assert!(mapped.contains(&"workspace/diagnostic")); + assert!(mapped.contains(&"completionItem/resolve")); + assert!(mapped.contains(&"textDocument/prepareTypeHierarchy")); +} + +#[test] +fn advanced_lsp_method_deserialization_accepts_alias_shapes() { + let camel = serde_json::from_value::<AdvancedLspMethod>(json!("prepareRename")); + assert!(camel.is_ok()); + assert_eq!( + camel.unwrap_or(AdvancedLspMethod::Hover), + AdvancedLspMethod::PrepareRename + ); + + let kebab = serde_json::from_value::<AdvancedLspMethod>(json!("prepare-rename")); + assert!(kebab.is_ok()); + assert_eq!( + kebab.unwrap_or(AdvancedLspMethod::Hover), + AdvancedLspMethod::PrepareRename + ); + + let full_method = + serde_json::from_value::<AdvancedLspMethod>(json!("textDocument/prepareRename")); + assert!(full_method.is_ok()); + assert_eq!( + full_method.unwrap_or(AdvancedLspMethod::Hover), + AdvancedLspMethod::PrepareRename + ); +} + +#[test] +fn advanced_lsp_request_input_unpacks_json_encoded_string_payload() { + let parsed = serde_json::from_value::<AdvancedLspRequestInput>(json!({ + "method": "workspace/symbol", + "params": "{\"query\":\"resolve_writable_directive_roles\"}" + })); + assert!(parsed.is_ok()); + let parsed = match parsed { + Ok(value) => value, + Err(_) => return, + }; + assert_eq!(parsed.method, AdvancedLspMethod::WorkspaceSymbol); + assert_eq!( + parsed.params, + json!({"query": "resolve_writable_directive_roles"}) + ); +} + +#[test] +fn advanced_lsp_request_input_rejects_malformed_json_payload_string() { + let parsed = 
serde_json::from_value::<AdvancedLspRequestInput>(json!({ + "method": "workspace/symbol", + "params": "{\"query\":}" + })); + assert!(parsed.is_err()); +} + +#[test] +fn one_indexed_input_deserialization_accepts_loose_numeric_forms() { + let from_string = serde_json::from_value::<OneIndexedInput>(json!("17")); + assert!(from_string.is_ok()); + assert_eq!( + from_string + .unwrap_or(OneIndexedInput(1)) + .normalized_for_one_indexed(), + 17 + ); + + let from_float = serde_json::from_value::<OneIndexedInput>(json!(12.0)); + assert!(from_float.is_ok()); + assert_eq!( + from_float + .unwrap_or(OneIndexedInput(1)) + .normalized_for_one_indexed(), + 12 + ); + + let from_zero = serde_json::from_value::<OneIndexedInput>(json!(0)); + assert!(from_zero.is_ok()); + assert_eq!( + from_zero + .unwrap_or(OneIndexedInput(1)) + .normalized_for_one_indexed(), + 1 + ); +} + +#[test] +fn symbol_query_input_defaults_to_porcelain_relative() { + let temp = tempfile::TempDir::new(); + assert!(temp.is_ok()); + let temp = match temp { + Ok(value) => value, + Err(_) => return, + }; + let file_path = temp.path().join("symbol_query.rs"); + assert!(std::fs::write(file_path.as_path(), "fn main() {}\n").is_ok()); + + let parsed = serde_json::from_value::<SymbolQueryInput>(json!({ + "file_path": file_path.display().to_string(), + "line": 1, + "column": 1 + })); + assert!(parsed.is_ok()); + let parsed = match parsed { + Ok(value) => value, + Err(_) => return, + }; + let requested = parsed.into_request(); + assert!(requested.is_ok()); + let (position, render_config) = match requested { + Ok(value) => value, + Err(_) => return, + }; + assert_eq!(position.file_path().as_path(), file_path.as_path()); + assert_eq!(position.line().get(), 1); + assert_eq!(position.column().get(), 1); + assert!(matches!(render_config.render, CommonRenderInput::Porcelain)); + assert!(matches!(render_config.path_style, PathStyleInput::Relative)); +} + +#[test] +fn snapshot_render_input_defaults_to_porcelain() { + let parsed 
= serde_json::from_value::<SnapshotRenderInput>(json!({})); + assert!(parsed.is_ok()); + let parsed = match parsed { + Ok(value) => value, + Err(_) => return, + }; + assert!(matches!(parsed.render(), CommonRenderInput::Porcelain)); +} + +#[test] +fn diagnostics_input_defaults_to_compact_porcelain_absolute_without_limits() { + let parsed = serde_json::from_value::<DiagnosticsInput>(json!({ + "file_path": "/tmp/diagnostics_defaults.rs" + })); + assert!(parsed.is_ok()); + let parsed = match parsed { + Ok(value) => value, + Err(_) => return, + }; + assert!(parsed.mode.is_none()); + assert!(parsed.render.is_none()); + assert!(matches!( + parsed.path_style, + DiagnosticsPathStyleInput::Absolute + )); + assert_eq!(parsed.file_paths.len(), 1); + assert!(parsed.max_items.is_none()); + assert!(parsed.max_message_chars.is_none()); +} + +#[test] +fn diagnostics_input_rejects_legacy_format_field() { + let parsed = serde_json::from_value::<DiagnosticsInput>(json!({ + "file_path": "/tmp/diagnostics_defaults.rs", + "format": "json" + })); + assert!(parsed.is_err()); +} + +#[test] +fn transient_lsp_error_recognizes_server_cancelled() { + assert!(super::errors::is_transient_lsp_error( + -32802, + "server cancelled request during workspace reload", + )); +} + +#[test] +fn diagnostics_input_accepts_file_list_and_preserves_order() { + let temp = tempfile::TempDir::new(); + assert!(temp.is_ok()); + let temp = match temp { + Ok(value) => value, + Err(_) => return, + }; + let first_path = temp.path().join("first.rs"); + let second_path = temp.path().join("second.rs"); + let write_first = std::fs::write(first_path.as_path(), "fn first() {}\n"); + let write_second = std::fs::write(second_path.as_path(), "fn second() {}\n"); + assert!(write_first.is_ok() && write_second.is_ok()); + + let parsed = serde_json::from_value::<DiagnosticsInput>(json!({ + "file_paths": [ + first_path.display().to_string(), + second_path.display().to_string() + ] + })); + assert!(parsed.is_ok()); + let parsed = 
match parsed { + Ok(value) => value, + Err(_) => return, + }; + let requested = parsed.into_request(); + assert!(requested.is_ok()); + let (file_paths, _render_config) = match requested { + Ok(value) => value, + Err(_) => return, + }; + assert_eq!(file_paths.len(), 2); + assert_eq!(file_paths[0].as_path(), first_path.as_path()); + assert_eq!(file_paths[1].as_path(), second_path.as_path()); +} + +#[test] +fn diagnostics_input_rejects_missing_file_paths() { + let parsed = serde_json::from_value::<DiagnosticsInput>(json!({ + "file_paths": ["/tmp/does_not_exist_adequate_mcp.rs"] + })); + assert!(parsed.is_ok()); + let parsed = match parsed { + Ok(value) => value, + Err(_) => return, + }; + let requested = parsed.into_request(); + assert!(requested.is_err()); +} + +#[test] +fn render_definition_porcelain_marks_uncertain_empty_results() { + let rendered = render_definition_porcelain( + &[], + CommonRenderConfig::from_user_input( + Some(CommonRenderInput::Porcelain), + Some(PathStyleInput::Relative), + ), + true, + ); + assert_eq!( + rendered, + "0 definitions\nnote: result may be incomplete during indexing; retry in a few seconds" + ); +} + +#[test] +fn render_hover_porcelain_prefers_signature_and_appends_uncertain_note() { + let rendered = render_hover_porcelain( + &HoverPayload { + rendered: Some( + "```rust\nfn parse_launch_mode(args: impl IntoIterator<Item = OsString>) -> Result<LaunchMode, String>\n```\n\nParses the launch mode." 
+ .to_owned(), + ), + range: None, + }, + CommonRenderConfig::from_user_input( + Some(CommonRenderInput::Porcelain), + Some(PathStyleInput::Relative), + ), + true, + ); + assert!(rendered.contains("fn parse_launch_mode(")); + assert!(rendered.contains("note: result may be incomplete during indexing")); + assert!(!rendered.contains("```")); +} + +#[test] +fn diagnostics_compact_projection_preserves_full_information_without_limits() { + let file_path = SourceFilePath::try_new(PathBuf::from("/tmp/diag_projection.rs")); + assert!(file_path.is_ok()); + let file_path = match file_path { + Ok(value) => value, + Err(_) => return, + }; + let range = |line: u64, start_column: u64, end_column: u64| { + SourceRange::try_new( + file_path.clone(), + SourcePoint::new( + OneIndexedLine::try_new(line).expect("valid test line"), + OneIndexedColumn::try_new(start_column).expect("valid test column"), + ), + SourcePoint::new( + OneIndexedLine::try_new(line).expect("valid test line"), + OneIndexedColumn::try_new(end_column).expect("valid test column"), + ), + ) + .expect("valid test range") + }; + let first_range = range(3, 7, 12); + let second_range = range(8, 2, 9); + let report = DiagnosticsReport { + diagnostics: vec![ + DiagnosticEntry { + range: first_range, + level: DiagnosticLevel::Error, + code: Some("E0382".to_owned()), + message: "borrow of moved value".to_owned(), + }, + DiagnosticEntry { + range: second_range, + level: DiagnosticLevel::Warning, + code: None, + message: "unused variable".to_owned(), + }, + ], + }; + + let full = DiagnosticsJsonOutput::from_report( + report.clone(), + DiagnosticsRenderConfig { + mode: DiagnosticsModeInput::Full, + render: DiagnosticsRenderInput::Json, + max_items: None, + max_message_chars: None, + path_style: DiagnosticsPathStyleInput::Absolute, + }, + ); + let compact = DiagnosticsJsonOutput::from_report( + report, + DiagnosticsRenderConfig { + mode: DiagnosticsModeInput::Compact, + render: DiagnosticsRenderInput::Json, + max_items: None, + 
max_message_chars: None, + path_style: DiagnosticsPathStyleInput::Absolute, + }, + ); + + assert!(compact.diagnostics.is_none()); + assert_eq!(compact.overflow_count, 0); + assert!(!compact.truncated); + assert_eq!(compact.counts.total_count, 2); + assert_eq!(compact.counts.error_count, 1); + assert_eq!(compact.counts.warning_count, 1); + assert_eq!(compact.items.len(), 2); + + let full_items = full.diagnostics.unwrap_or_default(); + assert_eq!(full_items.len(), compact.items.len()); + for (full_item, compact_item) in full_items.iter().zip(compact.items.iter()) { + let expected_severity = full_item.level; + let start = &full_item.range.start; + let end = &full_item.range.end; + assert_eq!(compact_item.severity, expected_severity); + assert_eq!(compact_item.file_path, full_item.range.file_path); + assert_eq!(compact_item.start_line, start.line); + assert_eq!(compact_item.start_column, start.column); + assert_eq!(compact_item.end_line, end.line); + assert_eq!(compact_item.end_column, end.column); + assert_eq!(compact_item.code, full_item.code); + assert_eq!(compact_item.message, full_item.message); + } +} + +#[test] +fn diagnostics_compact_limits_apply_truncation_metadata() { + let file_path = SourceFilePath::try_new(PathBuf::from("/tmp/diag_limits.rs")); + assert!(file_path.is_ok()); + let file_path = match file_path { + Ok(value) => value, + Err(_) => return, + }; + let make_range = |line: u64, col_start: u64, col_end: u64| -> Option<SourceRange> { + let start = SourcePoint::new( + OneIndexedLine::try_new(line).ok()?, + OneIndexedColumn::try_new(col_start).ok()?, + ); + let end = SourcePoint::new( + OneIndexedLine::try_new(line).ok()?, + OneIndexedColumn::try_new(col_end).ok()?, + ); + SourceRange::try_new(file_path.clone(), start, end).ok() + }; + let first = make_range(1, 1, 5); + let second = make_range(2, 1, 6); + let third = make_range(3, 1, 7); + assert!(first.is_some() && second.is_some() && third.is_some()); + let report = DiagnosticsReport { + diagnostics: 
vec![ + DiagnosticEntry { + range: first.unwrap_or_else(|| unreachable!()), + level: DiagnosticLevel::Error, + code: None, + message: "123456789".to_owned(), + }, + DiagnosticEntry { + range: second.unwrap_or_else(|| unreachable!()), + level: DiagnosticLevel::Warning, + code: None, + message: "abcdefghi".to_owned(), + }, + DiagnosticEntry { + range: third.unwrap_or_else(|| unreachable!()), + level: DiagnosticLevel::Hint, + code: None, + message: "should_not_be_visible".to_owned(), + }, + ], + }; + let compact = DiagnosticsJsonOutput::from_report( + report, + DiagnosticsRenderConfig { + mode: DiagnosticsModeInput::Compact, + render: DiagnosticsRenderInput::Json, + max_items: Some(2), + max_message_chars: Some(8), + path_style: DiagnosticsPathStyleInput::Absolute, + }, + ); + assert!(compact.truncated); + assert_eq!(compact.overflow_count, 1); + assert_eq!(compact.items.len(), 2); + assert_eq!(compact.items[0].message, "12345..."); + assert_eq!(compact.items[1].message, "abcde..."); +} + +#[test] +fn manifest_clippy_command_override_is_read() { + let temp = tempfile::TempDir::new(); + assert!(temp.is_ok()); + let temp = match temp { + Ok(value) => value, + Err(_) => return, + }; + let manifest = temp.path().join("Cargo.toml"); + let write = std::fs::write( + manifest.as_path(), + r#" +[workspace] +members = [] + +[workspace.metadata.adequate-rust-mcp] +clippy_command = ["cargo", "clippy", "--workspace", "--message-format=json", "--", "-Dwarnings"] +"#, + ); + assert!(write.is_ok()); + let metadata = read_workspace_tool_metadata(temp.path()); + assert!(metadata.is_ok()); + let metadata = match metadata { + Ok(value) => value, + Err(_) => return, + }; + let parsed = read_workspace_tool_command( + metadata.as_ref(), + "clippy_command", + &["clippy_command", "clippyCommand"], + ); + assert!(parsed.is_ok()); + let parsed = parsed.ok().flatten(); + assert!(parsed.is_some()); + let parsed = parsed.unwrap_or_else(|| unreachable!()); + assert_eq!(parsed.program, "cargo"); + 
assert_eq!( + parsed.args, + vec![ + "clippy", + "--workspace", + "--message-format=json", + "--", + "-Dwarnings" + ] + ); +} + +#[test] +fn manifest_fix_and_format_command_overrides_are_read() { + let temp = tempfile::TempDir::new(); + assert!(temp.is_ok()); + let temp = match temp { + Ok(value) => value, + Err(_) => return, + }; + let manifest = temp.path().join("Cargo.toml"); + let write = std::fs::write( + manifest.as_path(), + r#" +[workspace] +members = [] + +[workspace.metadata.adequate-rust-mcp] +format_command = ["cargo", "fmt", "--all", "--check"] +fix_command = ["cargo", "clippy", "--fix", "--workspace", "--all-targets"] +"#, + ); + assert!(write.is_ok()); + + let parsed = resolve_workspace_fix_command_specs(temp.path()); + assert!(parsed.is_ok()); + let parsed = match parsed { + Ok(value) => value, + Err(_) => return, + }; + let (format_command, fix_command) = parsed; + assert_eq!(format_command.program, "cargo"); + assert_eq!(format_command.args, vec!["fmt", "--all", "--check"]); + assert_eq!(fix_command.program, "cargo"); + assert_eq!( + fix_command.args, + vec!["clippy", "--fix", "--workspace", "--all-targets"] + ); +} + +#[test] +fn parse_clippy_json_stream_filters_to_target_file() { + let temp = tempfile::TempDir::new(); + assert!(temp.is_ok()); + let temp = match temp { + Ok(value) => value, + Err(_) => return, + }; + let src_dir = temp.path().join("src"); + let create_dir = std::fs::create_dir_all(src_dir.as_path()); + assert!(create_dir.is_ok()); + let main_file = src_dir.join("main.rs"); + let lib_file = src_dir.join("lib.rs"); + let write_main = std::fs::write(main_file.as_path(), "fn main() {}\n"); + let write_lib = std::fs::write(lib_file.as_path(), "pub fn helper() {}\n"); + assert!(write_main.is_ok() && write_lib.is_ok()); + let target_path = SourceFilePath::try_new(main_file.clone()); + assert!(target_path.is_ok()); + let target_path = match target_path { + Ok(value) => value, + Err(_) => return, + }; + + let target_message = json!({ + 
"reason": "compiler-message", + "message": { + "message": "manual implementation of Option::map", + "level": "warning", + "code": { "code": "clippy::manual_map" }, + "spans": [ + { + "file_name": "src/main.rs", + "line_start": 1, + "line_end": 1, + "column_start": 1, + "column_end": 5, + "is_primary": true + } + ] + } + }) + .to_string(); + let other_message = json!({ + "reason": "compiler-message", + "message": { + "message": "unused function", + "level": "warning", + "code": { "code": "dead_code" }, + "spans": [ + { + "file_name": "src/lib.rs", + "line_start": 1, + "line_end": 1, + "column_start": 1, + "column_end": 3, + "is_primary": true + } + ] + } + }) + .to_string(); + + let stream = format!("{target_message}\n{other_message}\n"); + let diagnostics = parse_clippy_json_stream(stream.as_str(), &[target_path], temp.path()); + assert_eq!(diagnostics.len(), 1); + let diagnostic = diagnostics.first().cloned(); + assert!(diagnostic.is_some()); + let diagnostic = diagnostic.unwrap_or_else(|| unreachable!()); + assert_eq!(diagnostic.code.unwrap_or_default(), "clippy::manual_map"); + assert_eq!(diagnostic.message, "manual implementation of Option::map"); +} + +#[test] +fn parse_clippy_json_stream_emits_in_requested_file_order() { + let temp = tempfile::TempDir::new(); + assert!(temp.is_ok()); + let temp = match temp { + Ok(value) => value, + Err(_) => return, + }; + let src_dir = temp.path().join("src"); + let create_dir = std::fs::create_dir_all(src_dir.as_path()); + assert!(create_dir.is_ok()); + let main_file = src_dir.join("main.rs"); + let lib_file = src_dir.join("lib.rs"); + let write_main = std::fs::write(main_file.as_path(), "fn main() {}\n"); + let write_lib = std::fs::write(lib_file.as_path(), "pub fn helper() {}\n"); + assert!(write_main.is_ok() && write_lib.is_ok()); + + let main_target = SourceFilePath::try_new(main_file.clone()); + let lib_target = SourceFilePath::try_new(lib_file.clone()); + assert!(main_target.is_ok() && lib_target.is_ok()); + let 
main_target = match main_target { + Ok(value) => value, + Err(_) => return, + }; + let lib_target = match lib_target { + Ok(value) => value, + Err(_) => return, + }; + + let main_message = json!({ + "reason": "compiler-message", + "message": { + "message": "main warning", + "level": "warning", + "code": { "code": "clippy::main" }, + "spans": [ + { + "file_name": "src/main.rs", + "line_start": 1, + "line_end": 1, + "column_start": 1, + "column_end": 4, + "is_primary": true + } + ] + } + }) + .to_string(); + let lib_message = json!({ + "reason": "compiler-message", + "message": { + "message": "lib warning", + "level": "warning", + "code": { "code": "clippy::lib" }, + "spans": [ + { + "file_name": "src/lib.rs", + "line_start": 1, + "line_end": 1, + "column_start": 1, + "column_end": 4, + "is_primary": true + } + ] + } + }) + .to_string(); + + let stream = format!("{main_message}\n{lib_message}\n"); + let requested = vec![lib_target, main_target]; + let diagnostics = parse_clippy_json_stream(stream.as_str(), requested.as_slice(), temp.path()); + assert_eq!(diagnostics.len(), 2); + assert_eq!( + diagnostics[0].code.clone().unwrap_or_default(), + "clippy::lib" + ); + assert_eq!( + diagnostics[1].code.clone().unwrap_or_default(), + "clippy::main" + ); +} + +#[test] +fn parse_clippy_json_stream_deduplicates_identical_messages() { + let temp = tempfile::TempDir::new(); + assert!(temp.is_ok()); + let temp = match temp { + Ok(value) => value, + Err(_) => return, + }; + let src_dir = temp.path().join("src"); + let create_dir = std::fs::create_dir_all(src_dir.as_path()); + assert!(create_dir.is_ok()); + let main_file = src_dir.join("main.rs"); + let write_main = std::fs::write(main_file.as_path(), "fn main() {}\n"); + assert!(write_main.is_ok()); + + let target_path = SourceFilePath::try_new(main_file.clone()); + assert!(target_path.is_ok()); + let target_path = match target_path { + Ok(value) => value, + Err(_) => return, + }; + + let message = json!({ + "reason": 
"compiler-message", + "message": { + "message": "duplicate warning", + "level": "warning", + "code": { "code": "clippy::duplicate" }, + "spans": [ + { + "file_name": "src/main.rs", + "line_start": 1, + "line_end": 1, + "column_start": 1, + "column_end": 4, + "is_primary": true + } + ] + } + }) + .to_string(); + + let stream = format!("{message}\n{message}\n"); + let diagnostics = parse_clippy_json_stream(stream.as_str(), &[target_path], temp.path()); + assert_eq!(diagnostics.len(), 1); + assert_eq!( + diagnostics[0].code.clone().unwrap_or_default(), + "clippy::duplicate" + ); +} + +#[test] +fn absolute_path_input_normalizes_file_uris_and_relative_paths() { + let temp = tempfile::TempDir::new(); + assert!(temp.is_ok()); + let temp = match temp { + Ok(value) => value, + Err(_) => return, + }; + let workspace_file = temp.path().join("mod.rs"); + let uri = Url::from_file_path(&workspace_file); + assert!(uri.is_ok()); + let uri = match uri { + Ok(value) => value, + Err(_) => return, + }; + + let from_uri = AbsolutePathInput(uri.to_string()).into_source_file_path(); + assert!(from_uri.is_ok()); + + let previous_dir = std::env::current_dir(); + assert!(previous_dir.is_ok()); + let previous_dir = match previous_dir { + Ok(value) => value, + Err(_) => return, + }; + + let set_into_temp = std::env::set_current_dir(temp.path()); + assert!(set_into_temp.is_ok()); + let from_relative = AbsolutePathInput("mod.rs".to_owned()).into_source_file_path(); + assert!(from_relative.is_ok()); + let _result = std::env::set_current_dir(previous_dir); +} diff --git a/crates/adequate-rust-mcp/src/worker/workspace.rs b/crates/adequate-rust-mcp/src/worker/workspace.rs new file mode 100644 index 0000000..ef7ed47 --- /dev/null +++ b/crates/adequate-rust-mcp/src/worker/workspace.rs @@ -0,0 +1,313 @@ +use super::{ + FixEverythingOutput, FixStepOutput, PorcelainErrorKind, porcelain_internal_error, + porcelain_invalid_params, +}; +use rmcp::ErrorData as McpError; +use std::{ + fs, + path::{Path, 
PathBuf}, +}; +use tokio::process::Command; + +pub(super) const COMMAND_OUTPUT_EXCERPT_CHAR_LIMIT: usize = 1_500; + +#[derive(Debug, Clone)] +pub(super) struct WorkspaceCommandSpec { + pub(super) program: String, + pub(super) args: Vec<String>, +} + +#[derive(Debug, Clone)] +pub(super) struct WorkspaceCommandRunOutput { + pub(super) status: std::process::ExitStatus, + pub(super) standard_output: String, + pub(super) standard_error: String, +} + +impl WorkspaceCommandSpec { + pub(super) fn rendered(&self) -> String { + let mut parts = Vec::with_capacity(self.args.len().saturating_add(1)); + parts.push(self.program.as_str()); + parts.extend(self.args.iter().map(String::as_str)); + parts.join(" ") + } + + pub(super) fn into_argv(self) -> Vec<String> { + let Self { program, args } = self; + let mut command = Vec::with_capacity(args.len().saturating_add(1)); + command.push(program); + command.extend(args); + command + } +} + +pub(super) async fn run_workspace_fix_everything() -> Result<FixEverythingOutput, McpError> { + let workspace_root = resolve_workspace_root_path()?; + let (format_command, fix_command) = + resolve_workspace_fix_command_specs(workspace_root.as_path())?; + let format_step = + run_workspace_fix_step(workspace_root.as_path(), "format_workspace", format_command) + .await?; + let fix_step = run_workspace_fix_step( + workspace_root.as_path(), + "clippy_fix_workspace", + fix_command, + ) + .await?; + let steps = vec![format_step, fix_step]; + let success = steps.iter().all(|step| step.success); + Ok(FixEverythingOutput { + success, + workspace_root: workspace_root.display().to_string(), + steps, + }) +} + +pub(super) async fn run_workspace_fix_step( + workspace_root: &Path, + step_name: &str, + command_spec: WorkspaceCommandSpec, +) -> Result<FixStepOutput, McpError> { + let rendered_command = command_spec.rendered(); + let output = + run_workspace_command(workspace_root, &command_spec, rendered_command.as_str()).await?; + Ok(FixStepOutput { + step_name: 
step_name.to_owned(), + command: command_spec.into_argv(), + success: output.status.success(), + exit_code: output.status.code(), + standard_output_excerpt: command_output_excerpt(output.standard_output.as_str()), + standard_error_excerpt: command_output_excerpt(output.standard_error.as_str()), + }) +} + +pub(super) fn resolve_workspace_root_path() -> Result<PathBuf, McpError> { + let from_env = std::env::var("ADEQUATE_MCP_WORKSPACE_ROOT") + .ok() + .map(PathBuf::from); + let raw_root = match from_env { + Some(path) => path, + None => std::env::current_dir().map_err(|_| { + porcelain_internal_error( + "failed to determine current working directory", + PorcelainErrorKind::InternalFailure, + Some("set ADEQUATE_MCP_WORKSPACE_ROOT explicitly"), + false, + ) + })?, + }; + let normalized = fs::canonicalize(&raw_root).unwrap_or(raw_root); + if normalized.is_dir() { + Ok(normalized) + } else { + Err(porcelain_invalid_params( + "workspace root must be a directory", + PorcelainErrorKind::InvalidInput, + Some("set ADEQUATE_MCP_WORKSPACE_ROOT to a project directory"), + )) + } +} + +pub(super) fn default_clippy_command_spec() -> WorkspaceCommandSpec { + WorkspaceCommandSpec { + program: "cargo".to_owned(), + args: vec![ + "clippy".to_owned(), + "--workspace".to_owned(), + "--all-targets".to_owned(), + "--all-features".to_owned(), + "--message-format=json".to_owned(), + ], + } +} + +pub(super) fn default_format_command_spec() -> WorkspaceCommandSpec { + WorkspaceCommandSpec { + program: "cargo".to_owned(), + args: vec!["fmt".to_owned(), "--all".to_owned()], + } +} + +pub(super) fn default_fix_command_spec() -> WorkspaceCommandSpec { + WorkspaceCommandSpec { + program: "cargo".to_owned(), + args: vec![ + "clippy".to_owned(), + "--fix".to_owned(), + "--workspace".to_owned(), + "--all-targets".to_owned(), + "--all-features".to_owned(), + "--allow-dirty".to_owned(), + "--allow-staged".to_owned(), + ], + } +} + +pub(super) fn resolve_clippy_command_spec( + workspace_root: &Path, +) 
-> Result<WorkspaceCommandSpec, McpError> { + let metadata = read_workspace_tool_metadata(workspace_root)?; + let command = read_workspace_tool_command( + metadata.as_ref(), + "clippy_command", + &["clippy_command", "clippyCommand"], + )?; + Ok(command.unwrap_or_else(default_clippy_command_spec)) +} + +pub(super) fn resolve_workspace_fix_command_specs( + workspace_root: &Path, +) -> Result<(WorkspaceCommandSpec, WorkspaceCommandSpec), McpError> { + let metadata = read_workspace_tool_metadata(workspace_root)?; + let format_command = read_workspace_tool_command( + metadata.as_ref(), + "format_command", + &[ + "format_command", + "formatCommand", + "fmt_command", + "fmtCommand", + ], + )? + .unwrap_or_else(default_format_command_spec); + let fix_command = read_workspace_tool_command( + metadata.as_ref(), + "fix_command", + &[ + "fix_command", + "fixCommand", + "fix_everything_command", + "fixEverythingCommand", + ], + )? + .unwrap_or_else(default_fix_command_spec); + Ok((format_command, fix_command)) +} + +pub(super) fn read_workspace_tool_metadata( + workspace_root: &Path, +) -> Result<Option<toml::map::Map<String, toml::Value>>, McpError> { + let manifest_path = workspace_root.join("Cargo.toml"); + let manifest_text = fs::read_to_string(manifest_path.as_path()).map_err(|_| { + porcelain_internal_error( + "failed reading workspace Cargo.toml for tool configuration", + PorcelainErrorKind::ToolRuntimeFailure, + Some("ensure workspace root points to a Cargo workspace"), + false, + ) + })?; + let manifest = manifest_text.parse::<toml::Value>().map_err(|_| { + porcelain_internal_error( + "failed parsing workspace Cargo.toml for tool configuration", + PorcelainErrorKind::ToolRuntimeFailure, + Some("fix Cargo.toml syntax under [workspace.metadata]"), + false, + ) + })?; + let metadata = manifest + .get("workspace") + .and_then(toml::Value::as_table) + .and_then(|workspace| workspace.get("metadata")) + .and_then(toml::Value::as_table) + .and_then(|metadata| { + metadata + 
.get("adequate-rust-mcp") + .or_else(|| metadata.get("adequate_rust_mcp")) + }) + .and_then(toml::Value::as_table) + .cloned(); + Ok(metadata) +} + +pub(super) fn read_workspace_tool_command( + metadata: Option<&toml::map::Map<String, toml::Value>>, + setting_name: &'static str, + key_aliases: &[&str], +) -> Result<Option<WorkspaceCommandSpec>, McpError> { + let Some(metadata) = metadata else { + return Ok(None); + }; + let value = key_aliases.iter().find_map(|key| metadata.get(*key)); + let Some(value) = value else { + return Ok(None); + }; + let command_items = value.as_array().ok_or_else(|| { + porcelain_invalid_params( + format!("workspace metadata {setting_name} must be an array of strings"), + PorcelainErrorKind::InvalidInput, + Some("set command metadata to an array like [\"cargo\", \"...\"]"), + ) + })?; + let mut command = command_items + .iter() + .map(toml::Value::as_str) + .collect::<Option<Vec<_>>>() + .ok_or_else(|| { + porcelain_invalid_params( + format!("workspace metadata {setting_name} must be an array of strings"), + PorcelainErrorKind::InvalidInput, + Some("set command metadata to an array like [\"cargo\", \"...\"]"), + ) + })? 
+ .into_iter() + .map(str::to_owned) + .collect::<Vec<_>>(); + if command.is_empty() || command[0].trim().is_empty() { + return Err(porcelain_invalid_params( + format!("workspace metadata {setting_name} must include a binary name"), + PorcelainErrorKind::InvalidInput, + Some("first entry in the command array must be executable name"), + )); + } + let program = command.remove(0); + Ok(Some(WorkspaceCommandSpec { + program, + args: command, + })) +} + +pub(super) async fn run_workspace_command( + workspace_root: &Path, + command_spec: &WorkspaceCommandSpec, + rendered_command: &str, +) -> Result<WorkspaceCommandRunOutput, McpError> { + let mut command = Command::new(command_spec.program.as_str()); + let _configured_command = command + .args(command_spec.args.as_slice()) + .current_dir(workspace_root) + .env("CARGO_TERM_COLOR", "never"); + let output = command.output().await.map_err(|_| { + porcelain_internal_error( + format!("failed to spawn workspace command `{rendered_command}`"), + PorcelainErrorKind::ToolRuntimeFailure, + Some("ensure required toolchain binaries are installed and available in PATH"), + false, + ) + })?; + Ok(WorkspaceCommandRunOutput { + status: output.status, + standard_output: String::from_utf8_lossy(output.stdout.as_slice()).into_owned(), + standard_error: String::from_utf8_lossy(output.stderr.as_slice()).into_owned(), + }) +} + +pub(super) fn command_output_excerpt(output: &str) -> Option<String> { + let trimmed = output.trim(); + if trimmed.is_empty() { + return None; + } + let total_chars = trimmed.chars().count(); + if total_chars <= COMMAND_OUTPUT_EXCERPT_CHAR_LIMIT { + return Some(trimmed.to_owned()); + } + let keep_from_char = total_chars.saturating_sub(COMMAND_OUTPUT_EXCERPT_CHAR_LIMIT); + let keep_from_byte = trimmed + .char_indices() + .nth(keep_from_char) + .map_or(0, |(index, _)| index); + Some(format!( + "[...truncated {} chars...] {}", + keep_from_char, + &trimmed[keep_from_byte..] 
+ )) +} diff --git a/crates/adequate-rust-mcp/tests/diagnostics_warmup_retry.rs b/crates/adequate-rust-mcp/tests/diagnostics_warmup_retry.rs new file mode 100644 index 0000000..b27c34f --- /dev/null +++ b/crates/adequate-rust-mcp/tests/diagnostics_warmup_retry.rs @@ -0,0 +1,403 @@ +//! Integration test for transient unlinked-file diagnostics during warm-up. + +use notify as _; +use ra_mcp_domain as _; +use ra_mcp_engine as _; +use rmcp as _; +use schemars as _; +use serde as _; +use serde_json::{Value, json}; +use serial_test::serial; +use std::{ + error::Error, + fs, io, + path::{Path, PathBuf}, + process::Stdio, + time::Duration, +}; +use tempfile::TempDir; +use tokio::{ + io::{AsyncBufReadExt, AsyncWriteExt, BufReader, Lines}, + process::{Child, ChildStdin, ChildStdout, Command}, +}; +use toml as _; +use tracing as _; +use tracing_subscriber as _; +use url as _; + +const RESPONSE_TIMEOUT: Duration = Duration::from_secs(20); + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +#[serial] +async fn diagnostics_retries_transient_unlinked_file_reports() -> Result<(), Box<dyn Error>> { + let fixture = Fixture::new()?; + let worker_binary = resolve_worker_binary()?; + let fake_ra_binary = resolve_fake_ra_binary()?; + if !fake_ra_binary.exists() { + eprintln!( + "skipping warmup diagnostics test: fake-rust-analyzer missing at {}", + fake_ra_binary.display() + ); + return Ok(()); + } + + let fake_wrapper = fixture.path().join("fake-ra-warmup.sh"); + write_fake_ra_wrapper( + fake_wrapper.as_path(), + fake_ra_binary.as_path(), + FakeRaBehavior { + diagnostic_warmup_count: 2, + diagnostic_cancel_count: 0, + }, + )?; + + let mut harness = WorkerHarness::spawn( + worker_binary.as_path(), + fixture.path(), + fake_wrapper.as_path(), + ) + .await?; + harness.initialize().await?; + + let diagnostics = harness + .call_tool( + "diagnostics", + json!({ + "file_path": fixture.source_file().display().to_string(), + "render": "json", + "mode": "full" + }), + ) + .await?; + 
let items = diagnostics + .get("diagnostics") + .and_then(Value::as_array) + .ok_or_else(|| io::Error::other("diagnostics payload missing diagnostics array"))?; + assert_eq!(items.len(), 1); + let message = items[0] + .get("message") + .and_then(Value::as_str) + .ok_or_else(|| io::Error::other("diagnostic missing message"))?; + assert_eq!(message, "fake diagnostic"); + let code = items[0].get("code").cloned().unwrap_or(Value::Null); + assert_eq!(code, Value::Null); + + harness.shutdown().await; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +#[serial] +async fn diagnostics_retries_server_cancelled_response() -> Result<(), Box<dyn Error>> { + let fixture = Fixture::new()?; + let worker_binary = resolve_worker_binary()?; + let fake_ra_binary = resolve_fake_ra_binary()?; + if !fake_ra_binary.exists() { + eprintln!( + "skipping cancelled diagnostics test: fake-rust-analyzer missing at {}", + fake_ra_binary.display() + ); + return Ok(()); + } + + let fake_wrapper = fixture.path().join("fake-ra-cancel.sh"); + write_fake_ra_wrapper( + fake_wrapper.as_path(), + fake_ra_binary.as_path(), + FakeRaBehavior { + diagnostic_warmup_count: 0, + diagnostic_cancel_count: 1, + }, + )?; + + let mut harness = WorkerHarness::spawn( + worker_binary.as_path(), + fixture.path(), + fake_wrapper.as_path(), + ) + .await?; + harness.initialize().await?; + + let diagnostics = harness + .call_tool( + "diagnostics", + json!({ + "file_path": fixture.source_file().display().to_string(), + "render": "json", + "mode": "full" + }), + ) + .await?; + let items = diagnostics + .get("diagnostics") + .and_then(Value::as_array) + .ok_or_else(|| io::Error::other("diagnostics payload missing diagnostics array"))?; + assert_eq!(items.len(), 1); + let message = items[0] + .get("message") + .and_then(Value::as_str) + .ok_or_else(|| io::Error::other("diagnostic missing message"))?; + assert_eq!(message, "fake diagnostic"); + + harness.shutdown().await; + Ok(()) +} + +struct Fixture { + 
temp_dir: TempDir, + source_file: PathBuf, +} + +impl Fixture { + fn new() -> Result<Self, Box<dyn Error>> { + let temp_dir = tempfile::tempdir()?; + let src = temp_dir.path().join("src"); + fs::create_dir_all(&src)?; + fs::write( + temp_dir.path().join("Cargo.toml"), + "[package]\nname = \"diagnostics_warmup_fixture\"\nversion = \"0.0.0\"\nedition = \"2024\"\n", + )?; + let source_file = src.join("lib.rs"); + fs::write(&source_file, "pub fn compute() -> i32 { 1 }\n")?; + Ok(Self { + temp_dir, + source_file, + }) + } + + fn path(&self) -> &Path { + self.temp_dir.path() + } + + fn source_file(&self) -> &Path { + self.source_file.as_path() + } +} + +struct WorkerHarness { + child: Child, + stdin: ChildStdin, + stdout: Lines<BufReader<ChildStdout>>, + next_id: u64, +} + +impl Drop for WorkerHarness { + fn drop(&mut self) { + let _ = self.child.start_kill(); + } +} + +impl WorkerHarness { + async fn spawn( + worker_binary: &Path, + workspace_root: &Path, + fake_ra_binary: &Path, + ) -> Result<Self, Box<dyn Error>> { + let mut child = Command::new(worker_binary) + .arg("--worker") + .env("ADEQUATE_MCP_WORKSPACE_ROOT", workspace_root) + .env("ADEQUATE_MCP_RA_BINARY", fake_ra_binary) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::null()) + .spawn()?; + + let stdin = child + .stdin + .take() + .ok_or_else(|| io::Error::other("worker stdin unavailable"))?; + let stdout = child + .stdout + .take() + .ok_or_else(|| io::Error::other("worker stdout unavailable"))?; + Ok(Self { + child, + stdin, + stdout: BufReader::new(stdout).lines(), + next_id: 1, + }) + } + + async fn initialize(&mut self) -> Result<(), Box<dyn Error>> { + let _ = self + .request( + "initialize", + json!({ + "protocolVersion": "2025-11-25", + "capabilities": {}, + "clientInfo": { + "name": "diagnostics-warmup-test", + "version": "1.0.0" + } + }), + ) + .await?; + self.notify("notifications/initialized", json!({})).await?; + Ok(()) + } + + async fn call_tool( + &mut self, + tool_name: 
&str, + arguments: Value, + ) -> Result<Value, Box<dyn Error>> { + let response = self + .request( + "tools/call", + json!({ + "name": tool_name, + "arguments": arguments, + }), + ) + .await?; + let result = response + .get("result") + .ok_or_else(|| io::Error::other("tool response missing result"))?; + let is_error = result + .get("isError") + .and_then(Value::as_bool) + .unwrap_or(false); + if is_error { + return Err(Box::new(io::Error::other(format!( + "tool `{tool_name}` returned error payload: {result}" + )))); + } + Ok(result + .get("structuredContent") + .cloned() + .unwrap_or(Value::Null)) + } + + async fn request(&mut self, method: &str, params: Value) -> Result<Value, Box<dyn Error>> { + let id = self.next_id; + self.next_id = self.next_id.saturating_add(1); + let payload = json!({ + "jsonrpc": "2.0", + "id": id, + "method": method, + "params": params, + }); + self.write_message(&payload).await?; + self.read_response(id).await + } + + async fn notify(&mut self, method: &str, params: Value) -> Result<(), Box<dyn Error>> { + let payload = json!({ + "jsonrpc": "2.0", + "method": method, + "params": params, + }); + self.write_message(&payload).await + } + + async fn write_message(&mut self, message: &Value) -> Result<(), Box<dyn Error>> { + let serialized = serde_json::to_vec(message)?; + self.stdin.write_all(&serialized).await?; + self.stdin.write_all(b"\n").await?; + self.stdin.flush().await?; + Ok(()) + } + + async fn read_response(&mut self, request_id: u64) -> Result<Value, Box<dyn Error>> { + let deadline = tokio::time::Instant::now() + RESPONSE_TIMEOUT; + loop { + if tokio::time::Instant::now() >= deadline { + return Err(Box::new(io::Error::new( + io::ErrorKind::TimedOut, + format!("timed out waiting for response id {request_id}"), + ))); + } + let remaining = deadline.saturating_duration_since(tokio::time::Instant::now()); + let next_line = tokio::time::timeout(remaining, self.stdout.next_line()).await; + let line = match next_line { + 
Ok(Ok(Some(line))) => line, + Ok(Ok(None)) => { + return Err(Box::new(io::Error::new( + io::ErrorKind::UnexpectedEof, + "worker stdout closed while awaiting response", + ))); + } + Ok(Err(error)) => return Err(Box::new(error)), + Err(_) => { + return Err(Box::new(io::Error::new( + io::ErrorKind::TimedOut, + format!("timed out waiting for response id {request_id}"), + ))); + } + }; + let message = match serde_json::from_str::<Value>(&line) { + Ok(message) => message, + Err(_) => continue, + }; + if message.get("id").and_then(Value::as_u64) == Some(request_id) { + return Ok(message); + } + } + } + + async fn shutdown(&mut self) { + let _ = self.child.kill().await; + let _ = self.child.wait().await; + } +} + +#[derive(Clone, Copy)] +struct FakeRaBehavior { + diagnostic_warmup_count: u8, + diagnostic_cancel_count: u8, +} + +fn write_fake_ra_wrapper( + script_path: &Path, + fake_ra_binary: &Path, + behavior: FakeRaBehavior, +) -> io::Result<()> { + let script = format!( + "#!/usr/bin/env bash\nexec \"{}\" --mode stable --diagnostic-warmup-count {} --diagnostic-cancel-count {}\n", + fake_ra_binary.display(), + behavior.diagnostic_warmup_count, + behavior.diagnostic_cancel_count + ); + fs::write(script_path, script)?; + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + + let permissions = fs::Permissions::from_mode(0o755); + fs::set_permissions(script_path, permissions)?; + } + Ok(()) +} + +fn resolve_worker_binary() -> Result<PathBuf, Box<dyn Error>> { + if let Ok(path) = std::env::var("CARGO_BIN_EXE_adequate-rust-mcp") { + return Ok(PathBuf::from(path)); + } + + let current = std::env::current_exe()?; + let deps_dir = current + .parent() + .ok_or_else(|| io::Error::other("failed to find integration test deps directory"))?; + let target_dir = deps_dir + .parent() + .ok_or_else(|| io::Error::other("failed to resolve target debug directory"))?; + Ok(target_dir.join("adequate-rust-mcp")) +} + +fn resolve_fake_ra_binary() -> Result<PathBuf, Box<dyn Error>> { + if 
let Ok(path) = std::env::var("CARGO_BIN_EXE_fake-rust-analyzer") {
+        return Ok(PathBuf::from(path));
+    }
+    // NOTE(review): probes both hyphen and underscore spellings of the
+    // Cargo-provided env var — confirm which toolchain versions emit which.
+    if let Ok(path) = std::env::var("CARGO_BIN_EXE_fake_rust_analyzer") {
+        return Ok(PathBuf::from(path));
+    }
+    // Fallback: derive target/debug from this test executable's location.
+    let current = std::env::current_exe()?;
+    let deps_dir = current
+        .parent()
+        .ok_or_else(|| io::Error::other("failed to resolve integration test deps directory"))?;
+    let target_debug = deps_dir
+        .parent()
+        .ok_or_else(|| io::Error::other("failed to resolve target debug directory"))?;
+    Ok(target_debug.join("fake-rust-analyzer"))
+}
+use libmcp as _;
diff --git a/crates/adequate-rust-mcp/tests/e2e_gauntlet.rs b/crates/adequate-rust-mcp/tests/e2e_gauntlet.rs
new file mode 100644
index 0000000..a5a2861
--- /dev/null
+++ b/crates/adequate-rust-mcp/tests/e2e_gauntlet.rs
@@ -0,0 +1,926 @@
+//! Optional live-fire end-to-end MCP gauntlet against a real rust-analyzer process.
+//!
+//! This test is gated behind `ADEQUATE_MCP_ENABLE_E2E=1` because it is intentionally
+//! heavyweight and depends on a local rust-analyzer binary.
+ +use notify as _; +use ra_mcp_domain as _; +use ra_mcp_engine as _; +use rmcp as _; +use schemars as _; +use serde as _; +use serde_json::{Value, json}; +use serial_test::serial; +use std::{ + collections::HashMap, + error::Error, + fs, io, + path::{Path, PathBuf}, + process::Stdio, + time::{Duration, Instant}, +}; +use tempfile::TempDir; +use tokio::{ + io::{AsyncBufReadExt, AsyncWriteExt, BufReader, Lines}, + process::{Child, ChildStdin, ChildStdout, Command}, +}; +use toml as _; +use tracing as _; +use tracing_subscriber as _; +use url::Url; + +const E2E_ENABLE_ENV: &str = "ADEQUATE_MCP_ENABLE_E2E"; +const RA_BINARY_ENV: &str = "ADEQUATE_MCP_RA_BINARY"; +const RESPONSE_TIMEOUT: Duration = Duration::from_secs(45); + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +#[serial] +async fn live_fire_gauntlet_reaches_known_state() -> Result<(), Box<dyn Error>> { + if std::env::var_os(E2E_ENABLE_ENV).is_none() { + eprintln!("skipping e2e gauntlet: set {E2E_ENABLE_ENV}=1 to enable"); + return Ok(()); + } + + let ra_binary = std::env::var(RA_BINARY_ENV).unwrap_or_else(|_| "rust-analyzer".to_owned()); + if !rust_analyzer_available(ra_binary.as_str()).await { + eprintln!("skipping e2e gauntlet: rust-analyzer binary unavailable at `{ra_binary}`"); + return Ok(()); + } + + let fixture = Fixture::new()?; + let file_uri = fixture.main_uri()?; + let file_path = fixture.main_path().display().to_string(); + + let mut harness = McpHarness::spawn(fixture.workspace_root(), ra_binary.as_str()).await?; + harness.initialize().await?; + + let tools = harness.tools_list().await?; + assert!(tools.iter().any(|tool| tool == "advanced_lsp_request")); + assert!(tools.iter().any(|tool| tool == "rename_symbol")); + assert!(tools.iter().any(|tool| tool == "hover")); + assert!(tools.iter().any(|tool| tool == "references")); + assert!(tools.iter().any(|tool| tool == "clippy_diagnostics")); + assert!(tools.iter().any(|tool| tool == "fix_everything")); + + let diagnostics_tool = 
harness.tool_spec("diagnostics").await?; + let diagnostics_properties = diagnostics_tool + .get("inputSchema") + .and_then(|schema| schema.get("properties")) + .and_then(Value::as_object) + .ok_or_else(|| io::Error::other("diagnostics input schema missing properties"))?; + assert!(diagnostics_properties.contains_key("mode")); + assert!(diagnostics_properties.contains_key("render")); + assert!(!diagnostics_properties.contains_key("format")); + let diagnostics_render_schema = diagnostics_properties + .get("render") + .ok_or_else(|| io::Error::other("diagnostics render schema missing"))? + .to_string(); + assert!(diagnostics_render_schema.contains("\"porcelain\"")); + assert!(diagnostics_render_schema.contains("\"json\"")); + assert!(diagnostics_tool.get("outputSchema").is_some()); + + let clippy_tool = harness.tool_spec("clippy_diagnostics").await?; + let clippy_properties = clippy_tool + .get("inputSchema") + .and_then(|schema| schema.get("properties")) + .and_then(Value::as_object) + .ok_or_else(|| io::Error::other("clippy input schema missing properties"))?; + assert!(clippy_properties.contains_key("render")); + assert!(!clippy_properties.contains_key("format")); + + for tool_name in ["hover", "definition", "references"] { + let tool = harness.tool_spec(tool_name).await?; + let properties = tool + .get("inputSchema") + .and_then(|schema| schema.get("properties")) + .and_then(Value::as_object) + .ok_or_else(|| { + io::Error::other(format!("{tool_name} input schema missing properties")) + })?; + assert!(properties.contains_key("render")); + assert!(properties.contains_key("path_style")); + assert!(tool.get("outputSchema").is_some()); + } + + for tool_name in ["health_snapshot", "telemetry_snapshot"] { + let tool = harness.tool_spec(tool_name).await?; + let properties = tool + .get("inputSchema") + .and_then(|schema| schema.get("properties")) + .and_then(Value::as_object) + .ok_or_else(|| { + io::Error::other(format!("{tool_name} input schema missing properties")) + 
})?; + assert!(properties.contains_key("render")); + assert!(tool.get("outputSchema").is_some()); + } + + // Warm symbol index before semantic operations. + let symbols = harness + .call_tool_retrying_transient( + "advanced_lsp_request", + json!({ + "method": "documentSymbol", + "arguments": { + "textDocument": { "uri": file_uri } + } + }), + ) + .await?; + assert!(symbols.is_array() || symbols.is_object()); + + let definition_via_stringified_params = harness + .call_tool_retrying_transient( + "advanced_lsp_request", + json!({ + "method": "textDocument/definition", + "params": format!( + "{{\"textDocument\":{{\"uri\":\"{file_uri}\"}},\"position\":{{\"line\":6,\"character\":16}}}}" + ) + }), + ) + .await?; + assert!( + definition_via_stringified_params.is_array() + || definition_via_stringified_params.is_object() + ); + + for query in ["seed", "compute", "value"] { + let workspace_symbols = harness + .call_tool_retrying_transient( + "advanced_lsp_request", + json!({ + "method": "workspaceSymbol", + "params": { + "query": query + } + }), + ) + .await?; + assert!(workspace_symbols.is_array() || workspace_symbols.is_object()); + } + + let prepare = harness + .call_tool_retrying_transient( + "advanced_lsp_request", + json!({ + "method": "prepareRename", + "arguments": { + "textDocument": { "uri": file_uri }, + "position": { "line": 5, "character": 8 } + } + }), + ) + .await?; + assert!(!prepare.is_null()); + + // Exercise alias-heavy input normalization for rename. 
+ let rename = harness + .call_tool_retrying_transient( + "rename_symbol", + json!({ + "filePath": file_uri, + "line": "6", + "character": 9, + "newName": "seed_value" + }), + ) + .await?; + let edits_applied = rename + .get("edits_applied") + .and_then(Value::as_u64) + .unwrap_or(0); + let files_touched = rename + .get("files_touched") + .and_then(Value::as_u64) + .unwrap_or(0); + assert!(edits_applied >= 2); + assert!(files_touched >= 1); + + let hover_default = harness + .call_tool_response_retrying_transient( + "hover", + json!({ + "file_path": file_path, + "line": 7, + "column": 30 + }), + ) + .await?; + assert!(hover_default.get("structuredContent").is_none()); + let hover_default_text = hover_default + .get("content") + .and_then(Value::as_array) + .and_then(|items| items.first()) + .and_then(|entry| entry.get("text")) + .and_then(Value::as_str) + .unwrap_or(""); + assert!(hover_default_text.contains("fn compute_seed(")); + assert!(!hover_default_text.trim_start().starts_with('{')); + + let hover = harness + .call_tool_retrying_transient( + "hover", + json!({ + "file_path": file_path, + "line": 7, + "column": 30, + "render": "json" + }), + ) + .await?; + assert!(hover.get("rendered").is_some()); + + let definition_default = harness + .call_tool_response_retrying_transient( + "definition", + json!({ + "file_path": file_path, + "line": 7, + "column": 30 + }), + ) + .await?; + assert!(definition_default.get("structuredContent").is_none()); + let definition_default_text = definition_default + .get("content") + .and_then(Value::as_array) + .and_then(|items| items.first()) + .and_then(|entry| entry.get("text")) + .and_then(Value::as_str) + .unwrap_or(""); + assert!(definition_default_text.contains("1 definition")); + assert!(!definition_default_text.trim_start().starts_with('{')); + + let definition = harness + .call_tool_retrying_transient( + "definition", + json!({ + "file_path": file_path, + "line": 7, + "column": 30, + "render": "json" + }), + ) + .await?; + 
let definition_count = definition + .get("locations") + .and_then(Value::as_array) + .map_or(0, Vec::len); + assert!(definition_count >= 1); + + let references_default = harness + .call_tool_response_retrying_transient( + "references", + json!({ + "file_path": file_path, + "line": 7, + "column": 30 + }), + ) + .await?; + assert!(references_default.get("structuredContent").is_none()); + let references_default_text = references_default + .get("content") + .and_then(Value::as_array) + .and_then(|items| items.first()) + .and_then(|entry| entry.get("text")) + .and_then(Value::as_str) + .unwrap_or(""); + assert!(references_default_text.contains("references")); + assert!(!references_default_text.trim_start().starts_with('{')); + + let references = harness + .call_tool_retrying_transient( + "references", + json!({ + "file_path": file_path, + "line": 7, + "column": 30, + "render": "json" + }), + ) + .await?; + let reference_count = references + .get("locations") + .and_then(Value::as_array) + .map_or(0, Vec::len); + assert!(reference_count >= 2); + + let diagnostics_default = harness + .call_tool_response_retrying_transient("diagnostics", json!({ "file_path": file_path })) + .await?; + assert!(diagnostics_default.get("structuredContent").is_none()); + let diagnostics_default_text = diagnostics_default + .get("content") + .and_then(Value::as_array) + .and_then(|items| items.first()) + .and_then(|entry| entry.get("text")) + .and_then(Value::as_str) + .unwrap_or(""); + assert!(diagnostics_default_text.contains("total")); + assert!(!diagnostics_default_text.trim_start().starts_with('{')); + + let diagnostics = harness + .call_tool_retrying_transient( + "diagnostics", + json!({ "file_path": file_path, "render": "json" }), + ) + .await?; + let compact_mode = diagnostics + .get("mode") + .and_then(Value::as_str) + .unwrap_or(""); + assert_eq!(compact_mode, "compact"); + let compact_items = diagnostics + .get("items") + .and_then(Value::as_array) + .map_or(0, Vec::len); + let 
compact_total = diagnostics + .get("counts") + .and_then(Value::as_object) + .and_then(|counts| counts.get("total_count")) + .and_then(Value::as_u64) + .unwrap_or(0); + assert_eq!(compact_total, compact_items as u64); + + let diagnostics_full = harness + .call_tool_retrying_transient( + "diagnostics", + json!({ "file_path": file_path, "mode": "full", "render": "json" }), + ) + .await?; + let full_mode = diagnostics_full + .get("mode") + .and_then(Value::as_str) + .unwrap_or(""); + assert_eq!(full_mode, "full"); + let full_items = diagnostics_full + .get("diagnostics") + .and_then(Value::as_array) + .cloned() + .unwrap_or_default(); + assert_eq!(compact_items, full_items.len()); + + let compact_entries = diagnostics + .get("items") + .and_then(Value::as_array) + .cloned() + .unwrap_or_default(); + assert_eq!(compact_entries.len(), full_items.len()); + for (compact_entry, full_entry) in compact_entries.iter().zip(full_items.iter()) { + let expected_severity = full_entry + .get("level") + .and_then(Value::as_str) + .unwrap_or("information"); + let full_range = full_entry + .get("range") + .cloned() + .unwrap_or_else(|| json!({})); + assert_eq!( + compact_entry + .get("severity") + .and_then(Value::as_str) + .unwrap_or(""), + expected_severity + ); + assert_eq!( + compact_entry + .get("file_path") + .and_then(Value::as_str) + .unwrap_or(""), + full_range + .get("file_path") + .and_then(Value::as_str) + .unwrap_or("") + ); + assert_eq!( + compact_entry + .get("start_line") + .and_then(Value::as_u64) + .unwrap_or(0), + full_range + .get("start") + .and_then(Value::as_object) + .and_then(|start| start.get("line")) + .and_then(Value::as_u64) + .unwrap_or(0) + ); + assert_eq!( + compact_entry + .get("start_column") + .and_then(Value::as_u64) + .unwrap_or(0), + full_range + .get("start") + .and_then(Value::as_object) + .and_then(|start| start.get("column")) + .and_then(Value::as_u64) + .unwrap_or(0) + ); + assert_eq!( + compact_entry + .get("end_line") + 
.and_then(Value::as_u64) + .unwrap_or(0), + full_range + .get("end") + .and_then(Value::as_object) + .and_then(|end| end.get("line")) + .and_then(Value::as_u64) + .unwrap_or(0) + ); + assert_eq!( + compact_entry + .get("end_column") + .and_then(Value::as_u64) + .unwrap_or(0), + full_range + .get("end") + .and_then(Value::as_object) + .and_then(|end| end.get("column")) + .and_then(Value::as_u64) + .unwrap_or(0) + ); + assert_eq!( + compact_entry.get("code").cloned().unwrap_or(Value::Null), + full_entry.get("code").cloned().unwrap_or(Value::Null) + ); + assert_eq!( + compact_entry + .get("message") + .and_then(Value::as_str) + .unwrap_or(""), + full_entry + .get("message") + .and_then(Value::as_str) + .unwrap_or("") + ); + } + + let diagnostics_batch = harness + .call_tool_retrying_transient( + "diagnostics", + json!({ "file_paths": [file_path, file_path], "mode": "full", "render": "json" }), + ) + .await?; + let diagnostics_batch_items = diagnostics_batch + .get("diagnostics") + .and_then(Value::as_array) + .map_or(0, Vec::len); + assert_eq!(diagnostics_batch_items, full_items.len().saturating_mul(2)); + + let clippy_default = harness + .call_tool_response("clippy_diagnostics", json!({ "file_path": file_path })) + .await?; + assert!(clippy_default.get("structuredContent").is_none()); + let clippy_default_text = clippy_default + .get("content") + .and_then(Value::as_array) + .and_then(|items| items.first()) + .and_then(|entry| entry.get("text")) + .and_then(Value::as_str) + .unwrap_or(""); + assert!(clippy_default_text.contains("total")); + assert!(!clippy_default_text.trim_start().starts_with('{')); + + let clippy_compact = harness + .call_tool_retrying_transient( + "clippy_diagnostics", + json!({ "file_path": file_path, "render": "json" }), + ) + .await?; + assert_eq!( + clippy_compact + .get("mode") + .and_then(Value::as_str) + .unwrap_or(""), + "compact" + ); + assert!(clippy_compact.get("counts").is_some()); + + let clippy_full = harness + 
.call_tool_retrying_transient( + "clippy_diagnostics", + json!({ "file_path": file_path, "mode": "full", "render": "json" }), + ) + .await?; + assert_eq!( + clippy_full + .get("mode") + .and_then(Value::as_str) + .unwrap_or(""), + "full" + ); + let clippy_total = clippy_compact + .get("counts") + .and_then(Value::as_object) + .and_then(|counts| counts.get("total_count")) + .and_then(Value::as_u64) + .unwrap_or(0); + let clippy_full_items = clippy_full + .get("diagnostics") + .and_then(Value::as_array) + .map_or(0, Vec::len); + assert_eq!(clippy_total as usize, clippy_full_items); + assert!(clippy_total >= 1); + + let health_default = harness + .call_tool_response_retrying_transient("health_snapshot", json!({})) + .await?; + assert!(health_default.get("structuredContent").is_none()); + let health_default_text = health_default + .get("content") + .and_then(Value::as_array) + .and_then(|items| items.first()) + .and_then(|entry| entry.get("text")) + .and_then(Value::as_str) + .unwrap_or(""); + assert!(health_default_text.contains("gen=")); + assert!(!health_default_text.trim_start().starts_with('{')); + + let health = harness + .call_tool_retrying_transient("health_snapshot", json!({ "render": "json" })) + .await?; + let generation = health + .get("generation") + .and_then(Value::as_u64) + .unwrap_or(0); + assert!(generation >= 1); + + let telemetry_default = harness + .call_tool_response_retrying_transient("telemetry_snapshot", json!({})) + .await?; + assert!(telemetry_default.get("structuredContent").is_none()); + let telemetry_default_text = telemetry_default + .get("content") + .and_then(Value::as_array) + .and_then(|items| items.first()) + .and_then(|entry| entry.get("text")) + .and_then(Value::as_str) + .unwrap_or(""); + assert!(telemetry_default_text.contains("totals requests=")); + assert!(!telemetry_default_text.trim_start().starts_with('{')); + + let telemetry = harness + .call_tool_retrying_transient("telemetry_snapshot", json!({ "render": "json" })) + 
.await?; + assert!(telemetry.get("totals").is_some()); + + harness.shutdown().await; + Ok(()) +} + +struct Fixture { + temp_dir: TempDir, + main_path: PathBuf, +} + +impl Fixture { + fn new() -> Result<Self, Box<dyn Error>> { + let temp_dir = tempfile::tempdir()?; + let src_dir = temp_dir.path().join("src"); + fs::create_dir_all(&src_dir)?; + + fs::write( + temp_dir.path().join("Cargo.toml"), + "[package]\nname = \"gauntlet_fixture\"\nversion = \"0.0.0\"\nedition = \"2024\"\n", + )?; + let main_path = src_dir.join("main.rs"); + fs::write( + &main_path, + "fn compute_seed(seed: i32) -> i32 {\n seed + 1\n}\n\nfn main() {\n let seed = 41;\n let value = compute_seed(seed);\n println!(\"{value}\");\n let unused = 0;\n}\n", + )?; + + Ok(Self { + temp_dir, + main_path, + }) + } + + fn workspace_root(&self) -> &Path { + self.temp_dir.path() + } + + fn main_path(&self) -> &Path { + self.main_path.as_path() + } + + fn main_uri(&self) -> Result<String, Box<dyn Error>> { + let url = Url::from_file_path(&self.main_path) + .map_err(|()| io::Error::other("fixture main.rs is not representable as file URI"))?; + Ok(url.to_string()) + } +} + +struct McpHarness { + child: Child, + stdin: ChildStdin, + stdout: Lines<BufReader<ChildStdout>>, + buffered_responses: HashMap<u64, Value>, + next_id: u64, +} + +impl Drop for McpHarness { + fn drop(&mut self) { + let _result = self.child.start_kill(); + } +} + +impl McpHarness { + async fn spawn(workspace_root: &Path, ra_binary: &str) -> Result<Self, Box<dyn Error>> { + let worker_binary = resolve_worker_binary()?; + let mut child = Command::new(worker_binary) + .arg("--worker") + .env("ADEQUATE_MCP_WORKSPACE_ROOT", workspace_root) + .env("ADEQUATE_MCP_RA_BINARY", ra_binary) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::null()) + .spawn()?; + + let stdin = child + .stdin + .take() + .ok_or_else(|| io::Error::other("worker stdin unavailable"))?; + let stdout = child + .stdout + .take() + .ok_or_else(|| 
io::Error::other("worker stdout unavailable"))?; + Ok(Self { + child, + stdin, + stdout: BufReader::new(stdout).lines(), + buffered_responses: HashMap::new(), + next_id: 1, + }) + } + + async fn initialize(&mut self) -> Result<(), Box<dyn Error>> { + let initialize_payload = json!({ + "protocolVersion": "2025-11-25", + "capabilities": {}, + "clientInfo": { + "name": "adequate-e2e-gauntlet", + "version": "1.0.0" + } + }); + let _result = self.request("initialize", initialize_payload).await?; + self.notify("notifications/initialized", json!({})).await?; + Ok(()) + } + + async fn tools_list(&mut self) -> Result<Vec<String>, Box<dyn Error>> { + let tools = self.tools_catalog().await?; + + let names = tools + .iter() + .filter_map(|tool| tool.get("name").and_then(Value::as_str)) + .map(str::to_owned) + .collect::<Vec<_>>(); + Ok(names) + } + + async fn tool_spec(&mut self, tool_name: &str) -> Result<Value, Box<dyn Error>> { + let tools = self.tools_catalog().await?; + tools + .into_iter() + .find(|tool| tool.get("name").and_then(Value::as_str) == Some(tool_name)) + .ok_or_else(|| io::Error::other(format!("tools/list missing `{tool_name}`")).into()) + } + + async fn tools_catalog(&mut self) -> Result<Vec<Value>, Box<dyn Error>> { + let response = self.request("tools/list", json!({})).await?; + let tools = response + .get("tools") + .and_then(Value::as_array) + .ok_or_else(|| io::Error::other("tools/list missing tools array"))?; + Ok(tools.clone()) + } + + async fn call_tool( + &mut self, + tool_name: &str, + arguments: Value, + ) -> Result<Value, Box<dyn Error>> { + let response = self.call_tool_response(tool_name, arguments).await?; + + if let Some(content) = response.get("structuredContent") { + return Ok(content.clone()); + } + + let text = response + .get("content") + .and_then(Value::as_array) + .and_then(|items| items.first()) + .and_then(|entry| entry.get("text")) + .and_then(Value::as_str); + let Some(text) = text else { + return Ok(Value::Null); + }; + if let 
Ok(parsed) = serde_json::from_str::<Value>(text) { + return Ok(parsed); + } + Ok(Value::String(text.to_owned())) + } + + async fn call_tool_response( + &mut self, + tool_name: &str, + arguments: Value, + ) -> Result<Value, Box<dyn Error>> { + let response = self + .request( + "tools/call", + json!({ + "name": tool_name, + "arguments": arguments, + }), + ) + .await?; + + let is_error = response + .get("isError") + .and_then(Value::as_bool) + .unwrap_or(false); + if is_error { + return Err(Box::new(io::Error::other(format!( + "tool `{tool_name}` returned error payload: {response}" + )))); + } + Ok(response) + } + + async fn call_tool_response_retrying_transient( + &mut self, + tool_name: &str, + arguments: Value, + ) -> Result<Value, Box<dyn Error>> { + let mut attempt = 0_u8; + loop { + attempt = attempt.saturating_add(1); + let result = self.call_tool_response(tool_name, arguments.clone()).await; + match result { + Ok(response) => return Ok(response), + Err(error) => { + let message = error.to_string(); + let is_transient = is_transient_tool_error(message.as_str()); + if !is_transient || attempt >= 4 { + return Err(error); + } + tokio::time::sleep(Duration::from_millis(250)).await; + } + } + } + } + + async fn call_tool_retrying_transient( + &mut self, + tool_name: &str, + arguments: Value, + ) -> Result<Value, Box<dyn Error>> { + let mut attempt = 0_u8; + loop { + attempt = attempt.saturating_add(1); + let result = self.call_tool(tool_name, arguments.clone()).await; + match result { + Ok(payload) => return Ok(payload), + Err(error) => { + let message = error.to_string(); + let is_transient = is_transient_tool_error(message.as_str()); + if !is_transient || attempt >= 4 { + return Err(error); + } + tokio::time::sleep(Duration::from_millis(250)).await; + } + } + } + } + + async fn notify(&mut self, method: &str, params: Value) -> Result<(), Box<dyn Error>> { + let payload = json!({ + "jsonrpc": "2.0", + "method": method, + "params": params, + }); + let serialized = 
serde_json::to_vec(&payload)?; + self.stdin.write_all(&serialized).await?; + self.stdin.write_all(b"\n").await?; + self.stdin.flush().await?; + Ok(()) + } + + async fn request(&mut self, method: &str, params: Value) -> Result<Value, Box<dyn Error>> { + let request_id = self.next_id; + self.next_id = self.next_id.saturating_add(1); + + let payload = json!({ + "jsonrpc": "2.0", + "id": request_id, + "method": method, + "params": params, + }); + let serialized = serde_json::to_vec(&payload)?; + self.stdin.write_all(&serialized).await?; + self.stdin.write_all(b"\n").await?; + self.stdin.flush().await?; + + let response = self.read_response(request_id).await?; + if let Some(error) = response.get("error") { + return Err(Box::new(io::Error::other(format!( + "json-rpc error for method `{method}`: {error}" + )))); + } + let result = response + .get("result") + .cloned() + .ok_or_else(|| io::Error::other("json-rpc response missing result"))?; + Ok(result) + } + + async fn read_response(&mut self, request_id: u64) -> Result<Value, Box<dyn Error>> { + if let Some(buffered) = self.buffered_responses.remove(&request_id) { + return Ok(buffered); + } + + let deadline = Instant::now() + RESPONSE_TIMEOUT; + loop { + if Instant::now() >= deadline { + return Err(Box::new(io::Error::new( + io::ErrorKind::TimedOut, + format!("timed out waiting for response id {request_id}"), + ))); + } + let remaining = deadline.saturating_duration_since(Instant::now()); + let next_line = tokio::time::timeout(remaining, self.stdout.next_line()).await; + let line = match next_line { + Ok(Ok(Some(line))) => line, + Ok(Ok(None)) => { + return Err(Box::new(io::Error::new( + io::ErrorKind::UnexpectedEof, + "worker stdout closed while awaiting response", + ))); + } + Ok(Err(error)) => return Err(Box::new(error)), + Err(_elapsed) => { + return Err(Box::new(io::Error::new( + io::ErrorKind::TimedOut, + format!("timed out waiting for response id {request_id}"), + ))); + } + }; + + let parsed = 
serde_json::from_str::<Value>(&line);
+            // Non-JSON noise on stdout is skipped.
+            let Ok(message) = parsed else {
+                continue;
+            };
+            let response_id = message.get("id").and_then(Value::as_u64);
+            let Some(response_id) = response_id else {
+                continue;
+            };
+
+            if response_id == request_id {
+                return Ok(message);
+            }
+            // Out-of-order responses are parked for a later read_response call.
+            let _existing = self.buffered_responses.insert(response_id, message);
+        }
+    }
+
+    // Best-effort teardown of the worker process; errors are ignored.
+    async fn shutdown(&mut self) {
+        let _kill_result = self.child.kill().await;
+        let _wait_result = self.child.wait().await;
+    }
+}
+
+// Textual heuristics for retryable failures rendered into tool error messages.
+// NOTE(review): presumably targets LSP ContentModified-style conditions
+// (code -32801) — confirm against the worker's actual error rendering.
+fn is_transient_tool_error(message: &str) -> bool {
+    message.contains("\"kind\":\"transient_retryable\"")
+        || message.contains("code=-32801")
+        || message.contains("content modified")
+        || message.contains("document changed")
+}
+
+// Locates the worker binary: prefer the Cargo-provided env var, otherwise
+// derive target/debug from this test executable's own location.
+fn resolve_worker_binary() -> Result<PathBuf, Box<dyn Error>> {
+    if let Ok(path) = std::env::var("CARGO_BIN_EXE_adequate-rust-mcp") {
+        return Ok(PathBuf::from(path));
+    }
+
+    let current = std::env::current_exe()?;
+    let deps_dir = current
+        .parent()
+        .ok_or_else(|| io::Error::other("failed to find integration test deps directory"))?;
+    let target_dir = deps_dir
+        .parent()
+        .ok_or_else(|| io::Error::other("failed to resolve target debug directory"))?;
+    Ok(target_dir.join("adequate-rust-mcp"))
+}
+
+// Probes `<binary> --version`; any spawn failure or non-zero exit counts as
+// unavailable so the gauntlet can skip instead of failing.
+async fn rust_analyzer_available(binary: &str) -> bool {
+    let status = Command::new(binary)
+        .arg("--version")
+        .stdout(Stdio::null())
+        .stderr(Stdio::null())
+        .status()
+        .await;
+    matches!(status, Ok(status) if status.success())
+}
+use libmcp as _;
diff --git a/crates/adequate-rust-mcp/tests/host_inflight_replay.rs b/crates/adequate-rust-mcp/tests/host_inflight_replay.rs
new file mode 100644
index 0000000..17088d6
--- /dev/null
+++ b/crates/adequate-rust-mcp/tests/host_inflight_replay.rs
@@ -0,0 +1,657 @@
+//! Host-level replay test: in-flight requests must survive worker hot-swap.
+ +use notify as _; +use ra_mcp_domain as _; +use ra_mcp_engine as _; +use rmcp as _; +use schemars as _; +use serde as _; +use serde_json::{Value, json}; +use serial_test::serial; +use std::{ + collections::HashMap, + error::Error, + fs, io, + path::{Path, PathBuf}, + process::Stdio, + time::{Duration, Instant}, +}; +use tempfile::TempDir; +use tokio::{ + io::{AsyncBufReadExt, AsyncWriteExt, BufReader, Lines}, + process::{Child, ChildStdin, ChildStdout, Command}, +}; +use toml as _; +use tracing as _; +use tracing_subscriber as _; +use url as _; + +const RESPONSE_TIMEOUT: Duration = Duration::from_secs(20); + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +#[serial] +async fn host_replays_inflight_request_during_hot_swap() -> Result<(), Box<dyn Error>> { + let fixture = Fixture::new()?; + let host_binary = resolve_host_binary()?; + let fake_ra_binary = resolve_fake_ra_binary()?; + if !fake_ra_binary.exists() { + eprintln!( + "skipping host replay test: fake-rust-analyzer missing at {}", + fake_ra_binary.display() + ); + return Ok(()); + } + + let worker_binary = fixture.path().join("adequate-rust-mcp-worker"); + replace_worker_binary(host_binary.as_path(), worker_binary.as_path())?; + + let fake_wrapper = fixture.path().join("fake-ra-wrapper.sh"); + let telemetry_state_home = fixture.path().join("state-home"); + let telemetry_path = telemetry_state_home + .join("adequate-rust-mcp") + .join("telemetry.jsonl"); + let wrapper_config = FakeRaWrapperConfig { + hover_delay_ms: Some(1200), + execute_command_delay_ms: None, + execute_command_log_path: None, + }; + write_fake_ra_wrapper( + fake_wrapper.as_path(), + fake_ra_binary.as_path(), + &wrapper_config, + )?; + + let mut harness = HostHarness::spawn( + host_binary.as_path(), + worker_binary.as_path(), + fixture.path(), + fake_wrapper.as_path(), + telemetry_state_home.as_path(), + ) + .await?; + harness.initialize().await?; + + let hover_id = harness.next_request_id(); + harness + .send_request_with_id( 
+ hover_id, + "tools/call", + json!({ + "name": "hover", + "arguments": { + "file_path": fixture.source_file().display().to_string(), + "line": 1, + "column": 1, + "render": "json" + } + }), + ) + .await?; + + tokio::time::sleep(Duration::from_millis(120)).await; + replace_worker_binary(host_binary.as_path(), worker_binary.as_path())?; + + let response = harness.read_response(hover_id).await?; + assert!( + response.get("error").is_none(), + "expected replayed success, got error response: {response}", + ); + + let result = response + .get("result") + .ok_or_else(|| io::Error::other("missing result in response"))?; + let is_error = result + .get("isError") + .and_then(Value::as_bool) + .unwrap_or(false); + assert!(!is_error, "tool response marked as error: {result}"); + + let rendered = result + .get("structuredContent") + .and_then(|payload| payload.get("rendered")) + .and_then(Value::as_str); + assert_eq!(rendered, Some("hover::ok")); + + harness.shutdown().await; + + let telemetry_events = read_jsonl_events(telemetry_path.as_path())?; + assert!( + telemetry_events + .iter() + .any(|event| event.get("event").and_then(Value::as_str) == Some("tool_call")), + "expected tool_call telemetry event", + ); + assert!( + telemetry_events + .iter() + .any(|event| event.get("event").and_then(Value::as_str) == Some("hot_paths_snapshot")), + "expected hot_paths_snapshot telemetry event", + ); + Ok(()) +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +#[serial] +async fn host_replays_probe_required_request_during_hot_swap() -> Result<(), Box<dyn Error>> { + let fixture = Fixture::new()?; + let host_binary = resolve_host_binary()?; + let fake_ra_binary = resolve_fake_ra_binary()?; + if !fake_ra_binary.exists() { + eprintln!( + "skipping host probe-required replay test: fake-rust-analyzer missing at {}", + fake_ra_binary.display() + ); + return Ok(()); + } + + let worker_binary = fixture.path().join("adequate-rust-mcp-worker"); + 
replace_worker_binary(host_binary.as_path(), worker_binary.as_path())?; + + let effect_log_path = fixture.path().join("execute-command-effects.log"); + let telemetry_state_home = fixture.path().join("state-home"); + let telemetry_path = telemetry_state_home + .join("adequate-rust-mcp") + .join("telemetry.jsonl"); + let fake_wrapper = fixture.path().join("fake-ra-wrapper-probe.sh"); + let wrapper_config = FakeRaWrapperConfig { + hover_delay_ms: None, + execute_command_delay_ms: Some(1200), + execute_command_log_path: Some(effect_log_path.clone()), + }; + write_fake_ra_wrapper( + fake_wrapper.as_path(), + fake_ra_binary.as_path(), + &wrapper_config, + )?; + + let mut harness = HostHarness::spawn( + host_binary.as_path(), + worker_binary.as_path(), + fixture.path(), + fake_wrapper.as_path(), + telemetry_state_home.as_path(), + ) + .await?; + harness.initialize().await?; + + let request_id = harness.next_request_id(); + harness + .send_request_with_id( + request_id, + "tools/call", + json!({ + "name": "advanced_lsp_request", + "arguments": { + "method": "execute_command", + "arguments": { + "command": "probe.required.synthetic", + "arguments": [] + } + } + }), + ) + .await?; + + tokio::time::sleep(Duration::from_millis(300)).await; + replace_worker_binary(host_binary.as_path(), worker_binary.as_path())?; + + let response = harness.read_response(request_id).await?; + assert!( + response.get("error").is_none(), + "expected replayed success for probe-required request, got: {response}", + ); + let result = response + .get("result") + .ok_or_else(|| io::Error::other("missing result in response"))?; + let is_error = result + .get("isError") + .and_then(Value::as_bool) + .unwrap_or(false); + assert!(!is_error, "tool response marked as error: {result}"); + + let effect_log = fs::read_to_string(effect_log_path)?; + let effect_count = effect_log.lines().count(); + assert!( + effect_count >= 2, + "expected at-least-once replay to duplicate probe-required effect under forced 
restart", + ); + + harness.shutdown().await; + + let telemetry_events = read_jsonl_events(telemetry_path.as_path())?; + let probe_tool_event = telemetry_events.iter().find(|event| { + event.get("event").and_then(Value::as_str) == Some("tool_call") + && event.get("tool_name").and_then(Value::as_str) == Some("advanced_lsp_request") + }); + assert!( + probe_tool_event.is_some(), + "expected probe-required tool telemetry" + ); + let probe_tool_event = match probe_tool_event { + Some(value) => value, + None => return Ok(()), + }; + let replay_attempts = probe_tool_event + .get("replay_attempts") + .and_then(Value::as_u64) + .unwrap_or(0); + assert!(replay_attempts >= 1); + + Ok(()) +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +#[serial] +async fn host_logs_wrong_arg_failures_with_path_telemetry() -> Result<(), Box<dyn Error>> { + let fixture = Fixture::new()?; + let host_binary = resolve_host_binary()?; + let fake_ra_binary = resolve_fake_ra_binary()?; + if !fake_ra_binary.exists() { + eprintln!( + "skipping host telemetry failure test: fake-rust-analyzer missing at {}", + fake_ra_binary.display() + ); + return Ok(()); + } + + let worker_binary = fixture.path().join("adequate-rust-mcp-worker"); + replace_worker_binary(host_binary.as_path(), worker_binary.as_path())?; + + let fake_wrapper = fixture.path().join("fake-ra-wrapper-telemetry.sh"); + let telemetry_state_home = fixture.path().join("state-home"); + let telemetry_path = telemetry_state_home + .join("adequate-rust-mcp") + .join("telemetry.jsonl"); + let wrapper_config = FakeRaWrapperConfig { + hover_delay_ms: None, + execute_command_delay_ms: None, + execute_command_log_path: None, + }; + write_fake_ra_wrapper( + fake_wrapper.as_path(), + fake_ra_binary.as_path(), + &wrapper_config, + )?; + + let mut harness = HostHarness::spawn( + host_binary.as_path(), + worker_binary.as_path(), + fixture.path(), + fake_wrapper.as_path(), + telemetry_state_home.as_path(), + ) + .await?; + 
harness.initialize().await?; + + let invalid_request_id = harness.next_request_id(); + harness + .send_request_with_id( + invalid_request_id, + "tools/call", + json!({ + "name": "hover", + "arguments": { + "file_path": fixture.source_file().display().to_string(), + "line": "definitely-not-a-number", + "column": 1 + } + }), + ) + .await?; + + let response = harness.read_response(invalid_request_id).await?; + assert!( + response.get("error").is_some(), + "expected invalid params error response, got: {response}", + ); + + harness.shutdown().await; + + let source_path = fixture.source_file().display().to_string(); + let telemetry_events = read_jsonl_events(telemetry_path.as_path())?; + let tool_event = telemetry_events.iter().find(|event| { + event.get("event").and_then(Value::as_str) == Some("tool_call") + && event.get("request_id") == Some(&json!(invalid_request_id)) + }); + assert!( + tool_event.is_some(), + "expected telemetry tool_call for invalid request", + ); + let tool_event = match tool_event { + Some(value) => value, + None => return Ok(()), + }; + assert_eq!( + tool_event.get("outcome").and_then(Value::as_str), + Some("error") + ); + assert_eq!( + tool_event.get("path_hint").and_then(Value::as_str), + Some(source_path.as_str()), + ); + assert!( + tool_event + .get("error_message") + .and_then(Value::as_str) + .is_some_and(|message| !message.trim().is_empty()), + "expected non-empty error message for invalid args telemetry", + ); + + let snapshot_contains_path = telemetry_events + .iter() + .filter(|event| event.get("event").and_then(Value::as_str) == Some("hot_paths_snapshot")) + .flat_map(|event| { + event + .get("hottest_paths") + .and_then(Value::as_array) + .cloned() + .unwrap_or_default() + }) + .any(|path_line| { + path_line.get("path").and_then(Value::as_str) == Some(source_path.as_str()) + && path_line + .get("error_count") + .and_then(Value::as_u64) + .is_some_and(|count| count >= 1) + }); + assert!( + snapshot_contains_path, + "expected hot path 
snapshots to include errored source path", + ); + + Ok(()) +} + +struct Fixture { + temp_dir: TempDir, + source_file: PathBuf, +} + +impl Fixture { + fn new() -> Result<Self, Box<dyn Error>> { + let temp_dir = tempfile::tempdir()?; + let src = temp_dir.path().join("src"); + fs::create_dir_all(&src)?; + fs::write( + temp_dir.path().join("Cargo.toml"), + "[package]\nname = \"host_replay_fixture\"\nversion = \"0.0.0\"\nedition = \"2024\"\n", + )?; + let source_file = src.join("lib.rs"); + fs::write(&source_file, "pub fn compute() -> i32 { 1 }\n")?; + Ok(Self { + temp_dir, + source_file, + }) + } + + fn path(&self) -> &Path { + self.temp_dir.path() + } + + fn source_file(&self) -> &Path { + self.source_file.as_path() + } +} + +struct HostHarness { + child: Child, + stdin: ChildStdin, + stdout: Lines<BufReader<ChildStdout>>, + buffered_responses: HashMap<u64, Value>, + next_id: u64, +} + +impl Drop for HostHarness { + fn drop(&mut self) { + let _ = self.child.start_kill(); + } +} + +impl HostHarness { + async fn spawn( + host_binary: &Path, + worker_binary: &Path, + workspace_root: &Path, + fake_ra_binary: &Path, + telemetry_state_home: &Path, + ) -> Result<Self, Box<dyn Error>> { + let mut child = Command::new(host_binary) + .env("ADEQUATE_MCP_WORKER_BINARY", worker_binary) + .env("ADEQUATE_MCP_WORKSPACE_ROOT", workspace_root) + .env("ADEQUATE_MCP_RA_BINARY", fake_ra_binary) + .env("XDG_STATE_HOME", telemetry_state_home) + .env("ADEQUATE_MCP_TELEMETRY_SNAPSHOT_EVERY", "1") + .env("ADEQUATE_MCP_HOST_RELOAD_DEBOUNCE_MS", "0") + .env("ADEQUATE_MCP_HOST_RESPAWN_FLOOR_MS", "10") + .env("ADEQUATE_MCP_HOST_RESPAWN_CEILING_MS", "20") + .env("ADEQUATE_MCP_HOST_MAX_REPLAY_ATTEMPTS", "8") + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::null()) + .spawn()?; + + let stdin = child + .stdin + .take() + .ok_or_else(|| io::Error::other("host stdin unavailable"))?; + let stdout = child + .stdout + .take() + .ok_or_else(|| io::Error::other("host stdout 
unavailable"))?; + Ok(Self { + child, + stdin, + stdout: BufReader::new(stdout).lines(), + buffered_responses: HashMap::new(), + next_id: 1, + }) + } + + async fn initialize(&mut self) -> Result<(), Box<dyn Error>> { + let _result = self + .request( + "initialize", + json!({ + "protocolVersion": "2025-11-25", + "capabilities": {}, + "clientInfo": { + "name": "host-replay-test", + "version": "1.0.0" + } + }), + ) + .await?; + self.notify("notifications/initialized", json!({})).await?; + Ok(()) + } + + fn next_request_id(&mut self) -> u64 { + let id = self.next_id; + self.next_id = self.next_id.saturating_add(1); + id + } + + async fn send_request_with_id( + &mut self, + id: u64, + method: &str, + params: Value, + ) -> Result<(), Box<dyn Error>> { + let payload = json!({ + "jsonrpc": "2.0", + "id": id, + "method": method, + "params": params, + }); + self.write_message(&payload).await + } + + async fn request(&mut self, method: &str, params: Value) -> Result<Value, Box<dyn Error>> { + let id = self.next_request_id(); + self.send_request_with_id(id, method, params).await?; + self.read_response(id).await + } + + async fn notify(&mut self, method: &str, params: Value) -> Result<(), Box<dyn Error>> { + let payload = json!({ + "jsonrpc": "2.0", + "method": method, + "params": params, + }); + self.write_message(&payload).await + } + + async fn write_message(&mut self, message: &Value) -> Result<(), Box<dyn Error>> { + let serialized = serde_json::to_vec(message)?; + self.stdin.write_all(&serialized).await?; + self.stdin.write_all(b"\n").await?; + self.stdin.flush().await?; + Ok(()) + } + + async fn read_response(&mut self, request_id: u64) -> Result<Value, Box<dyn Error>> { + if let Some(buffered) = self.buffered_responses.remove(&request_id) { + return Ok(buffered); + } + let deadline = Instant::now() + RESPONSE_TIMEOUT; + loop { + if Instant::now() >= deadline { + return Err(Box::new(io::Error::new( + io::ErrorKind::TimedOut, + format!("timed out waiting for response id 
{request_id}"), + ))); + } + let remaining = deadline.saturating_duration_since(Instant::now()); + let next_line = tokio::time::timeout(remaining, self.stdout.next_line()).await; + let line = match next_line { + Ok(Ok(Some(line))) => line, + Ok(Ok(None)) => { + return Err(Box::new(io::Error::new( + io::ErrorKind::UnexpectedEof, + "host stdout closed while awaiting response", + ))); + } + Ok(Err(error)) => return Err(Box::new(error)), + Err(_) => { + return Err(Box::new(io::Error::new( + io::ErrorKind::TimedOut, + format!("timed out waiting for response id {request_id}"), + ))); + } + }; + + let parsed = serde_json::from_str::<Value>(&line); + let Ok(message) = parsed else { + continue; + }; + let response_id = message.get("id").and_then(Value::as_u64); + let Some(response_id) = response_id else { + continue; + }; + if response_id == request_id { + return Ok(message); + } + let _previous = self.buffered_responses.insert(response_id, message); + } + } + + async fn shutdown(&mut self) { + let _ = self.child.kill().await; + let _ = self.child.wait().await; + } +} + +fn replace_worker_binary(source_binary: &Path, target_binary: &Path) -> io::Result<()> { + let staging_path = target_binary.with_extension("next"); + let _copied_bytes = fs::copy(source_binary, &staging_path)?; + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + + let permissions = fs::Permissions::from_mode(0o755); + fs::set_permissions(&staging_path, permissions)?; + } + fs::rename(staging_path, target_binary)?; + Ok(()) +} + +#[derive(Debug, Clone)] +struct FakeRaWrapperConfig { + hover_delay_ms: Option<u64>, + execute_command_delay_ms: Option<u64>, + execute_command_log_path: Option<PathBuf>, +} + +fn write_fake_ra_wrapper( + script_path: &Path, + fake_ra_binary: &Path, + config: &FakeRaWrapperConfig, +) -> io::Result<()> { + let mut args = vec!["--mode stable".to_owned()]; + if let Some(delay_ms) = config.hover_delay_ms { + args.push(format!("--hover-delay-ms {delay_ms}")); + } + if let 
Some(delay_ms) = config.execute_command_delay_ms { + args.push(format!("--execute-command-delay-ms {delay_ms}")); + } + if let Some(path) = config.execute_command_log_path.as_ref() { + args.push(format!("--execute-command-log \"{}\"", path.display())); + } + + let script = format!( + "#!/usr/bin/env bash\nexec \"{}\" {}\n", + fake_ra_binary.display(), + args.join(" ") + ); + fs::write(script_path, script)?; + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + + let permissions = fs::Permissions::from_mode(0o755); + fs::set_permissions(script_path, permissions)?; + } + Ok(()) +} + +fn resolve_host_binary() -> Result<PathBuf, Box<dyn Error>> { + if let Ok(path) = std::env::var("CARGO_BIN_EXE_adequate-rust-mcp") { + return Ok(PathBuf::from(path)); + } + let current = std::env::current_exe()?; + let deps_dir = current + .parent() + .ok_or_else(|| io::Error::other("failed to resolve integration test deps directory"))?; + let target_debug = deps_dir + .parent() + .ok_or_else(|| io::Error::other("failed to resolve target debug directory"))?; + Ok(target_debug.join("adequate-rust-mcp")) +} + +fn resolve_fake_ra_binary() -> Result<PathBuf, Box<dyn Error>> { + if let Ok(path) = std::env::var("CARGO_BIN_EXE_fake-rust-analyzer") { + return Ok(PathBuf::from(path)); + } + if let Ok(path) = std::env::var("CARGO_BIN_EXE_fake_rust_analyzer") { + return Ok(PathBuf::from(path)); + } + let current = std::env::current_exe()?; + let deps_dir = current + .parent() + .ok_or_else(|| io::Error::other("failed to resolve integration test deps directory"))?; + let target_debug = deps_dir + .parent() + .ok_or_else(|| io::Error::other("failed to resolve target debug directory"))?; + Ok(target_debug.join("fake-rust-analyzer")) +} + +fn read_jsonl_events(path: &Path) -> Result<Vec<Value>, Box<dyn Error>> { + let raw = fs::read_to_string(path)?; + let events = raw + .lines() + .filter(|line| !line.trim().is_empty()) + .map(serde_json::from_str::<Value>) + .collect::<Result<Vec<_>, _>>()?; 
+ Ok(events) +} +use libmcp as _; diff --git a/crates/adequate-rust-mcp/tests/worktree_workspace_rebind.rs b/crates/adequate-rust-mcp/tests/worktree_workspace_rebind.rs new file mode 100644 index 0000000..ca61753 --- /dev/null +++ b/crates/adequate-rust-mcp/tests/worktree_workspace_rebind.rs @@ -0,0 +1,383 @@ +//! Integration test for rebinding worker engines to sibling git worktrees. + +use notify as _; +use ra_mcp_domain as _; +use ra_mcp_engine as _; +use rmcp as _; +use schemars as _; +use serde as _; +use serde_json::{Value, json}; +use serial_test::serial; +use std::{ + error::Error, + fs, io, + path::{Path, PathBuf}, + process::Stdio, + time::{Duration, Instant}, +}; +use tempfile::TempDir; +use tokio::{ + io::{AsyncBufReadExt, AsyncWriteExt, BufReader, Lines}, + process::{Child, ChildStdin, ChildStdout, Command}, +}; +use toml as _; +use tracing as _; +use tracing_subscriber as _; +use url as _; + +const RESPONSE_TIMEOUT: Duration = Duration::from_secs(20); + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +#[serial] +async fn diagnostics_rebind_to_sibling_git_worktree_root() -> Result<(), Box<dyn Error>> { + let fixture = Fixture::new()?; + let worker_binary = resolve_worker_binary()?; + let fake_ra_binary = resolve_fake_ra_binary()?; + if !fake_ra_binary.exists() { + eprintln!( + "skipping worktree rebind test: fake-rust-analyzer missing at {}", + fake_ra_binary.display() + ); + return Ok(()); + } + + let fake_wrapper = fixture.base_dir().join("fake-ra-worktree.sh"); + write_fake_ra_wrapper(fake_wrapper.as_path(), fake_ra_binary.as_path())?; + + let mut harness = WorkerHarness::spawn( + worker_binary.as_path(), + fixture.repo_root(), + fake_wrapper.as_path(), + ) + .await?; + harness.initialize().await?; + + let diagnostics = harness + .call_tool( + "diagnostics", + json!({ + "file_path": fixture.worktree_source().display().to_string(), + "render": "json", + "mode": "full", + "path_style": "absolute" + }), + ) + .await?; + let items = 
diagnostics + .get("diagnostics") + .and_then(Value::as_array) + .ok_or_else(|| io::Error::other("diagnostics payload missing diagnostics array"))?; + assert_eq!(items.len(), 1); + let message = items[0] + .get("message") + .and_then(Value::as_str) + .ok_or_else(|| io::Error::other("diagnostic missing message"))?; + assert_eq!(message, "fake diagnostic"); + let code = items[0].get("code").cloned().unwrap_or(Value::Null); + assert_eq!(code, Value::Null); + + harness.shutdown().await; + Ok(()) +} + +struct Fixture { + temp_dir: TempDir, + repo_root: PathBuf, + worktree_root: PathBuf, + worktree_source: PathBuf, +} + +impl Fixture { + fn new() -> Result<Self, Box<dyn Error>> { + let temp_dir = tempfile::tempdir()?; + let base_dir = temp_dir.path().to_path_buf(); + let repo_root = base_dir.join("repo"); + fs::create_dir_all(repo_root.join("src"))?; + fs::write(repo_root.join("Cargo.toml"), fixture_manifest())?; + fs::write(repo_root.join("src/lib.rs"), fixture_source())?; + + run_git(["init", "-q"], repo_root.as_path())?; + run_git( + ["config", "user.email", "adequate@example.com"], + repo_root.as_path(), + )?; + run_git(["config", "user.name", "adequate"], repo_root.as_path())?; + run_git(["add", "."], repo_root.as_path())?; + run_git(["commit", "-qm", "fixture"], repo_root.as_path())?; + + let worktree_root = base_dir.join("repo-worktree"); + run_git( + [ + "worktree", + "add", + "-q", + worktree_root.to_str().ok_or_else(|| { + io::Error::other("worktree path is not valid UTF-8 for git invocation") + })?, + "-b", + "worktree-rebind-test", + ], + repo_root.as_path(), + )?; + let worktree_source = worktree_root.join("src/lib.rs"); + + Ok(Self { + temp_dir, + repo_root, + worktree_root, + worktree_source, + }) + } + + fn base_dir(&self) -> &Path { + self.temp_dir.path() + } + + fn repo_root(&self) -> &Path { + self.repo_root.as_path() + } + + fn worktree_source(&self) -> &Path { + self.worktree_source.as_path() + } + + #[allow(dead_code)] + fn worktree_root(&self) -> 
&Path { + self.worktree_root.as_path() + } +} + +struct WorkerHarness { + child: Child, + stdin: ChildStdin, + stdout: Lines<BufReader<ChildStdout>>, + next_id: u64, +} + +impl Drop for WorkerHarness { + fn drop(&mut self) { + let _ = self.child.start_kill(); + } +} + +impl WorkerHarness { + async fn spawn( + worker_binary: &Path, + workspace_root: &Path, + fake_ra_binary: &Path, + ) -> Result<Self, Box<dyn Error>> { + let mut child = Command::new(worker_binary) + .arg("--worker") + .env("ADEQUATE_MCP_WORKSPACE_ROOT", workspace_root) + .env("ADEQUATE_MCP_RA_BINARY", fake_ra_binary) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::null()) + .spawn()?; + + let stdin = child + .stdin + .take() + .ok_or_else(|| io::Error::other("worker stdin unavailable"))?; + let stdout = child + .stdout + .take() + .ok_or_else(|| io::Error::other("worker stdout unavailable"))?; + Ok(Self { + child, + stdin, + stdout: BufReader::new(stdout).lines(), + next_id: 1, + }) + } + + async fn initialize(&mut self) -> Result<(), Box<dyn Error>> { + let _ = self + .request( + "initialize", + json!({ + "protocolVersion": "2025-11-25", + "capabilities": {}, + "clientInfo": { + "name": "worktree-rebind-test", + "version": "1.0.0" + } + }), + ) + .await?; + self.notify("notifications/initialized", json!({})).await?; + Ok(()) + } + + async fn call_tool( + &mut self, + tool_name: &str, + arguments: Value, + ) -> Result<Value, Box<dyn Error>> { + let response = self + .request( + "tools/call", + json!({ + "name": tool_name, + "arguments": arguments, + }), + ) + .await?; + let result = response + .get("result") + .ok_or_else(|| io::Error::other("tool response missing result"))?; + let is_error = result + .get("isError") + .and_then(Value::as_bool) + .unwrap_or(false); + if is_error { + return Err(Box::new(io::Error::other(format!( + "tool `{tool_name}` returned error payload: {result}" + )))); + } + Ok(result + .get("structuredContent") + .cloned() + .unwrap_or(Value::Null)) + } + + 
async fn request(&mut self, method: &str, params: Value) -> Result<Value, Box<dyn Error>> {
+        let id = self.next_id;
+        self.next_id = self.next_id.saturating_add(1); // saturate rather than wrap on overflow
+        let payload = json!({
+            "jsonrpc": "2.0",
+            "id": id,
+            "method": method,
+            "params": params,
+        });
+        self.write_message(&payload).await?;
+        self.read_response(id).await
+    }
+
+    async fn notify(&mut self, method: &str, params: Value) -> Result<(), Box<dyn Error>> {
+        let payload = json!({
+            "jsonrpc": "2.0",
+            "method": method,
+            "params": params,
+        });
+        self.write_message(&payload).await
+    }
+
+    async fn write_message(&mut self, message: &Value) -> Result<(), Box<dyn Error>> {
+        let serialized = serde_json::to_vec(message)?;
+        self.stdin.write_all(&serialized).await?;
+        self.stdin.write_all(b"\n").await?; // newline-delimited JSON framing
+        self.stdin.flush().await?;
+        Ok(())
+    }
+
+    async fn read_response(&mut self, request_id: u64) -> Result<Value, Box<dyn Error>> {
+        let deadline = Instant::now() + RESPONSE_TIMEOUT;
+        loop {
+            if Instant::now() >= deadline {
+                return Err(Box::new(io::Error::new(
+                    io::ErrorKind::TimedOut,
+                    format!("timed out waiting for response id {request_id}"),
+                )));
+            }
+            let remaining = deadline.saturating_duration_since(Instant::now());
+            let next_line = tokio::time::timeout(remaining, self.stdout.next_line()).await;
+            let line = match next_line {
+                Ok(Ok(Some(line))) => line,
+                Ok(Ok(None)) => {
+                    return Err(Box::new(io::Error::new(
+                        io::ErrorKind::UnexpectedEof,
+                        "worker stdout closed while awaiting response",
+                    )));
+                }
+                Ok(Err(error)) => return Err(Box::new(error)),
+                Err(_) => {
+                    return Err(Box::new(io::Error::new(
+                        io::ErrorKind::TimedOut,
+                        format!("timed out waiting for response id {request_id}"),
+                    )));
+                }
+            };
+
+            let message: Value = match serde_json::from_str(&line) { Ok(parsed) => parsed, Err(_) => continue }; // skip non-JSON stdout lines, matching HostHarness::read_response
+            if message.get("id").and_then(Value::as_u64) == Some(request_id) {
+                return Ok(message);
+            }
+        }
+    }
+
+    async fn shutdown(&mut self) {
+        let _ = self.child.kill().await;
+        let _ = self.child.wait().await;
+    }
+}
+
+fn 
fixture_manifest() -> &'static str { + "[package]\nname = \"worktree_rebind_fixture\"\nversion = \"0.0.0\"\nedition = \"2024\"\n" +} + +fn fixture_source() -> &'static str { + "pub fn compute() -> i32 { 1 }\n" +} + +fn run_git<const N: usize>(args: [&str; N], cwd: &Path) -> Result<(), Box<dyn Error>> { + let status = std::process::Command::new("git") + .args(args) + .current_dir(cwd) + .status()?; + if !status.success() { + return Err(Box::new(io::Error::other(format!( + "git command failed in {}", + cwd.display() + )))); + } + Ok(()) +} + +fn write_fake_ra_wrapper(script_path: &Path, fake_ra_binary: &Path) -> io::Result<()> { + let script = format!( + "#!/usr/bin/env bash\nexec \"{}\" --mode stable --strict-root-match\n", + fake_ra_binary.display() + ); + fs::write(script_path, script)?; + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + + let permissions = fs::Permissions::from_mode(0o755); + fs::set_permissions(script_path, permissions)?; + } + Ok(()) +} + +fn resolve_worker_binary() -> Result<PathBuf, Box<dyn Error>> { + if let Ok(path) = std::env::var("CARGO_BIN_EXE_adequate-rust-mcp") { + return Ok(PathBuf::from(path)); + } + let current = std::env::current_exe()?; + let deps_dir = current + .parent() + .ok_or_else(|| io::Error::other("failed to resolve integration test deps directory"))?; + let target_debug = deps_dir + .parent() + .ok_or_else(|| io::Error::other("failed to resolve target debug directory"))?; + Ok(target_debug.join("adequate-rust-mcp")) +} + +fn resolve_fake_ra_binary() -> Result<PathBuf, Box<dyn Error>> { + if let Ok(path) = std::env::var("CARGO_BIN_EXE_fake-rust-analyzer") { + return Ok(PathBuf::from(path)); + } + if let Ok(path) = std::env::var("CARGO_BIN_EXE_fake_rust_analyzer") { + return Ok(PathBuf::from(path)); + } + let current = std::env::current_exe()?; + let deps_dir = current + .parent() + .ok_or_else(|| io::Error::other("failed to resolve integration test deps directory"))?; + let target_debug = deps_dir + .parent() + 
.ok_or_else(|| io::Error::other("failed to resolve target debug directory"))?; + Ok(target_debug.join("fake-rust-analyzer")) +} +use libmcp as _; diff --git a/crates/ra-mcp-domain/.gitignore b/crates/ra-mcp-domain/.gitignore new file mode 100644 index 0000000..ea8c4bf --- /dev/null +++ b/crates/ra-mcp-domain/.gitignore @@ -0,0 +1 @@ +/target diff --git a/crates/ra-mcp-domain/Cargo.toml b/crates/ra-mcp-domain/Cargo.toml new file mode 100644 index 0000000..eca022e --- /dev/null +++ b/crates/ra-mcp-domain/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "ra-mcp-domain" +categories.workspace = true +description = "Typed lifecycle, request, and fault algebra for the adequate-rust-mcp stack." +edition.workspace = true +keywords.workspace = true +license.workspace = true +readme.workspace = true +repository.workspace = true +rust-version.workspace = true +version.workspace = true + +[dependencies] +serde.workspace = true +thiserror.workspace = true + +[dev-dependencies] +assert_matches.workspace = true + +[lints] +workspace = true diff --git a/crates/ra-mcp-domain/src/fault.rs b/crates/ra-mcp-domain/src/fault.rs new file mode 100644 index 0000000..6d404ab --- /dev/null +++ b/crates/ra-mcp-domain/src/fault.rs @@ -0,0 +1,129 @@ +//! Fault taxonomy and recovery guidance. + +use crate::types::Generation; +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +/// Logical fault class. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub enum FaultClass { + /// Underlying I/O or transport channel failure. + Transport, + /// Child process startup/liveness/exiting failures. + Process, + /// Malformed or unexpected protocol payloads. + Protocol, + /// Deadline exceeded. + Timeout, + /// Internal resource budget exhaustion. + Resource, +} + +/// Fine-grained fault code. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub enum FaultCode { + /// Pipe write failed with `EPIPE`. + BrokenPipe, + /// Pipe reached EOF. 
+ UnexpectedEof, + /// Child process exited unexpectedly. + ChildExited, + /// Child process failed to spawn. + SpawnFailed, + /// Startup sequence exceeded deadline. + StartupTimedOut, + /// Request exceeded deadline. + RequestTimedOut, + /// Received an invalid protocol frame. + InvalidFrame, + /// Received invalid JSON. + InvalidJson, + /// Response could not be correlated with a pending request. + UnknownResponseId, +} + +/// Recovery strategy for a fault. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub enum RecoveryDirective { + /// Retry the request on the same process. + RetryInPlace, + /// Restart the worker process and retry once. + RestartAndReplay, + /// Fail-fast and bubble to the caller. + AbortRequest, +} + +/// Structured fault event. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct Fault { + /// Generation in which this fault happened. + pub generation: Generation, + /// Broad fault class. + pub class: FaultClass, + /// Specific fault code. + pub code: FaultCode, + /// Caller-facing context. + pub detail: FaultDetail, +} + +impl Fault { + /// Constructs a new fault. + #[must_use] + pub fn new( + generation: Generation, + class: FaultClass, + code: FaultCode, + detail: FaultDetail, + ) -> Self { + Self { + generation, + class, + code, + detail, + } + } + + /// Returns the default recovery directive for this fault. 
+ #[must_use] + pub fn directive(&self) -> RecoveryDirective { + match (self.class, self.code) { + (FaultClass::Transport, FaultCode::BrokenPipe) + | (FaultClass::Transport, FaultCode::UnexpectedEof) + | (FaultClass::Process, FaultCode::ChildExited) + | (FaultClass::Process, FaultCode::SpawnFailed) + | (FaultClass::Timeout, FaultCode::StartupTimedOut) => { + RecoveryDirective::RestartAndReplay + } + (FaultClass::Timeout, FaultCode::RequestTimedOut) => { + RecoveryDirective::RestartAndReplay + } + (FaultClass::Protocol, FaultCode::UnknownResponseId) => RecoveryDirective::RetryInPlace, + (FaultClass::Protocol, FaultCode::InvalidFrame) + | (FaultClass::Protocol, FaultCode::InvalidJson) + | (FaultClass::Resource, _) => RecoveryDirective::AbortRequest, + _ => RecoveryDirective::AbortRequest, + } + } +} + +/// Typed detail payload for a fault. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct FaultDetail { + /// Human-consumable context. + pub message: String, +} + +impl FaultDetail { + /// Creates a new detail message. + #[must_use] + pub fn new(message: impl Into<String>) -> Self { + Self { + message: message.into(), + } + } +} + +/// Domain fault conversion error. +#[derive(Debug, Error)] +#[error("fault conversion failure: {0}")] +pub struct FaultConversionError(String); diff --git a/crates/ra-mcp-domain/src/lib.rs b/crates/ra-mcp-domain/src/lib.rs new file mode 100644 index 0000000..90285f0 --- /dev/null +++ b/crates/ra-mcp-domain/src/lib.rs @@ -0,0 +1,5 @@ +//! Domain model for the Adequate Rust MCP server. + +pub mod fault; +pub mod lifecycle; +pub mod types; diff --git a/crates/ra-mcp-domain/src/lifecycle.rs b/crates/ra-mcp-domain/src/lifecycle.rs new file mode 100644 index 0000000..91007ac --- /dev/null +++ b/crates/ra-mcp-domain/src/lifecycle.rs @@ -0,0 +1,259 @@ +//! Typestate machine for worker lifecycle. 
+ +use crate::{ + fault::Fault, + types::{Generation, InvariantViolation}, +}; +use serde::{Deserialize, Serialize}; + +/// A worker in cold state (no process). +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct Cold; + +/// A worker in startup handshake. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct Starting; + +/// A healthy worker ready to serve requests. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct Ready; + +/// A worker currently recovering from failure. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Recovering { + last_fault: Fault, +} + +impl Recovering { + /// Constructs recovering state from the most recent fault. + #[must_use] + pub fn new(last_fault: Fault) -> Self { + Self { last_fault } + } + + /// Returns the most recent fault. + #[must_use] + pub fn last_fault(&self) -> &Fault { + &self.last_fault + } +} + +/// Lifecycle state with a typestate payload. +#[derive(Debug, Clone)] +pub struct Lifecycle<S> { + generation: Generation, + state: S, +} + +impl Lifecycle<Cold> { + /// Constructs a cold lifecycle. + #[must_use] + pub fn cold() -> Self { + Self { + generation: Generation::genesis(), + state: Cold, + } + } + + /// Begins startup sequence. + #[must_use] + pub fn ignite(self) -> Lifecycle<Starting> { + Lifecycle { + generation: self.generation, + state: Starting, + } + } +} + +impl Lifecycle<Starting> { + /// Marks startup as successful. + #[must_use] + pub fn arm(self) -> Lifecycle<Ready> { + Lifecycle { + generation: self.generation, + state: Ready, + } + } + + /// Marks startup as failed and enters recovery. + #[must_use] + pub fn fracture(self, fault: Fault) -> Lifecycle<Recovering> { + Lifecycle { + generation: self.generation, + state: Recovering::new(fault), + } + } +} + +impl Lifecycle<Ready> { + /// Moves from ready to recovering after a fault. 
+ #[must_use] + pub fn fracture(self, fault: Fault) -> Lifecycle<Recovering> { + Lifecycle { + generation: self.generation, + state: Recovering::new(fault), + } + } +} + +impl Lifecycle<Recovering> { + /// Advances generation and retries startup. + #[must_use] + pub fn respawn(self) -> Lifecycle<Starting> { + Lifecycle { + generation: self.generation.next(), + state: Starting, + } + } + + /// Returns the most recent fault. + #[must_use] + pub fn last_fault(&self) -> &Fault { + self.state.last_fault() + } +} + +impl<S> Lifecycle<S> { + /// Returns the active generation. + #[must_use] + pub fn generation(&self) -> Generation { + self.generation + } + + /// Returns the typestate payload. + #[must_use] + pub fn state(&self) -> &S { + &self.state + } +} + +/// Serializable lifecycle snapshot for diagnostics. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum LifecycleSnapshot { + /// No worker is currently running. + Cold { + /// Current generation counter. + generation: Generation, + }, + /// Worker startup is in progress. + Starting { + /// Current generation counter. + generation: Generation, + }, + /// Worker is ready for requests. + Ready { + /// Current generation counter. + generation: Generation, + }, + /// Worker is recovering after a fault. + Recovering { + /// Current generation counter. + generation: Generation, + /// Most recent fault. + last_fault: Fault, + }, +} + +/// Dynamically typed lifecycle state for runtime storage. +#[derive(Debug, Clone)] +pub enum DynamicLifecycle { + /// Cold typestate wrapper. + Cold(Lifecycle<Cold>), + /// Starting typestate wrapper. + Starting(Lifecycle<Starting>), + /// Ready typestate wrapper. + Ready(Lifecycle<Ready>), + /// Recovering typestate wrapper. + Recovering(Lifecycle<Recovering>), +} + +impl DynamicLifecycle { + /// Creates a cold dynamic lifecycle. + #[must_use] + pub fn cold() -> Self { + Self::Cold(Lifecycle::cold()) + } + + /// Returns the serializable snapshot. 
+    #[must_use]
+    pub fn snapshot(&self) -> LifecycleSnapshot {
+        match self {
+            Self::Cold(state) => LifecycleSnapshot::Cold {
+                generation: state.generation(),
+            },
+            Self::Starting(state) => LifecycleSnapshot::Starting {
+                generation: state.generation(),
+            },
+            Self::Ready(state) => LifecycleSnapshot::Ready {
+                generation: state.generation(),
+            },
+            Self::Recovering(state) => LifecycleSnapshot::Recovering {
+                generation: state.generation(),
+                last_fault: state.last_fault().clone(), // clone so the snapshot owns its fault
+            },
+        }
+    }
+
+    /// Enters startup: ignites from `Cold`, or respawns (advancing the generation) from `Recovering`.
+    pub fn begin_startup(self) -> Result<Self, InvariantViolation> {
+        match self {
+            Self::Cold(state) => Ok(Self::Starting(state.ignite())),
+            Self::Recovering(state) => Ok(Self::Starting(state.respawn())),
+            Self::Starting(_) | Self::Ready(_) => Err(InvariantViolation::new(
+                "invalid lifecycle transition to starting",
+            )),
+        }
+    }
+
+    /// Marks startup as complete; valid only from the `Starting` state.
+    pub fn complete_startup(self) -> Result<Self, InvariantViolation> {
+        match self {
+            Self::Starting(state) => Ok(Self::Ready(state.arm())),
+            _ => Err(InvariantViolation::new(
+                "invalid lifecycle transition to ready",
+            )),
+        }
+    }
+
+    /// Records a fault and enters recovering state (invalid from `Cold`).
+ pub fn fracture(self, fault: Fault) -> Result<Self, InvariantViolation> { + match self { + Self::Starting(state) => Ok(Self::Recovering(state.fracture(fault))), + Self::Ready(state) => Ok(Self::Recovering(state.fracture(fault))), + Self::Recovering(state) => Ok(Self::Recovering(Lifecycle { + generation: state.generation(), + state: Recovering::new(fault), + })), + Self::Cold(_) => Err(InvariantViolation::new("cannot fracture cold lifecycle")), + } + } +} + +#[cfg(test)] +mod tests { + use super::{DynamicLifecycle, Lifecycle, LifecycleSnapshot}; + use crate::fault::{Fault, FaultClass, FaultCode, FaultDetail}; + + #[test] + fn typestate_chain_advances_generation_on_recovery() { + let cold = Lifecycle::cold(); + let starting = cold.ignite(); + let ready = starting.arm(); + let ready_generation = ready.generation(); + let fault = Fault::new( + ready_generation, + FaultClass::Transport, + FaultCode::BrokenPipe, + FaultDetail::new("broken pipe"), + ); + let recovering = ready.fracture(fault); + let restarted = recovering.respawn(); + assert!(restarted.generation() > ready_generation); + } + + #[test] + fn dynamic_snapshot_of_recovering_is_infallible() { + let cold = DynamicLifecycle::cold(); + assert!(matches!(cold.snapshot(), LifecycleSnapshot::Cold { .. })); + } +} diff --git a/crates/ra-mcp-domain/src/types.rs b/crates/ra-mcp-domain/src/types.rs new file mode 100644 index 0000000..db709d6 --- /dev/null +++ b/crates/ra-mcp-domain/src/types.rs @@ -0,0 +1,460 @@ +//! Fundamental domain types. + +use serde::{Deserialize, Deserializer, Serialize}; +use std::{ + num::NonZeroU64, + path::{Path, PathBuf}, +}; +use thiserror::Error; + +/// A value that failed a domain-level invariant. +#[derive(Debug, Clone, PartialEq, Eq, Error)] +#[error("domain invariant violated: {detail}")] +pub struct InvariantViolation { + detail: &'static str, +} + +impl InvariantViolation { + /// Creates a new invariant violation. 
+ #[must_use] + pub fn new(detail: &'static str) -> Self { + Self { detail } + } +} + +/// Process generation for a rust-analyzer worker. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] +#[serde(transparent)] +pub struct Generation(NonZeroU64); + +impl Generation { + /// Returns the first generation. + #[must_use] + pub fn genesis() -> Self { + Self(NonZeroU64::MIN) + } + + /// Returns the inner integer value. + #[must_use] + pub fn get(self) -> u64 { + self.0.get() + } + + /// Advances to the next generation. + #[must_use] + pub fn next(self) -> Self { + let next = self.get().saturating_add(1); + let non_zero = NonZeroU64::new(next).map_or(NonZeroU64::MAX, |value| value); + Self(non_zero) + } +} + +/// A non-empty absolute workspace root. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct WorkspaceRoot(PathBuf); + +impl WorkspaceRoot { + /// Constructs a validated workspace root. + pub fn try_new(path: PathBuf) -> Result<Self, InvariantViolation> { + if !path.is_absolute() { + return Err(InvariantViolation::new("workspace root must be absolute")); + } + if path.as_os_str().is_empty() { + return Err(InvariantViolation::new("workspace root must be non-empty")); + } + Ok(Self(path)) + } + + /// Returns the root path. + #[must_use] + pub fn as_path(&self) -> &Path { + self.0.as_path() + } +} + +/// A non-empty absolute source file path. +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize)] +#[serde(transparent)] +pub struct SourceFilePath(PathBuf); + +impl SourceFilePath { + /// Constructs a validated source file path. + pub fn try_new(path: PathBuf) -> Result<Self, InvariantViolation> { + if !path.is_absolute() { + return Err(InvariantViolation::new("source file path must be absolute")); + } + if path.as_os_str().is_empty() { + return Err(InvariantViolation::new( + "source file path must be non-empty", + )); + } + Ok(Self(path)) + } + + /// Returns the underlying path. 
+ #[must_use] + pub fn as_path(&self) -> &Path { + self.0.as_path() + } +} + +impl<'de> Deserialize<'de> for SourceFilePath { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + let path = PathBuf::deserialize(deserializer)?; + Self::try_new(path).map_err(serde::de::Error::custom) + } +} + +/// One-indexed source line number. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] +#[serde(transparent)] +pub struct OneIndexedLine(NonZeroU64); + +impl OneIndexedLine { + /// Constructs a one-indexed line. + pub fn try_new(raw: u64) -> Result<Self, InvariantViolation> { + let line = NonZeroU64::new(raw).ok_or(InvariantViolation::new("line must be >= 1"))?; + Ok(Self(line)) + } + + /// Returns the one-indexed value. + #[must_use] + pub fn get(self) -> u64 { + self.0.get() + } + + /// Returns the corresponding zero-indexed value for LSP. + #[must_use] + pub fn to_zero_indexed(self) -> u32 { + let raw = self.get().saturating_sub(1); + u32::try_from(raw).unwrap_or(u32::MAX) + } +} + +/// One-indexed source column number. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] +#[serde(transparent)] +pub struct OneIndexedColumn(NonZeroU64); + +impl OneIndexedColumn { + /// Constructs a one-indexed column. + pub fn try_new(raw: u64) -> Result<Self, InvariantViolation> { + let column = NonZeroU64::new(raw).ok_or(InvariantViolation::new("column must be >= 1"))?; + Ok(Self(column)) + } + + /// Returns the one-indexed value. + #[must_use] + pub fn get(self) -> u64 { + self.0.get() + } + + /// Returns the corresponding zero-indexed value for LSP. + #[must_use] + pub fn to_zero_indexed(self) -> u32 { + let raw = self.get().saturating_sub(1); + u32::try_from(raw).unwrap_or(u32::MAX) + } +} + +/// A file-local source point. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] +pub struct SourcePoint { + line: OneIndexedLine, + column: OneIndexedColumn, +} + +impl SourcePoint { + /// Constructs a file-local source point. + #[must_use] + pub fn new(line: OneIndexedLine, column: OneIndexedColumn) -> Self { + Self { line, column } + } + + /// Returns the line component. + #[must_use] + pub fn line(self) -> OneIndexedLine { + self.line + } + + /// Returns the column component. + #[must_use] + pub fn column(self) -> OneIndexedColumn { + self.column + } +} + +/// Request position in a source file. +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct SourcePosition { + file_path: SourceFilePath, + #[serde(flatten)] + point: SourcePoint, +} + +impl SourcePosition { + /// Constructs a request position. + #[must_use] + pub fn new(file_path: SourceFilePath, point: SourcePoint) -> Self { + Self { file_path, point } + } + + /// Returns the source file path. + #[must_use] + pub fn file_path(&self) -> &SourceFilePath { + &self.file_path + } + + /// Returns the file-local point. + #[must_use] + pub fn point(&self) -> SourcePoint { + self.point + } + + /// Returns the one-indexed line. + #[must_use] + pub fn line(&self) -> OneIndexedLine { + self.point.line() + } + + /// Returns the one-indexed column. + #[must_use] + pub fn column(&self) -> OneIndexedColumn { + self.point.column() + } +} + +#[derive(Debug, Clone, Deserialize)] +struct SourcePositionWire { + file_path: SourceFilePath, + #[serde(flatten)] + point: SourcePoint, +} + +impl<'de> Deserialize<'de> for SourcePosition { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + let SourcePositionWire { file_path, point } = + SourcePositionWire::deserialize(deserializer)?; + Ok(Self::new(file_path, point)) + } +} + +/// A concrete source location. 
+#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct SourceLocation { + file_path: SourceFilePath, + #[serde(flatten)] + point: SourcePoint, +} + +impl SourceLocation { + /// Constructs a source location. + #[must_use] + pub fn new(file_path: SourceFilePath, point: SourcePoint) -> Self { + Self { file_path, point } + } + + /// Returns the source file path. + #[must_use] + pub fn file_path(&self) -> &SourceFilePath { + &self.file_path + } + + /// Returns the file-local point. + #[must_use] + pub fn point(&self) -> SourcePoint { + self.point + } + + /// Returns the one-indexed line. + #[must_use] + pub fn line(&self) -> OneIndexedLine { + self.point.line() + } + + /// Returns the one-indexed column. + #[must_use] + pub fn column(&self) -> OneIndexedColumn { + self.point.column() + } +} + +#[derive(Debug, Clone, Deserialize)] +struct SourceLocationWire { + file_path: SourceFilePath, + #[serde(flatten)] + point: SourcePoint, +} + +impl<'de> Deserialize<'de> for SourceLocation { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + let SourceLocationWire { file_path, point } = + SourceLocationWire::deserialize(deserializer)?; + Ok(Self::new(file_path, point)) + } +} + +/// A source range in a specific file. +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct SourceRange { + file_path: SourceFilePath, + start: SourcePoint, + end: SourcePoint, +} + +impl SourceRange { + /// Constructs a validated source range. + pub fn try_new( + file_path: SourceFilePath, + start: SourcePoint, + end: SourcePoint, + ) -> Result<Self, InvariantViolation> { + if end < start { + return Err(InvariantViolation::new( + "source range end must not precede start", + )); + } + Ok(Self { + file_path, + start, + end, + }) + } + + /// Returns the source file path. + #[must_use] + pub fn file_path(&self) -> &SourceFilePath { + &self.file_path + } + + /// Returns the start point. 
+ #[must_use] + pub fn start(&self) -> SourcePoint { + self.start + } + + /// Returns the end point. + #[must_use] + pub fn end(&self) -> SourcePoint { + self.end + } +} + +#[derive(Debug, Clone, Deserialize)] +struct SourceRangeWire { + file_path: SourceFilePath, + start: SourcePoint, + end: SourcePoint, +} + +impl<'de> Deserialize<'de> for SourceRange { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + let SourceRangeWire { + file_path, + start, + end, + } = SourceRangeWire::deserialize(deserializer)?; + Self::try_new(file_path, start, end).map_err(serde::de::Error::custom) + } +} + +/// A monotonically increasing request sequence. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct RequestSequence(NonZeroU64); + +impl RequestSequence { + /// Starts a fresh sequence. + #[must_use] + pub fn genesis() -> Self { + Self(NonZeroU64::MIN) + } + + /// Returns the current integer value. + #[must_use] + pub fn get(self) -> u64 { + self.0.get() + } + + /// Consumes and returns the next sequence. + #[must_use] + pub fn next(self) -> Self { + let next = self.get().saturating_add(1); + let non_zero = NonZeroU64::new(next).map_or(NonZeroU64::MAX, |value| value); + Self(non_zero) + } +} + +#[cfg(test)] +mod tests { + use super::{ + Generation, InvariantViolation, OneIndexedColumn, OneIndexedLine, RequestSequence, + SourceFilePath, SourcePoint, SourceRange, + }; + use assert_matches::assert_matches; + use std::{num::NonZeroU64, path::PathBuf}; + + #[test] + fn generation_advances_monotonically() { + let first = Generation::genesis(); + let second = first.next(); + let third = second.next(); + assert!(first < second); + assert!(second < third); + } + + #[test] + fn generation_saturates_at_maximum() { + let max = Generation(NonZeroU64::MAX); + assert_eq!(max.next(), max); + } + + #[test] + fn line_must_be_one_or_greater() { + assert_matches!(OneIndexedLine::try_new(0), Err(InvariantViolation { .. 
})); + } + + #[test] + fn column_must_be_one_or_greater() { + assert_matches!(OneIndexedColumn::try_new(0), Err(InvariantViolation { .. })); + } + + #[test] + fn source_range_rejects_reversed_points() { + let file_path = SourceFilePath::try_new(PathBuf::from("/tmp/range.rs")); + assert!(file_path.is_ok()); + let file_path = match file_path { + Ok(value) => value, + Err(_) => return, + }; + let start = SourcePoint::new( + OneIndexedLine::try_new(4).unwrap_or(OneIndexedLine(NonZeroU64::MIN)), + OneIndexedColumn::try_new(3).unwrap_or(OneIndexedColumn(NonZeroU64::MIN)), + ); + let end = SourcePoint::new( + OneIndexedLine::try_new(2).unwrap_or(OneIndexedLine(NonZeroU64::MIN)), + OneIndexedColumn::try_new(1).unwrap_or(OneIndexedColumn(NonZeroU64::MIN)), + ); + assert_matches!( + SourceRange::try_new(file_path, start, end), + Err(InvariantViolation { .. }) + ); + } + + #[test] + fn request_sequence_saturates_at_maximum() { + let max = RequestSequence(NonZeroU64::MAX); + assert_eq!(max.next().get(), u64::MAX); + } +} diff --git a/crates/ra-mcp-engine/.gitignore b/crates/ra-mcp-engine/.gitignore new file mode 100644 index 0000000..ea8c4bf --- /dev/null +++ b/crates/ra-mcp-engine/.gitignore @@ -0,0 +1 @@ +/target diff --git a/crates/ra-mcp-engine/Cargo.toml b/crates/ra-mcp-engine/Cargo.toml new file mode 100644 index 0000000..d5d870d --- /dev/null +++ b/crates/ra-mcp-engine/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "ra-mcp-engine" +categories.workspace = true +description = "Resilient rust-analyzer transport and worker-supervision engine used by adequate-rust-mcp." 
+edition.workspace = true +keywords.workspace = true +license.workspace = true +readme.workspace = true +repository.workspace = true +rust-version.workspace = true +version.workspace = true + +[dependencies] +lsp-types.workspace = true +ra-mcp-domain = { path = "../ra-mcp-domain" } +serde.workspace = true +serde_json.workspace = true +thiserror.workspace = true +tokio.workspace = true +tracing.workspace = true +url.workspace = true + +[dev-dependencies] +serial_test.workspace = true +tempfile.workspace = true + +[lints] +workspace = true diff --git a/crates/ra-mcp-engine/src/bin/fake-rust-analyzer.rs b/crates/ra-mcp-engine/src/bin/fake-rust-analyzer.rs new file mode 100644 index 0000000..c64b68b --- /dev/null +++ b/crates/ra-mcp-engine/src/bin/fake-rust-analyzer.rs @@ -0,0 +1,467 @@ +//! Fault-injectable fake rust-analyzer used by integration tests. + +use lsp_types as _; +use ra_mcp_domain as _; +use ra_mcp_engine as _; +use serde as _; +use serde_json::{Value, json}; +#[cfg(test)] +use serial_test as _; +use std::{ + fs, + io::{self, BufRead, BufReader, Read, Write}, + path::{Path, PathBuf}, + time::Duration, +}; +#[cfg(test)] +use tempfile as _; +use thiserror as _; +use tokio as _; +use tracing as _; +use url as _; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum Mode { + Stable, + CrashOnFirstHover, +} + +fn main() -> Result<(), Box<dyn std::error::Error>> { + run().map_err(|error| Box::new(error) as Box<dyn std::error::Error>) +} + +fn run() -> io::Result<()> { + let mut mode = Mode::Stable; + let mut marker = None::<PathBuf>; + let mut hover_delay = Duration::ZERO; + let mut execute_command_delay = Duration::ZERO; + let mut execute_command_log = None::<PathBuf>; + let mut diagnostic_warmup_count = 0_u8; + let mut diagnostic_cancel_count = 0_u8; + let mut strict_root_match = false; + let mut workspace_root = None::<PathBuf>; + let mut args = std::env::args().skip(1); + loop { + let argument = args.next(); + let Some(argument) = argument else { + break; + 
}; + match argument.as_str() { + "--mode" => { + if let Some(value) = args.next() { + mode = parse_mode(&value).unwrap_or(Mode::Stable); + } + } + "--crash-marker" => { + if let Some(value) = args.next() { + marker = Some(PathBuf::from(value)); + } + } + "--hover-delay-ms" => { + if let Some(value) = args.next() { + let parsed = value.parse::<u64>().ok(); + if let Some(delay_ms) = parsed { + hover_delay = Duration::from_millis(delay_ms); + } + } + } + "--execute-command-delay-ms" => { + if let Some(value) = args.next() { + let parsed = value.parse::<u64>().ok(); + if let Some(delay_ms) = parsed { + execute_command_delay = Duration::from_millis(delay_ms); + } + } + } + "--execute-command-log" => { + if let Some(value) = args.next() { + execute_command_log = Some(PathBuf::from(value)); + } + } + "--diagnostic-warmup-count" => { + if let Some(value) = args.next() { + let parsed = value.parse::<u8>().ok(); + if let Some(count) = parsed { + diagnostic_warmup_count = count; + } + } + } + "--diagnostic-cancel-count" => { + if let Some(value) = args.next() { + let parsed = value.parse::<u8>().ok(); + if let Some(count) = parsed { + diagnostic_cancel_count = count; + } + } + } + "--strict-root-match" => { + strict_root_match = true; + } + _ => {} + } + } + + let stdin = io::stdin(); + let stdout = io::stdout(); + let mut reader = BufReader::new(stdin.lock()); + let mut writer = stdout.lock(); + + loop { + let frame = match read_frame(&mut reader) { + Ok(frame) => frame, + Err(error) if error.kind() == io::ErrorKind::UnexpectedEof => break, + Err(error) => return Err(error), + }; + let message: Value = serde_json::from_slice(&frame) + .map_err(|error| io::Error::new(io::ErrorKind::InvalidData, error.to_string()))?; + if let Some(method) = message.get("method").and_then(Value::as_str) { + if method == "initialized" { + continue; + } + + let request_id = message.get("id").cloned(); + let Some(request_id) = request_id else { + continue; + }; + if method == "initialize" { + 
                workspace_root = initialized_workspace_root(&message);
            }

            // Fault injection: optionally exit on the first hover request
            // (at most once per crash-marker file, so a restart succeeds).
            if mode == Mode::CrashOnFirstHover
                && method == "textDocument/hover"
                && should_crash(&marker)?
            {
                std::process::exit(0);
            }
            // Latency injection for hover and executeCommand requests.
            if method == "textDocument/hover" && !hover_delay.is_zero() {
                std::thread::sleep(hover_delay);
            }
            if method == "workspace/executeCommand" {
                if let Some(path) = execute_command_log.as_ref() {
                    log_execute_command_effect(path, &message)?;
                }
                if !execute_command_delay.is_zero() {
                    std::thread::sleep(execute_command_delay);
                }
            }

            // Response selection precedence: strict-root mismatch, then the
            // cancel budget, then the warmup budget, then the happy path.
            let response = if strict_root_match
                && request_targets_outside_workspace(&message, workspace_root.as_deref())
            {
                strict_root_mismatch_response(method, request_id, &message)
            } else if method == "textDocument/diagnostic" && diagnostic_cancel_count > 0 {
                diagnostic_cancel_count = diagnostic_cancel_count.saturating_sub(1);
                server_cancelled_response(request_id)
            } else if method == "textDocument/diagnostic" && diagnostic_warmup_count > 0 {
                diagnostic_warmup_count = diagnostic_warmup_count.saturating_sub(1);
                warmup_unlinked_diagnostic_response(request_id)
            } else {
                make_response(method, request_id, &message)
            };
            write_frame(&mut writer, &response)?;
        }
    }

    Ok(())
}

/// Parses the `--mode` CLI value; unknown values yield `None`.
fn parse_mode(raw: &str) -> Option<Mode> {
    match raw {
        "stable" => Some(Mode::Stable),
        "crash_on_first_hover" => Some(Mode::CrashOnFirstHover),
        _ => None,
    }
}

/// Decides whether the fake server should crash now.
///
/// Without a marker path every call reports "crash". With a marker path the
/// first call writes the marker and reports "crash"; subsequent processes see
/// the marker and stay alive, so a supervisor restart succeeds exactly once.
fn should_crash(marker: &Option<PathBuf>) -> io::Result<bool> {
    let Some(marker) = marker else {
        return Ok(true);
    };
    if marker.exists() {
        return Ok(false);
    }
    fs::write(marker, b"crashed")?;
    Ok(true)
}

/// Appends the executed command name (from `params.command`) to the
/// side-effect log file, creating the file on first use.
fn log_execute_command_effect(path: &PathBuf, request: &Value) -> io::Result<()> {
    let command = request
        .get("params")
        .and_then(|params| params.get("command"))
        .and_then(Value::as_str)
        .unwrap_or("<missing-command>");
    let mut file = fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(path)?;
    writeln!(file, "{command}")?;
    Ok(())
}
/// Extracts the workspace root path from an `initialize` request's `rootUri`,
/// returning `None` when the URI is absent or not a local `file://` URL.
fn initialized_workspace_root(request: &Value) -> Option<PathBuf> {
    let root_uri = request
        .get("params")
        .and_then(|params| params.get("rootUri"))
        .and_then(Value::as_str)?;
    let root_url = url::Url::parse(root_uri).ok()?;
    root_url.to_file_path().ok()
}

/// Returns `true` when the request's target document lies outside the
/// initialized workspace root (drives `--strict-root-match` behavior).
/// Requests without a resolvable document path are treated as in-workspace.
fn request_targets_outside_workspace(request: &Value, workspace_root: Option<&Path>) -> bool {
    let Some(workspace_root) = workspace_root else {
        return false;
    };
    let file_path = request_document_path(request);
    let Some(file_path) = file_path else {
        return false;
    };
    !file_path.starts_with(workspace_root)
}

/// Resolves `params.textDocument.uri` to a local filesystem path, if any.
fn request_document_path(request: &Value) -> Option<PathBuf> {
    let uri = request
        .get("params")
        .and_then(|params| params.get("textDocument"))
        .and_then(|doc| doc.get("uri"))
        .and_then(Value::as_str)?;
    let url = url::Url::parse(uri).ok()?;
    url.to_file_path().ok()
}

/// Builds the degraded response used for out-of-workspace documents under
/// `--strict-root-match`: `null` results for hover/definition/references, an
/// empty workspace edit for rename, the "unlinked-file" diagnostic for pull
/// diagnostics, and the normal canned response for everything else.
fn strict_root_mismatch_response(method: &str, request_id: Value, request: &Value) -> Value {
    match method {
        "textDocument/hover" => json!({
            "jsonrpc": "2.0",
            "id": request_id,
            "result": Value::Null
        }),
        "textDocument/definition" => json!({
            "jsonrpc": "2.0",
            "id": request_id,
            "result": Value::Null
        }),
        "textDocument/references" => json!({
            "jsonrpc": "2.0",
            "id": request_id,
            "result": Value::Null
        }),
        "textDocument/rename" => {
            let uri = request
                .get("params")
                .and_then(|params| params.get("textDocument"))
                .and_then(|doc| doc.get("uri"))
                .and_then(Value::as_str)
                .unwrap_or("file:///tmp/fallback.rs")
                .to_owned();
            json!({
                "jsonrpc": "2.0",
                "id": request_id,
                "result": {
                    "changes": {
                        uri: []
                    }
                }
            })
        }
        "textDocument/diagnostic" => warmup_unlinked_diagnostic_response(request_id),
        _ => make_response(method, request_id, request),
    }
}

/// Produces the canned happy-path response for each supported LSP method.
fn make_response(method: &str, request_id: Value, request: &Value) -> Value {
    match method {
        "initialize" => json!({
            "jsonrpc": "2.0",
            "id": request_id,
            "result": {
                "capabilities": {}
            }
        }),
"textDocument/hover" => json!({ + "jsonrpc": "2.0", + "id": request_id, + "result": { + "contents": { + "kind": "markdown", + "value": "hover::ok" + } + } + }), + "textDocument/definition" => { + let uri = request + .get("params") + .and_then(|params| params.get("textDocument")) + .and_then(|doc| doc.get("uri")) + .cloned() + .unwrap_or(Value::String("file:///tmp/fallback.rs".to_owned())); + json!({ + "jsonrpc": "2.0", + "id": request_id, + "result": [{ + "uri": uri, + "range": { + "start": { "line": 2, "character": 3 }, + "end": { "line": 2, "character": 8 } + } + }] + }) + } + "textDocument/references" => { + let uri = request + .get("params") + .and_then(|params| params.get("textDocument")) + .and_then(|doc| doc.get("uri")) + .cloned() + .unwrap_or(Value::String("file:///tmp/fallback.rs".to_owned())); + json!({ + "jsonrpc": "2.0", + "id": request_id, + "result": [{ + "uri": uri, + "range": { + "start": { "line": 4, "character": 1 }, + "end": { "line": 4, "character": 5 } + } + }] + }) + } + "textDocument/rename" => { + let uri = request + .get("params") + .and_then(|params| params.get("textDocument")) + .and_then(|doc| doc.get("uri")) + .and_then(Value::as_str) + .unwrap_or("file:///tmp/fallback.rs") + .to_owned(); + json!({ + "jsonrpc": "2.0", + "id": request_id, + "result": { + "changes": { + uri: [ + { + "range": { + "start": { "line": 1, "character": 1 }, + "end": { "line": 1, "character": 4 } + }, + "newText": "renamed_symbol" + } + ] + } + } + }) + } + "textDocument/diagnostic" => json!({ + "jsonrpc": "2.0", + "id": request_id, + "result": { + "kind": "full", + "items": [{ + "range": { + "start": { "line": 0, "character": 0 }, + "end": { "line": 0, "character": 3 } + }, + "severity": 1, + "message": "fake diagnostic" + }] + } + }), + "workspace/executeCommand" => { + let command = request + .get("params") + .and_then(|params| params.get("command")) + .cloned() + .unwrap_or(Value::Null); + json!({ + "jsonrpc": "2.0", + "id": request_id, + "result": { + 
                    "ack": "ok",
                    "command": command
                }
            })
        }
        // Unknown methods get the standard JSON-RPC "method not found" error.
        _ => json!({
            "jsonrpc": "2.0",
            "id": request_id,
            "error": {
                "code": -32601,
                "message": format!("method not found: {method}")
            }
        }),
    }
}

/// Full diagnostic report mimicking rust-analyzer's warmup-phase
/// "unlinked-file" warning emitted before the workspace is fully loaded.
fn warmup_unlinked_diagnostic_response(request_id: Value) -> Value {
    json!({
        "jsonrpc": "2.0",
        "id": request_id,
        "result": {
            "kind": "full",
            "items": [{
                "range": {
                    "start": { "line": 0, "character": 0 },
                    "end": { "line": 0, "character": 0 }
                },
                "severity": 2,
                "code": "unlinked-file",
                "message": "This file is not part of any crate, so rust-analyzer can't offer IDE services."
            }]
        }
    })
}

/// JSON-RPC error response with code -32802 (LSP `ServerCancelled`), which
/// rust-analyzer emits while a workspace reload is in flight.
fn server_cancelled_response(request_id: Value) -> Value {
    json!({
        "jsonrpc": "2.0",
        "id": request_id,
        "error": {
            "code": -32802,
            "message": "server cancelled request during workspace reload"
        }
    })
}

/// Reads one LSP base-protocol frame: header lines, a blank line, then
/// exactly `Content-Length` payload bytes.
///
/// Returns `UnexpectedEof` when the stream closes between frames (the caller
/// treats that as a clean shutdown) and `InvalidData` for malformed headers.
/// NOTE(review): the header match is case-sensitive; acceptable for a test
/// fake, since real clients send the canonical `Content-Length` spelling.
fn read_frame(reader: &mut BufReader<impl Read>) -> io::Result<Vec<u8>> {
    let mut content_length = None::<usize>;
    loop {
        let mut line = String::new();
        let bytes = reader.read_line(&mut line)?;
        if bytes == 0 {
            return Err(io::Error::new(
                io::ErrorKind::UnexpectedEof,
                "EOF while reading headers",
            ));
        }
        // A bare CRLF (or LF) terminates the header section.
        if line == "\r\n" || line == "\n" {
            break;
        }
        let trimmed = line.trim_end_matches(['\r', '\n']);
        if let Some(raw_length) = trimmed.strip_prefix("Content-Length:") {
            let parsed = raw_length.trim().parse::<usize>().map_err(|error| {
                io::Error::new(
                    io::ErrorKind::InvalidData,
                    format!("invalid Content-Length header: {error}"),
                )
            })?;
            content_length = Some(parsed);
        }
    }

    let length = content_length.ok_or_else(|| {
        io::Error::new(io::ErrorKind::InvalidData, "missing Content-Length header")
    })?;
    let mut payload = vec![0_u8; length];
    reader.read_exact(&mut payload)?;
    Ok(payload)
}

/// Serializes `payload` and writes it as one `Content-Length`-framed message,
/// flushing so the peer sees it immediately.
fn write_frame(writer: &mut impl Write, payload: &Value) -> io::Result<()> {
    let serialized = serde_json::to_vec(payload)
        .map_err(|error| io::Error::new(io::ErrorKind::InvalidData, error.to_string()))?;
let header = format!("Content-Length: {}\r\n\r\n", serialized.len()); + writer.write_all(header.as_bytes())?; + writer.write_all(&serialized)?; + writer.flush()?; + Ok(()) +} diff --git a/crates/ra-mcp-engine/src/config.rs b/crates/ra-mcp-engine/src/config.rs new file mode 100644 index 0000000..8d116d5 --- /dev/null +++ b/crates/ra-mcp-engine/src/config.rs @@ -0,0 +1,79 @@ +use ra_mcp_domain::types::{InvariantViolation, WorkspaceRoot}; +use std::{path::PathBuf, time::Duration}; + +/// Exponential backoff policy for worker restart attempts. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct BackoffPolicy { + /// Minimum delay between restart attempts. + pub floor: Duration, + /// Maximum delay between restart attempts. + pub ceiling: Duration, +} + +impl BackoffPolicy { + /// Builds a validated backoff policy. + pub fn try_new(floor: Duration, ceiling: Duration) -> Result<Self, InvariantViolation> { + if floor.is_zero() { + return Err(InvariantViolation::new("backoff floor must be non-zero")); + } + if ceiling < floor { + return Err(InvariantViolation::new( + "backoff ceiling must be greater than or equal to floor", + )); + } + Ok(Self { floor, ceiling }) + } +} + +/// Runtime engine configuration. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct EngineConfig { + /// Absolute workspace root used for rust-analyzer process cwd/root URI. + pub workspace_root: WorkspaceRoot, + /// rust-analyzer executable path. + pub rust_analyzer_binary: PathBuf, + /// Additional rust-analyzer process arguments. + pub rust_analyzer_args: Vec<String>, + /// Additional rust-analyzer process environment variables. + pub rust_analyzer_env: Vec<(String, String)>, + /// Startup handshake timeout. + pub startup_timeout: Duration, + /// Timeout for ordinary requests. + pub request_timeout: Duration, + /// Restart backoff policy. + pub backoff_policy: BackoffPolicy, +} + +impl EngineConfig { + /// Builds validated engine configuration. 
+ pub fn try_new( + workspace_root: WorkspaceRoot, + rust_analyzer_binary: PathBuf, + rust_analyzer_args: Vec<String>, + rust_analyzer_env: Vec<(String, String)>, + startup_timeout: Duration, + request_timeout: Duration, + backoff_policy: BackoffPolicy, + ) -> Result<Self, InvariantViolation> { + if rust_analyzer_binary.as_os_str().is_empty() { + return Err(InvariantViolation::new( + "rust-analyzer binary path must be non-empty", + )); + } + if startup_timeout.is_zero() { + return Err(InvariantViolation::new("startup timeout must be non-zero")); + } + if request_timeout.is_zero() { + return Err(InvariantViolation::new("request timeout must be non-zero")); + } + Ok(Self { + workspace_root, + rust_analyzer_binary, + rust_analyzer_args, + rust_analyzer_env, + startup_timeout, + request_timeout, + backoff_policy, + }) + } +} diff --git a/crates/ra-mcp-engine/src/error.rs b/crates/ra-mcp-engine/src/error.rs new file mode 100644 index 0000000..f40e1ae --- /dev/null +++ b/crates/ra-mcp-engine/src/error.rs @@ -0,0 +1,77 @@ +use crate::lsp_transport::RpcErrorPayload; +use ra_mcp_domain::{fault::Fault, types::InvariantViolation}; +use serde_json::Value; +use thiserror::Error; + +/// Engine result type. +pub type EngineResult<T> = Result<T, EngineError>; + +/// Structured rust-analyzer response error. +#[derive(Debug, Clone, Error)] +#[error("lsp response error: code={code}, message={message}")] +pub struct LspResponseError { + /// LSP JSON-RPC error code. + pub code: i64, + /// LSP JSON-RPC error message. + pub message: String, + /// Optional JSON-RPC error data payload. + pub data: Option<Value>, +} + +/// Engine failure type. +#[derive(Debug, Error)] +pub enum EngineError { + /// I/O failure while syncing source documents. + #[error("io error: {0}")] + Io(#[from] std::io::Error), + /// Domain invariant was violated. + #[error(transparent)] + Invariant(#[from] InvariantViolation), + /// Transport/process/protocol fault. 
+ #[error("engine fault: {0:?}")] + Fault(Fault), + /// rust-analyzer returned a JSON-RPC error object. + #[error(transparent)] + LspResponse(#[from] LspResponseError), + /// Response payload could not be deserialized into expected type. + #[error("invalid lsp payload for method {method}: {message}")] + InvalidPayload { + /// Request method. + method: &'static str, + /// Decoder error detail. + message: String, + }, + /// Request params could not be serialized into JSON. + #[error("invalid lsp request payload for method {method}: {message}")] + InvalidRequest { + /// Request method. + method: &'static str, + /// Encoder error detail. + message: String, + }, + /// Path to URL conversion failed. + #[error("path cannot be represented as file URL")] + InvalidFileUrl, +} + +impl From<Fault> for EngineError { + fn from(value: Fault) -> Self { + Self::Fault(value) + } +} + +impl From<RpcErrorPayload> for LspResponseError { + fn from(value: RpcErrorPayload) -> Self { + Self { + code: value.code, + message: value.message, + data: value.data, + } + } +} + +impl From<RpcErrorPayload> for EngineError { + fn from(value: RpcErrorPayload) -> Self { + Self::LspResponse(value.into()) + } +} diff --git a/crates/ra-mcp-engine/src/lib.rs b/crates/ra-mcp-engine/src/lib.rs new file mode 100644 index 0000000..3d36a5b --- /dev/null +++ b/crates/ra-mcp-engine/src/lib.rs @@ -0,0 +1,20 @@ +#![recursion_limit = "512"] + +//! Resilient rust-analyzer execution engine and typed LSP façade. 
+ +#[cfg(test)] +use serial_test as _; +#[cfg(test)] +use tempfile as _; + +mod config; +mod error; +mod lsp_transport; +mod supervisor; + +pub use config::{BackoffPolicy, EngineConfig}; +pub use error::{EngineError, EngineResult}; +pub use supervisor::{ + DiagnosticEntry, DiagnosticLevel, DiagnosticsReport, Engine, HoverPayload, + MethodTelemetrySnapshot, RenameReport, TelemetrySnapshot, TelemetryTotals, +}; diff --git a/crates/ra-mcp-engine/src/lsp_transport.rs b/crates/ra-mcp-engine/src/lsp_transport.rs new file mode 100644 index 0000000..c47d4f2 --- /dev/null +++ b/crates/ra-mcp-engine/src/lsp_transport.rs @@ -0,0 +1,717 @@ +use crate::config::EngineConfig; +use ra_mcp_domain::{ + fault::{Fault, FaultClass, FaultCode, FaultDetail}, + types::Generation, +}; +use serde::{Deserialize, Serialize}; +use serde_json::{Value, json}; +use std::{ + collections::HashMap, + io, + process::Stdio, + sync::{ + Arc, + atomic::{AtomicU64, Ordering}, + }, + time::Duration, +}; +use tokio::{ + io::{AsyncBufReadExt, AsyncReadExt, AsyncWriteExt, BufReader}, + process::{Child, ChildStdin, ChildStdout, Command}, + sync::{Mutex, oneshot, watch}, + task::JoinHandle, +}; +use tracing::{debug, warn}; +use url::Url; + +#[derive(Debug, Clone)] +pub(crate) struct WorkerHandle { + generation: Generation, + child: Arc<Mutex<Child>>, + writer: Arc<Mutex<ChildStdin>>, + pending: Arc<Mutex<HashMap<u64, oneshot::Sender<PendingOutcome>>>>, + next_request_id: Arc<AtomicU64>, + terminal_fault_rx: watch::Receiver<Option<Fault>>, + reader_task: Arc<Mutex<Option<JoinHandle<()>>>>, + stderr_task: Arc<Mutex<Option<JoinHandle<()>>>>, +} + +#[derive(Debug)] +enum PendingOutcome { + Result(Value), + ResponseError(RpcErrorPayload), + TransportFault(Fault), +} + +#[derive(Debug, Clone, Deserialize)] +pub(crate) struct RpcErrorPayload { + pub(crate) code: i64, + pub(crate) message: String, + pub(crate) data: Option<Value>, +} + +#[derive(Debug)] +pub(crate) enum WorkerRequestError { + Fault(Fault), + 
    Response(RpcErrorPayload),
+}
+
+impl WorkerHandle {
+    pub(crate) fn terminal_fault(&self) -> Option<Fault> { // snapshot of the reader task's fatal-fault watch channel
+        self.terminal_fault_rx.borrow().clone()
+    }
+
+    pub(crate) async fn send_notification( // fire-and-forget: notifications carry no id and get no response
+        &self,
+        method: &'static str,
+        params: &impl Serialize,
+    ) -> Result<(), Fault> {
+        let payload = json!({
+            "jsonrpc": "2.0",
+            "method": method,
+            "params": params,
+        });
+        let mut writer = self.writer.lock().await; // serializes concurrent writers so frames never interleave
+        write_frame(&mut writer, &payload).await.map_err(|error| {
+            classify_io_fault(
+                self.generation,
+                FaultClass::Transport,
+                "failed to write notification",
+                error,
+            )
+        })
+    }
+
+    pub(crate) async fn send_request( // request/response round-trip with per-call timeout
+        &self,
+        method: &'static str,
+        params: &impl Serialize,
+        timeout: Duration,
+    ) -> Result<Value, WorkerRequestError> {
+        let request_id = self.next_request_id.fetch_add(1, Ordering::Relaxed); // ids only need uniqueness, no ordering guarantees
+        let (sender, receiver) = oneshot::channel::<PendingOutcome>();
+        {
+            let mut pending = self.pending.lock().await;
+            let previous = pending.insert(request_id, sender); // register BEFORE writing so the reader task can route the reply
+            if let Some(previous_sender) = previous { // should be unreachable for a fresh id; dropping cancels the stale waiter
+                drop(previous_sender);
+            }
+        }
+
+        let payload = json!({
+            "jsonrpc": "2.0",
+            "id": request_id,
+            "method": method,
+            "params": params,
+        });
+
+        let write_result = { // scope keeps the writer lock held only for the frame write
+            let mut writer = self.writer.lock().await;
+            write_frame(&mut writer, &payload).await
+        };
+
+        if let Err(error) = write_result { // write failed: unregister so the map does not leak the pending entry
+            let mut pending = self.pending.lock().await;
+            let removed = pending.remove(&request_id);
+            if let Some(sender) = removed {
+                drop(sender);
+            }
+            return Err(WorkerRequestError::Fault(classify_io_fault(
+                self.generation,
+                FaultClass::Transport,
+                "failed to write request",
+                error,
+            )));
+        }
+
+        match tokio::time::timeout(timeout, receiver).await {
+            Ok(Ok(PendingOutcome::Result(value))) => Ok(value),
+            Ok(Ok(PendingOutcome::ResponseError(error))) => {
+                Err(WorkerRequestError::Response(error)) // JSON-RPC error object from the server, not a transport failure
+            }
+            Ok(Ok(PendingOutcome::TransportFault(fault))) => Err(WorkerRequestError::Fault(fault)),
+            Ok(Err(_closed)) => 
Err(WorkerRequestError::Fault(Fault::new( + self.generation, + FaultClass::Transport, + FaultCode::UnexpectedEof, + FaultDetail::new("response channel closed before result"), + ))), + Err(_elapsed) => { + let mut pending = self.pending.lock().await; + let removed = pending.remove(&request_id); + if let Some(sender) = removed { + drop(sender); + } + Err(WorkerRequestError::Fault(Fault::new( + self.generation, + FaultClass::Timeout, + FaultCode::RequestTimedOut, + FaultDetail::new(format!("request timed out for method {method}")), + ))) + } + } + } + + pub(crate) async fn terminate(&self) { + let mut child = self.child.lock().await; + if child.id().is_some() + && let Err(error) = child.kill().await + { + debug!( + generation = self.generation.get(), + "failed to kill rust-analyzer process cleanly: {error}" + ); + } + if let Err(error) = child.wait().await { + debug!( + generation = self.generation.get(), + "failed to wait rust-analyzer process cleanly: {error}" + ); + } + + if let Some(task) = self.reader_task.lock().await.take() { + task.abort(); + } + if let Some(task) = self.stderr_task.lock().await.take() { + task.abort(); + } + } +} + +pub(crate) async fn spawn_worker( + config: &EngineConfig, + generation: Generation, +) -> Result<WorkerHandle, Fault> { + let mut command = Command::new(&config.rust_analyzer_binary); + let _args = command.args(&config.rust_analyzer_args); + let _envs = command.envs(config.rust_analyzer_env.iter().cloned()); + let _configured = command + .current_dir(config.workspace_root.as_path()) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); + + let mut child = command.spawn().map_err(|error| { + classify_io_fault( + generation, + FaultClass::Process, + "failed to spawn rust-analyzer", + error, + ) + })?; + + let stdin = child.stdin.take().ok_or_else(|| { + Fault::new( + generation, + FaultClass::Process, + FaultCode::SpawnFailed, + FaultDetail::new("missing stdin pipe from rust-analyzer process"), + ) + })?; + 
let stdout = child.stdout.take().ok_or_else(|| { + Fault::new( + generation, + FaultClass::Process, + FaultCode::SpawnFailed, + FaultDetail::new("missing stdout pipe from rust-analyzer process"), + ) + })?; + let stderr = child.stderr.take().ok_or_else(|| { + Fault::new( + generation, + FaultClass::Process, + FaultCode::SpawnFailed, + FaultDetail::new("missing stderr pipe from rust-analyzer process"), + ) + })?; + + let child = Arc::new(Mutex::new(child)); + let writer = Arc::new(Mutex::new(stdin)); + let pending = Arc::new(Mutex::new( + HashMap::<u64, oneshot::Sender<PendingOutcome>>::new(), + )); + let next_request_id = Arc::new(AtomicU64::new(1)); + let (terminal_fault_tx, terminal_fault_rx) = watch::channel(None::<Fault>); + + let reader_task = { + let pending = Arc::clone(&pending); + let terminal_fault_tx = terminal_fault_tx.clone(); + tokio::spawn(async move { + read_stdout_loop(generation, stdout, pending, terminal_fault_tx).await; + }) + }; + + let stderr_task = tokio::spawn(async move { + stream_stderr(generation, stderr).await; + }); + + let handle = WorkerHandle { + generation, + child, + writer, + pending, + next_request_id, + terminal_fault_rx, + reader_task: Arc::new(Mutex::new(Some(reader_task))), + stderr_task: Arc::new(Mutex::new(Some(stderr_task))), + }; + + let initialize_params = build_initialize_params(config)?; + let startup = handle + .send_request("initialize", &initialize_params, config.startup_timeout) + .await; + if let Err(error) = startup { + handle.terminate().await; + return Err(map_worker_request_error(generation, error)); + } + + let initialized_params = json!({}); + let initialized_result = handle + .send_notification("initialized", &initialized_params) + .await + .map_err(|fault| { + handle_fault_notification(generation, "initialized notification failed", fault) + }); + if let Err(fault) = initialized_result { + handle.terminate().await; + return Err(fault); + } + + Ok(handle) +} + +fn map_worker_request_error(generation: 
Generation, error: WorkerRequestError) -> Fault { + match error { + WorkerRequestError::Fault(fault) => fault, + WorkerRequestError::Response(response) => Fault::new( + generation, + FaultClass::Protocol, + FaultCode::InvalidFrame, + FaultDetail::new(format!( + "initialize returned LSP error {}: {}", + response.code, response.message + )), + ), + } +} + +fn handle_fault_notification(generation: Generation, context: &'static str, fault: Fault) -> Fault { + let detail = FaultDetail::new(format!("{context}: {}", fault.detail.message)); + Fault::new(generation, fault.class, fault.code, detail) +} + +fn build_initialize_params(config: &EngineConfig) -> Result<Value, Fault> { + let root_uri = Url::from_directory_path(config.workspace_root.as_path()).map_err(|()| { + Fault::new( + Generation::genesis(), + FaultClass::Protocol, + FaultCode::InvalidFrame, + FaultDetail::new("workspace root cannot be represented as file URI"), + ) + })?; + let folder_name = config + .workspace_root + .as_path() + .file_name() + .and_then(|value| value.to_str()) + .unwrap_or("workspace") + .to_owned(); + let root_uri_string = root_uri.to_string(); + Ok(json!({ + "processId": std::process::id(), + "rootUri": root_uri_string.clone(), + "capabilities": build_client_capabilities(), + "workspaceFolders": [{ + "uri": root_uri_string, + "name": folder_name, + }], + "trace": "off", + "clientInfo": { + "name": "adequate-rust-mcp", + "version": env!("CARGO_PKG_VERSION"), + } + })) +} + +fn build_client_capabilities() -> Value { + let symbol_kind_values = (1_u32..=26).collect::<Vec<_>>(); + json!({ + "workspace": { + "applyEdit": true, + "workspaceEdit": { + "documentChanges": true, + "resourceOperations": ["create", "rename", "delete"], + }, + "symbol": { + "dynamicRegistration": false, + "resolveSupport": { + "properties": ["location.range", "containerName"], + }, + }, + "diagnostics": { + "refreshSupport": true, + }, + "executeCommand": { + "dynamicRegistration": false, + }, + "workspaceFolders": 
true, + "configuration": true, + }, + "textDocument": { + "synchronization": { + "dynamicRegistration": false, + "willSave": false, + "didSave": true, + "willSaveWaitUntil": false, + }, + "hover": { + "dynamicRegistration": false, + "contentFormat": ["markdown", "plaintext"], + }, + "definition": { + "dynamicRegistration": false, + "linkSupport": true, + }, + "declaration": { + "dynamicRegistration": false, + "linkSupport": true, + }, + "typeDefinition": { + "dynamicRegistration": false, + "linkSupport": true, + }, + "implementation": { + "dynamicRegistration": false, + "linkSupport": true, + }, + "references": { + "dynamicRegistration": false, + }, + "documentHighlight": { + "dynamicRegistration": false, + }, + "documentSymbol": { + "dynamicRegistration": false, + "hierarchicalDocumentSymbolSupport": true, + "symbolKind": { + "valueSet": symbol_kind_values, + }, + }, + "completion": { + "dynamicRegistration": false, + "contextSupport": true, + "completionItem": { + "snippetSupport": true, + "documentationFormat": ["markdown", "plaintext"], + "resolveSupport": { + "properties": ["documentation", "detail", "additionalTextEdits"], + }, + }, + }, + "signatureHelp": { + "dynamicRegistration": false, + }, + "codeAction": { + "dynamicRegistration": false, + "isPreferredSupport": true, + "codeActionLiteralSupport": { + "codeActionKind": { + "valueSet": [ + "", + "quickfix", + "refactor", + "refactor.extract", + "refactor.inline", + "refactor.rewrite", + "source", + "source.organizeImports", + ], + }, + }, + }, + "codeLens": { + "dynamicRegistration": false, + }, + "documentLink": { + "dynamicRegistration": false, + "tooltipSupport": true, + }, + "colorProvider": { + "dynamicRegistration": false, + }, + "linkedEditingRange": { + "dynamicRegistration": false, + }, + "rename": { + "dynamicRegistration": false, + "prepareSupport": true, + }, + "typeHierarchy": { + "dynamicRegistration": false, + }, + "inlineValue": { + "dynamicRegistration": false, + }, + "moniker": { + 
"dynamicRegistration": false, + }, + "diagnostic": { + "dynamicRegistration": false, + }, + "documentFormatting": { + "dynamicRegistration": false, + }, + "documentRangeFormatting": { + "dynamicRegistration": false, + }, + "documentOnTypeFormatting": { + "dynamicRegistration": false, + }, + "foldingRange": { + "dynamicRegistration": false, + }, + "selectionRange": { + "dynamicRegistration": false, + }, + "inlayHint": { + "dynamicRegistration": false, + "resolveSupport": { + "properties": ["tooltip", "textEdits", "label.tooltip", "label.location", "label.command"], + }, + }, + "semanticTokens": { + "dynamicRegistration": false, + "requests": { + "full": { + "delta": true, + }, + "range": true, + }, + "tokenTypes": [ + "namespace", "type", "class", "enum", "interface", "struct", "typeParameter", + "parameter", "variable", "property", "enumMember", "event", "function", + "method", "macro", "keyword", "modifier", "comment", "string", "number", + "regexp", "operator", + ], + "tokenModifiers": [ + "declaration", "definition", "readonly", "static", "deprecated", "abstract", + "async", "modification", "documentation", "defaultLibrary", + ], + "formats": ["relative"], + "multilineTokenSupport": true, + "overlappingTokenSupport": true, + }, + "publishDiagnostics": { + "relatedInformation": true, + }, + }, + "window": { + "workDoneProgress": true, + }, + "general": { + "positionEncodings": ["utf-8", "utf-16"], + }, + }) +} + +async fn stream_stderr(generation: Generation, stderr: tokio::process::ChildStderr) { + let mut reader = BufReader::new(stderr).lines(); + loop { + match reader.next_line().await { + Ok(Some(line)) => { + debug!( + generation = generation.get(), + "rust-analyzer stderr: {line}" + ); + } + Ok(None) => break, + Err(error) => { + debug!( + generation = generation.get(), + "rust-analyzer stderr stream failed: {error}" + ); + break; + } + } + } +} + +async fn read_stdout_loop( + generation: Generation, + stdout: ChildStdout, + pending: Arc<Mutex<HashMap<u64, 
oneshot::Sender<PendingOutcome>>>>, + terminal_fault_tx: watch::Sender<Option<Fault>>, +) { + let mut reader = BufReader::new(stdout); + loop { + match read_frame(&mut reader).await { + Ok(frame) => { + if let Err(fault) = dispatch_frame(generation, &pending, &frame).await { + emit_terminal_fault(&terminal_fault_tx, &pending, fault).await; + break; + } + } + Err(error) => { + let fault = classify_io_fault( + generation, + FaultClass::Transport, + "failed to read frame", + error, + ); + emit_terminal_fault(&terminal_fault_tx, &pending, fault).await; + break; + } + } + } +} + +async fn emit_terminal_fault( + terminal_fault_tx: &watch::Sender<Option<Fault>>, + pending: &Arc<Mutex<HashMap<u64, oneshot::Sender<PendingOutcome>>>>, + fault: Fault, +) { + if let Err(error) = terminal_fault_tx.send(Some(fault.clone())) { + warn!("failed to publish terminal fault: {error}"); + } + let mut pending_guard = pending.lock().await; + for sender in pending_guard.drain().map(|(_id, sender)| sender) { + if let Err(outcome) = sender.send(PendingOutcome::TransportFault(fault.clone())) { + drop(outcome); + } + } +} + +async fn dispatch_frame( + generation: Generation, + pending: &Arc<Mutex<HashMap<u64, oneshot::Sender<PendingOutcome>>>>, + frame: &[u8], +) -> Result<(), Fault> { + let value: Value = serde_json::from_slice(frame).map_err(|error| { + Fault::new( + generation, + FaultClass::Protocol, + FaultCode::InvalidJson, + FaultDetail::new(format!("failed to deserialize JSON-RPC frame: {error}")), + ) + })?; + + let response_id = value.get("id").and_then(Value::as_u64); + let Some(response_id) = response_id else { + return Ok(()); + }; + + let mut pending_guard = pending.lock().await; + let Some(sender) = pending_guard.remove(&response_id) else { + warn!( + generation = generation.get(), + response_id, "received response for unknown request id" + ); + return Ok(()); + }; + drop(pending_guard); + + if let Some(result) = value.get("result") { + if let Err(outcome) = 
sender.send(PendingOutcome::Result(result.clone())) {
+            drop(outcome); // receiver side gave up (timeout/cancel); discard the undeliverable result
+        }
+        return Ok(());
+    }
+
+    if let Some(error_value) = value.get("error") {
+        let error: RpcErrorPayload =
+            serde_json::from_value(error_value.clone()).map_err(|error| {
+                Fault::new(
+                    generation,
+                    FaultClass::Protocol,
+                    FaultCode::InvalidJson,
+                    FaultDetail::new(format!(
+                        "failed to deserialize JSON-RPC error payload: {error}"
+                    )),
+                )
+            })?;
+        if let Err(outcome) = sender.send(PendingOutcome::ResponseError(error)) {
+            drop(outcome); // waiter already gone; nothing to deliver to
+        }
+        return Ok(());
+    }
+
+    Err(Fault::new( // a response must carry "result" or "error"; anything else is a protocol violation
+        generation,
+        FaultClass::Protocol,
+        FaultCode::InvalidFrame,
+        FaultDetail::new("response frame missing both result and error"),
+    ))
+}
+
+async fn read_frame(reader: &mut BufReader<ChildStdout>) -> Result<Vec<u8>, io::Error> { // reads one LSP base-protocol frame: headers, blank line, then Content-Length bytes
+    let mut content_length = None::<usize>;
+    loop {
+        let mut header_line = String::new();
+        let bytes_read = reader.read_line(&mut header_line).await?;
+        if bytes_read == 0 { // stream closed mid-headers
+            return Err(io::Error::new(
+                io::ErrorKind::UnexpectedEof,
+                "EOF while reading headers",
+            ));
+        }
+
+        if header_line == "\r\n" || header_line == "\n" { // blank line terminates the header section
+            break;
+        }
+
+        let trimmed = header_line.trim_end_matches(['\r', '\n']);
+        if let Some(length) = trimmed.strip_prefix("Content-Length:") { // NOTE(review): exact-case match; header names are case-insensitive per spec — confirm all servers emit this form
+            let parsed = length.trim().parse::<usize>().map_err(|parse_error| {
+                io::Error::new(
+                    io::ErrorKind::InvalidData,
+                    format!("invalid Content-Length header: {parse_error}"),
+                )
+            })?;
+            content_length = Some(parsed); // other headers (e.g. Content-Type) are intentionally ignored
+        }
+    }
+
+    let length = content_length.ok_or_else(|| {
+        io::Error::new(io::ErrorKind::InvalidData, "missing Content-Length header")
+    })?;
+
+    let mut payload = vec![0_u8; length]; // NOTE(review): allocation is bounded only by the peer-supplied length — consider a sanity cap
+    let _bytes_read = reader.read_exact(&mut payload).await?;
+    Ok(payload)
+}
+
+async fn write_frame(writer: &mut ChildStdin, value: &Value) -> Result<(), io::Error> { // serializes value and writes it as one Content-Length-framed message
+    let payload = serde_json::to_vec(value).map_err(|error| {
+        io::Error::new(
+            io::ErrorKind::InvalidData,
+            format!("failed to serialize JSON-RPC payload: 
{error}"), + ) + })?; + let header = format!("Content-Length: {}\r\n\r\n", payload.len()); + writer.write_all(header.as_bytes()).await?; + writer.write_all(&payload).await?; + writer.flush().await?; + Ok(()) +} + +fn classify_io_fault( + generation: Generation, + class: FaultClass, + context: &'static str, + error: io::Error, +) -> Fault { + let code = match error.kind() { + io::ErrorKind::BrokenPipe => FaultCode::BrokenPipe, + io::ErrorKind::UnexpectedEof => FaultCode::UnexpectedEof, + _ => match class { + FaultClass::Process => FaultCode::SpawnFailed, + _ => FaultCode::InvalidFrame, + }, + }; + Fault::new( + generation, + class, + code, + FaultDetail::new(format!("{context}: {error}")), + ) +} diff --git a/crates/ra-mcp-engine/src/supervisor.rs b/crates/ra-mcp-engine/src/supervisor.rs new file mode 100644 index 0000000..f0c7ea6 --- /dev/null +++ b/crates/ra-mcp-engine/src/supervisor.rs @@ -0,0 +1,1257 @@ +use crate::{ + config::EngineConfig, + error::{EngineError, EngineResult}, + lsp_transport::{WorkerHandle, WorkerRequestError, spawn_worker}, +}; +use lsp_types::{ + DiagnosticSeverity, GotoDefinitionResponse, Hover, HoverContents, Location, LocationLink, + MarkedString, Position, Range, Uri, WorkspaceEdit, +}; +use ra_mcp_domain::{ + fault::{Fault, RecoveryDirective}, + lifecycle::{DynamicLifecycle, LifecycleSnapshot}, + types::{ + InvariantViolation, OneIndexedColumn, OneIndexedLine, SourceFilePath, SourceLocation, + SourcePoint, SourcePosition, SourceRange, + }, +}; +use serde::{Deserialize, Serialize, de::DeserializeOwned}; +use serde_json::Value; +use std::{ + cmp::min, + collections::HashMap, + fs, + sync::Arc, + time::{Duration, Instant, SystemTime}, +}; +use tokio::{sync::Mutex, time::sleep}; +use tracing::{debug, warn}; +use url::Url; + +/// Hover response payload. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct HoverPayload { + /// Rendered markdown/text content, if available. 
+ pub rendered: Option<String>, + /// Symbol range, if rust-analyzer provided one. + pub range: Option<SourceRange>, +} + +/// Diagnostic severity level. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub enum DiagnosticLevel { + /// Error severity. + Error, + /// Warning severity. + Warning, + /// Informational severity. + Information, + /// Hint severity. + Hint, +} + +/// One diagnostic record. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct DiagnosticEntry { + /// Affected range. + pub range: SourceRange, + /// Severity. + pub level: DiagnosticLevel, + /// Optional diagnostic code. + pub code: Option<String>, + /// User-facing diagnostic message. + pub message: String, +} + +/// Diagnostics report for a single file. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct DiagnosticsReport { + /// Entries returned by rust-analyzer. + pub diagnostics: Vec<DiagnosticEntry>, +} + +/// Summary of rename operation impact. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct RenameReport { + /// Number of files touched by the edit. + pub files_touched: u64, + /// Number of text edits in total. + pub edits_applied: u64, +} + +/// Aggregate runtime telemetry snapshot for engine behavior. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct TelemetrySnapshot { + /// Process uptime in milliseconds. + pub uptime_ms: u64, + /// Current lifecycle snapshot. + pub lifecycle: LifecycleSnapshot, + /// Number of consecutive failures currently tracked by supervisor. + pub consecutive_failures: u32, + /// Number of worker restarts performed. + pub restart_count: u64, + /// Global counters across all requests. + pub totals: TelemetryTotals, + /// Per-method counters and latency aggregates. + pub methods: Vec<MethodTelemetrySnapshot>, + /// Last fault that triggered worker restart, if any. + pub last_fault: Option<Fault>, +} + +/// Total request/fault counters. 
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct TelemetryTotals { + /// Total request attempts issued to rust-analyzer. + pub request_count: u64, + /// Successful request attempts. + pub success_count: u64, + /// LSP response error attempts. + pub response_error_count: u64, + /// Transport/protocol fault attempts. + pub transport_fault_count: u64, + /// Retry attempts performed. + pub retry_count: u64, +} + +/// Per-method telemetry aggregate. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct MethodTelemetrySnapshot { + /// LSP method name. + pub method: String, + /// Total request attempts for this method. + pub request_count: u64, + /// Successful attempts. + pub success_count: u64, + /// LSP response error attempts. + pub response_error_count: u64, + /// Transport/protocol fault attempts. + pub transport_fault_count: u64, + /// Retry attempts for this method. + pub retry_count: u64, + /// Last observed attempt latency in milliseconds. + pub last_latency_ms: Option<u64>, + /// Maximum observed attempt latency in milliseconds. + pub max_latency_ms: u64, + /// Average attempt latency in milliseconds. + pub avg_latency_ms: u64, + /// Last error detail for this method, if any. 
+    pub last_error: Option<String>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+enum RequestMethod {
+    Hover,
+    Definition,
+    References,
+    Rename,
+    DocumentDiagnostic,
+    Raw(&'static str), // pass-through used by Engine::raw_lsp_request for arbitrary methods
+}
+
+impl RequestMethod {
+    const fn as_lsp_method(self) -> &'static str { // maps the variant to its wire-level LSP method name
+        match self {
+            Self::Hover => "textDocument/hover",
+            Self::Definition => "textDocument/definition",
+            Self::References => "textDocument/references",
+            Self::Rename => "textDocument/rename",
+            Self::DocumentDiagnostic => "textDocument/diagnostic",
+            Self::Raw(method) => method,
+        }
+    }
+
+    fn retry_delay(self, payload: &crate::lsp_transport::RpcErrorPayload) -> Option<Duration> { // Some(delay) = caller should retry after delay; None = surface the error
+        if self.supports_transient_response_retry()
+            && is_transient_response_error(payload.code, payload.message.as_str())
+        {
+            return Some(self.transient_response_retry_delay());
+        }
+        let retryable_method = matches!( // second retry class: symbol-not-yet-indexed races on navigation/rename
+            self.as_lsp_method(),
+            "textDocument/rename"
+                | "textDocument/prepareRename"
+                | "textDocument/definition"
+                | "textDocument/references"
+        );
+        if !retryable_method
+            || payload.code != -32602 // JSON-RPC InvalidParams; presumably rust-analyzer's code while indexing — confirm
+            || !payload.message.contains("No references found at position")
+        {
+            return None;
+        }
+        match self.as_lsp_method() {
+            "textDocument/rename" | "textDocument/prepareRename" => {
+                Some(Duration::from_millis(1500)) // rename waits longer: a premature retry could rename the wrong occurrence set
+            }
+            _ => Some(Duration::from_millis(250)),
+        }
+    }
+
+    const fn supports_transient_response_retry(self) -> bool { // Raw requests are excluded: their semantics are unknown to us
+        matches!(
+            self,
+            Self::Hover
+                | Self::Definition
+                | Self::References
+                | Self::Rename
+                | Self::DocumentDiagnostic
+        )
+    }
+
+    fn transient_response_retry_delay(self) -> Duration { // per-method backoff before retrying a transient response error
+        match self {
+            Self::DocumentDiagnostic => Duration::from_millis(250),
+            Self::Rename => Duration::from_millis(350),
+            Self::Hover | Self::Definition | Self::References => Duration::from_millis(150),
+            Self::Raw(_) => Duration::from_millis(0), // unreachable via retry_delay: Raw never qualifies above
+        }
+    }
+}
+
+fn is_transient_response_error(code: i64, message: &str) -> bool { // matches LSP ContentModified/ServerCancelled codes plus known message variants
+    let normalized = message.to_ascii_lowercase();
+    code == -32801 // LSP ContentModified
+        || code 
== -32802 + || normalized.contains("content modified") + || normalized.contains("document changed") + || normalized.contains("server cancelled") + || normalized.contains("request cancelled") + || normalized.contains("request canceled") +} + +#[derive(Debug, Clone, Serialize)] +struct TextDocumentIdentifierWire { + uri: String, +} + +#[derive(Debug, Clone, Copy, Serialize)] +struct PositionWire { + line: u32, + character: u32, +} + +impl From<SourcePoint> for PositionWire { + fn from(value: SourcePoint) -> Self { + Self { + line: value.line().to_zero_indexed(), + character: value.column().to_zero_indexed(), + } + } +} + +#[derive(Debug, Clone, Serialize)] +struct TextDocumentPositionParamsWire { + #[serde(rename = "textDocument")] + text_document: TextDocumentIdentifierWire, + position: PositionWire, +} + +#[derive(Debug, Clone, Serialize)] +struct ReferencesContextWire { + #[serde(rename = "includeDeclaration")] + include_declaration: bool, +} + +#[derive(Debug, Clone, Serialize)] +struct ReferencesParamsWire { + #[serde(rename = "textDocument")] + text_document: TextDocumentIdentifierWire, + position: PositionWire, + context: ReferencesContextWire, +} + +#[derive(Debug, Clone, Serialize)] +struct RenameParamsWire { + #[serde(rename = "textDocument")] + text_document: TextDocumentIdentifierWire, + position: PositionWire, + #[serde(rename = "newName")] + new_name: String, +} + +#[derive(Debug, Clone, Serialize)] +struct DocumentDiagnosticParamsWire { + #[serde(rename = "textDocument")] + text_document: TextDocumentIdentifierWire, +} + +#[derive(Debug, Clone, Serialize)] +struct VersionedTextDocumentIdentifierWire { + uri: String, + version: i32, +} + +#[derive(Debug, Clone, Serialize)] +struct TextDocumentContentChangeEventWire { + text: String, +} + +#[derive(Debug, Clone, Serialize)] +struct DidChangeTextDocumentParamsWire { + #[serde(rename = "textDocument")] + text_document: VersionedTextDocumentIdentifierWire, + #[serde(rename = "contentChanges")] + 
content_changes: Vec<TextDocumentContentChangeEventWire>, +} + +#[derive(Debug, Clone, Serialize)] +struct TextDocumentItemWire { + uri: String, + #[serde(rename = "languageId")] + language_id: &'static str, + version: i32, + text: String, +} + +#[derive(Debug, Clone, Serialize)] +struct DidOpenTextDocumentParamsWire { + #[serde(rename = "textDocument")] + text_document: TextDocumentItemWire, +} + +/// Resilient engine façade. +#[derive(Clone)] +pub struct Engine { + supervisor: Arc<Mutex<Supervisor>>, +} + +struct Supervisor { + config: EngineConfig, + lifecycle: DynamicLifecycle, + worker: Option<WorkerHandle>, + consecutive_failures: u32, + open_documents: HashMap<SourceFilePath, OpenDocumentState>, + telemetry: TelemetryState, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct OpenDocumentState { + version: i32, + fingerprint: SourceFileFingerprint, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct SourceFileFingerprint { + byte_len: u64, + modified_nanos_since_epoch: u128, +} + +#[derive(Debug)] +struct TelemetryState { + started_at: Instant, + totals: TelemetryTotalsState, + methods: HashMap<&'static str, MethodTelemetryState>, + restart_count: u64, + last_fault: Option<Fault>, +} + +#[derive(Debug, Default)] +struct TelemetryTotalsState { + request_count: u64, + success_count: u64, + response_error_count: u64, + transport_fault_count: u64, + retry_count: u64, +} + +#[derive(Debug, Default)] +struct MethodTelemetryState { + request_count: u64, + success_count: u64, + response_error_count: u64, + transport_fault_count: u64, + retry_count: u64, + total_latency_ms: u128, + last_latency_ms: Option<u64>, + max_latency_ms: u64, + last_error: Option<String>, +} + +impl Engine { + /// Creates a new engine. + #[must_use] + pub fn new(config: EngineConfig) -> Self { + Self { + supervisor: Arc::new(Mutex::new(Supervisor::new(config))), + } + } + + /// Returns current lifecycle snapshot. 
+ pub async fn lifecycle_snapshot(&self) -> LifecycleSnapshot { + let supervisor = self.supervisor.lock().await; + supervisor.snapshot() + } + + /// Returns aggregate request/fault telemetry snapshot. + pub async fn telemetry_snapshot(&self) -> TelemetrySnapshot { + let supervisor = self.supervisor.lock().await; + supervisor.telemetry_snapshot() + } + + /// Executes hover request. + pub async fn hover(&self, position: SourcePosition) -> EngineResult<HoverPayload> { + let document_hint = Some(position.file_path().clone()); + let request = text_document_position_params(&position)?; + let hover = self + .issue_typed_request::<_, Option<Hover>>(RequestMethod::Hover, &request, document_hint) + .await?; + let payload = hover + .map(|hover| -> Result<HoverPayload, EngineError> { + let range = hover + .range + .map(|range| range_to_source_range(position.file_path(), range)) + .transpose()?; + Ok(HoverPayload { + rendered: Some(render_hover_contents(hover.contents)), + range, + }) + }) + .transpose()? + .unwrap_or(HoverPayload { + rendered: None, + range: None, + }); + Ok(payload) + } + + /// Executes definition request. + pub async fn definition(&self, position: SourcePosition) -> EngineResult<Vec<SourceLocation>> { + let document_hint = Some(position.file_path().clone()); + let request = text_document_position_params(&position)?; + let parsed = self + .issue_typed_request::<_, Option<GotoDefinitionResponse>>( + RequestMethod::Definition, + &request, + document_hint, + ) + .await?; + let locations = match parsed { + None => Vec::new(), + Some(GotoDefinitionResponse::Scalar(location)) => { + vec![source_location_from_lsp_location(location)?] 
+ } + Some(GotoDefinitionResponse::Array(locations)) => locations + .into_iter() + .map(source_location_from_lsp_location) + .collect::<Result<Vec<_>, _>>()?, + Some(GotoDefinitionResponse::Link(links)) => links + .into_iter() + .map(source_location_from_lsp_link) + .collect::<Result<Vec<_>, _>>()?, + }; + Ok(locations) + } + + /// Executes references request. + pub async fn references(&self, position: SourcePosition) -> EngineResult<Vec<SourceLocation>> { + let request = ReferencesParamsWire { + text_document: text_document_identifier(position.file_path())?, + position: PositionWire::from(position.point()), + context: ReferencesContextWire { + include_declaration: true, + }, + }; + let parsed = self + .issue_typed_request::<_, Option<Vec<Location>>>( + RequestMethod::References, + &request, + Some(position.file_path().clone()), + ) + .await?; + parsed + .unwrap_or_default() + .into_iter() + .map(source_location_from_lsp_location) + .collect::<Result<Vec<_>, _>>() + } + + /// Executes rename request. + pub async fn rename_symbol( + &self, + position: SourcePosition, + new_name: String, + ) -> EngineResult<RenameReport> { + let request = RenameParamsWire { + text_document: text_document_identifier(position.file_path())?, + position: PositionWire::from(position.point()), + new_name, + }; + let edit = self + .issue_typed_request::<_, WorkspaceEdit>( + RequestMethod::Rename, + &request, + Some(position.file_path().clone()), + ) + .await?; + Ok(summarize_workspace_edit(edit)) + } + + /// Executes document diagnostics request. 
+ pub async fn diagnostics(&self, file_path: SourceFilePath) -> EngineResult<DiagnosticsReport> { + let request = DocumentDiagnosticParamsWire { + text_document: text_document_identifier(&file_path)?, + }; + let response = self + .issue_request( + RequestMethod::DocumentDiagnostic, + &request, + Some(file_path.clone()), + ) + .await?; + parse_diagnostics_report(&file_path, response) + } + + /// Executes an arbitrary typed LSP request and returns raw JSON payload. + pub async fn raw_lsp_request( + &self, + method: &'static str, + params: Value, + ) -> EngineResult<Value> { + let document_hint = source_file_path_hint_from_request_params(¶ms)?; + self.issue_request(RequestMethod::Raw(method), ¶ms, document_hint) + .await + } + + async fn issue_typed_request<P, R>( + &self, + method: RequestMethod, + params: &P, + document_hint: Option<SourceFilePath>, + ) -> EngineResult<R> + where + P: Serialize, + R: DeserializeOwned, + { + let response = self.issue_request(method, params, document_hint).await?; + serde_json::from_value::<R>(response).map_err(|error| EngineError::InvalidPayload { + method: method.as_lsp_method(), + message: error.to_string(), + }) + } + + async fn issue_request<P>( + &self, + method: RequestMethod, + params: &P, + document_hint: Option<SourceFilePath>, + ) -> EngineResult<Value> + where + P: Serialize, + { + let max_attempts = 2_u8; + let mut attempt = 0_u8; + while attempt < max_attempts { + attempt = attempt.saturating_add(1); + let (worker, request_timeout) = { + let mut supervisor = self.supervisor.lock().await; + let worker = supervisor.ensure_worker().await?; + if let Some(file_path) = document_hint.as_ref() { + supervisor.synchronize_document(&worker, file_path).await?; + } + (worker, supervisor.request_timeout()) + }; + + let attempt_started_at = Instant::now(); + let result = worker + .send_request(method.as_lsp_method(), params, request_timeout) + .await; + let latency = attempt_started_at.elapsed(); + match result { + Ok(value) => { + let 
mut supervisor = self.supervisor.lock().await; + supervisor.record_success(method.as_lsp_method(), latency); + return Ok(value); + } + Err(WorkerRequestError::Response(payload)) => { + let retry_delay = (attempt < max_attempts) + .then(|| method.retry_delay(&payload)) + .flatten(); + let should_retry = retry_delay.is_some(); + { + let mut supervisor = self.supervisor.lock().await; + supervisor.record_response_error( + method.as_lsp_method(), + latency, + payload.code, + format_lsp_response_error_detail(&payload), + should_retry, + ); + } + + if let Some(retry_delay) = retry_delay { + debug!( + attempt, + method = method.as_lsp_method(), + code = payload.code, + delay_ms = retry_delay.as_millis(), + "retrying request after transient lsp response error" + ); + sleep(retry_delay).await; + continue; + } + return Err(EngineError::from(payload)); + } + Err(WorkerRequestError::Fault(fault)) => { + let directive = fault.directive(); + let will_retry = matches!( + directive, + RecoveryDirective::RetryInPlace | RecoveryDirective::RestartAndReplay + ) && attempt < max_attempts; + { + let mut supervisor = self.supervisor.lock().await; + supervisor.record_transport_fault( + method.as_lsp_method(), + latency, + fault.detail.message.clone(), + will_retry, + ); + } + + match directive { + RecoveryDirective::RetryInPlace => { + debug!( + attempt, + method = method.as_lsp_method(), + "retrying request in-place after fault" + ); + if attempt >= max_attempts { + return Err(EngineError::Fault(fault)); + } + } + RecoveryDirective::RestartAndReplay => { + let mut supervisor = self.supervisor.lock().await; + supervisor.record_fault(fault.clone()).await?; + if attempt >= max_attempts { + return Err(EngineError::Fault(fault)); + } + debug!( + attempt, + method = method.as_lsp_method(), + "restarting worker and replaying request" + ); + } + RecoveryDirective::AbortRequest => { + let mut supervisor = self.supervisor.lock().await; + supervisor.record_fault(fault.clone()).await?; + return 
Err(EngineError::Fault(fault)); + } + } + } + } + } + Err(EngineError::Fault(Fault::new( + self.lifecycle_generation().await, + ra_mcp_domain::fault::FaultClass::Resource, + ra_mcp_domain::fault::FaultCode::RequestTimedOut, + ra_mcp_domain::fault::FaultDetail::new(format!( + "exhausted retries for method {}", + method.as_lsp_method() + )), + ))) + } + + async fn lifecycle_generation(&self) -> ra_mcp_domain::types::Generation { + let supervisor = self.supervisor.lock().await; + supervisor.generation() + } +} + +impl TelemetryState { + fn new() -> Self { + Self { + started_at: Instant::now(), + totals: TelemetryTotalsState::default(), + methods: HashMap::new(), + restart_count: 0, + last_fault: None, + } + } + + fn record_success(&mut self, method: &'static str, latency: Duration) { + self.totals.request_count = self.totals.request_count.saturating_add(1); + self.totals.success_count = self.totals.success_count.saturating_add(1); + let entry = self.methods.entry(method).or_default(); + entry.request_count = entry.request_count.saturating_add(1); + entry.success_count = entry.success_count.saturating_add(1); + entry.record_latency(latency); + entry.last_error = None; + } + + fn record_response_error( + &mut self, + method: &'static str, + latency: Duration, + detail: String, + retry_performed: bool, + ) { + self.totals.request_count = self.totals.request_count.saturating_add(1); + self.totals.response_error_count = self.totals.response_error_count.saturating_add(1); + if retry_performed { + self.totals.retry_count = self.totals.retry_count.saturating_add(1); + } + + let entry = self.methods.entry(method).or_default(); + entry.request_count = entry.request_count.saturating_add(1); + entry.response_error_count = entry.response_error_count.saturating_add(1); + if retry_performed { + entry.retry_count = entry.retry_count.saturating_add(1); + } + entry.record_latency(latency); + entry.last_error = Some(detail); + } + + fn record_transport_fault( + &mut self, + method: 
&'static str, + latency: Duration, + detail: String, + retry_performed: bool, + ) { + self.totals.request_count = self.totals.request_count.saturating_add(1); + self.totals.transport_fault_count = self.totals.transport_fault_count.saturating_add(1); + if retry_performed { + self.totals.retry_count = self.totals.retry_count.saturating_add(1); + } + + let entry = self.methods.entry(method).or_default(); + entry.request_count = entry.request_count.saturating_add(1); + entry.transport_fault_count = entry.transport_fault_count.saturating_add(1); + if retry_performed { + entry.retry_count = entry.retry_count.saturating_add(1); + } + entry.record_latency(latency); + entry.last_error = Some(detail); + } + + fn record_restart(&mut self, fault: Fault) { + self.restart_count = self.restart_count.saturating_add(1); + self.last_fault = Some(fault); + } + + fn snapshot( + &self, + lifecycle: LifecycleSnapshot, + consecutive_failures: u32, + ) -> TelemetrySnapshot { + let mut methods = self + .methods + .iter() + .map(|(method, entry)| MethodTelemetrySnapshot { + method: (*method).to_owned(), + request_count: entry.request_count, + success_count: entry.success_count, + response_error_count: entry.response_error_count, + transport_fault_count: entry.transport_fault_count, + retry_count: entry.retry_count, + last_latency_ms: entry.last_latency_ms, + max_latency_ms: entry.max_latency_ms, + avg_latency_ms: entry.average_latency_ms(), + last_error: entry.last_error.clone(), + }) + .collect::<Vec<_>>(); + methods.sort_by(|left, right| left.method.cmp(&right.method)); + + let uptime_ms = duration_millis_u64(self.started_at.elapsed()); + TelemetrySnapshot { + uptime_ms, + lifecycle, + consecutive_failures, + restart_count: self.restart_count, + totals: TelemetryTotals { + request_count: self.totals.request_count, + success_count: self.totals.success_count, + response_error_count: self.totals.response_error_count, + transport_fault_count: self.totals.transport_fault_count, + retry_count: 
self.totals.retry_count, + }, + methods, + last_fault: self.last_fault.clone(), + } + } +} + +impl MethodTelemetryState { + fn record_latency(&mut self, latency: Duration) { + let latency_ms = duration_millis_u64(latency); + self.last_latency_ms = Some(latency_ms); + self.max_latency_ms = self.max_latency_ms.max(latency_ms); + self.total_latency_ms = self.total_latency_ms.saturating_add(latency_ms as u128); + } + + fn average_latency_ms(&self) -> u64 { + if self.request_count == 0 { + return 0; + } + let avg = self.total_latency_ms / u128::from(self.request_count); + if avg > u128::from(u64::MAX) { + u64::MAX + } else { + avg as u64 + } + } +} + +fn duration_millis_u64(duration: Duration) -> u64 { + let millis = duration.as_millis(); + if millis > u128::from(u64::MAX) { + u64::MAX + } else { + millis as u64 + } +} + +impl Supervisor { + fn new(config: EngineConfig) -> Self { + Self { + config, + lifecycle: DynamicLifecycle::cold(), + worker: None, + consecutive_failures: 0, + open_documents: HashMap::new(), + telemetry: TelemetryState::new(), + } + } + + fn request_timeout(&self) -> Duration { + self.config.request_timeout + } + + async fn synchronize_document( + &mut self, + worker: &WorkerHandle, + file_path: &SourceFilePath, + ) -> EngineResult<()> { + let fingerprint = capture_source_file_fingerprint(file_path)?; + if let Some(existing) = self.open_documents.get_mut(file_path) { + if existing.fingerprint == fingerprint { + return Ok(()); + } + let text = fs::read_to_string(file_path.as_path())?; + let next_version = existing.version.saturating_add(1); + let params = DidChangeTextDocumentParamsWire { + text_document: VersionedTextDocumentIdentifierWire { + uri: file_uri_string_from_source_path(file_path)?, + version: next_version, + }, + content_changes: vec![TextDocumentContentChangeEventWire { text }], + }; + worker + .send_notification("textDocument/didChange", &params) + .await + .map_err(EngineError::from)?; + existing.version = next_version; + 
existing.fingerprint = fingerprint; + return Ok(()); + } + + let text = fs::read_to_string(file_path.as_path())?; + let params = DidOpenTextDocumentParamsWire { + text_document: TextDocumentItemWire { + uri: file_uri_string_from_source_path(file_path)?, + language_id: "rust", + version: 1, + text, + }, + }; + worker + .send_notification("textDocument/didOpen", &params) + .await + .map_err(EngineError::from)?; + let _previous = self.open_documents.insert( + file_path.clone(), + OpenDocumentState { + version: 1, + fingerprint, + }, + ); + Ok(()) + } + + fn snapshot(&self) -> LifecycleSnapshot { + self.lifecycle.snapshot() + } + + fn telemetry_snapshot(&self) -> TelemetrySnapshot { + let lifecycle = self.snapshot(); + self.telemetry + .snapshot(lifecycle, self.consecutive_failures) + } + + fn generation(&self) -> ra_mcp_domain::types::Generation { + let snapshot = self.snapshot(); + match snapshot { + LifecycleSnapshot::Cold { generation } + | LifecycleSnapshot::Starting { generation } + | LifecycleSnapshot::Ready { generation } + | LifecycleSnapshot::Recovering { generation, .. 
} => generation, + } + } + + async fn ensure_worker(&mut self) -> EngineResult<WorkerHandle> { + if let Some(worker) = self.worker.clone() { + if let Some(fault) = worker.terminal_fault() { + warn!( + generation = fault.generation.get(), + "worker marked terminal, recycling" + ); + self.record_fault(fault).await?; + } else { + return Ok(worker); + } + } + self.spawn_worker().await + } + + async fn spawn_worker(&mut self) -> EngineResult<WorkerHandle> { + self.lifecycle = self.lifecycle.clone().begin_startup()?; + let generation = self.generation(); + let started = spawn_worker(&self.config, generation).await; + match started { + Ok(worker) => { + self.lifecycle = self.lifecycle.clone().complete_startup()?; + self.worker = Some(worker.clone()); + self.consecutive_failures = 0; + self.open_documents.clear(); + Ok(worker) + } + Err(fault) => { + self.record_fault(fault.clone()).await?; + Err(EngineError::Fault(fault)) + } + } + } + + async fn record_fault(&mut self, fault: Fault) -> EngineResult<()> { + self.lifecycle = fracture_or_force_recovery(self.lifecycle.clone(), fault.clone())?; + self.consecutive_failures = self.consecutive_failures.saturating_add(1); + self.telemetry.record_restart(fault.clone()); + + if let Some(worker) = self.worker.take() { + worker.terminate().await; + } + self.open_documents.clear(); + + let delay = self.next_backoff_delay(); + debug!( + failures = self.consecutive_failures, + delay_ms = delay.as_millis(), + "applying restart backoff delay" + ); + sleep(delay).await; + Ok(()) + } + + fn record_success(&mut self, method: &'static str, latency: Duration) { + self.consecutive_failures = 0; + self.telemetry.record_success(method, latency); + } + + fn record_response_error( + &mut self, + method: &'static str, + latency: Duration, + code: i64, + message: String, + retry_performed: bool, + ) { + let detail = format!("code={code} message={message}"); + self.telemetry + .record_response_error(method, latency, detail, retry_performed); + } + + 
fn record_transport_fault( + &mut self, + method: &'static str, + latency: Duration, + detail: String, + retry_performed: bool, + ) { + self.telemetry + .record_transport_fault(method, latency, detail, retry_performed); + } + + fn next_backoff_delay(&self) -> Duration { + let exponent = self.consecutive_failures.saturating_sub(1); + let multiplier = if exponent >= 31 { + u32::MAX + } else { + 1_u32 << exponent + }; + let scaled = self.config.backoff_policy.floor.saturating_mul(multiplier); + min(scaled, self.config.backoff_policy.ceiling) + } +} + +fn fracture_or_force_recovery( + lifecycle: DynamicLifecycle, + fault: Fault, +) -> EngineResult<DynamicLifecycle> { + match lifecycle.clone().fracture(fault.clone()) { + Ok(next) => Ok(next), + Err(_error) => { + let started = lifecycle.begin_startup()?; + started.fracture(fault).map_err(EngineError::from) + } + } +} + +fn text_document_identifier( + file_path: &SourceFilePath, +) -> EngineResult<TextDocumentIdentifierWire> { + Ok(TextDocumentIdentifierWire { + uri: file_uri_string_from_source_path(file_path)?, + }) +} + +fn text_document_position_params( + position: &SourcePosition, +) -> EngineResult<TextDocumentPositionParamsWire> { + Ok(TextDocumentPositionParamsWire { + text_document: text_document_identifier(position.file_path())?, + position: PositionWire::from(position.point()), + }) +} + +fn format_lsp_response_error_detail(payload: &crate::lsp_transport::RpcErrorPayload) -> String { + let crate::lsp_transport::RpcErrorPayload { + code, + message, + data, + } = payload; + match data { + Some(data) => format!("code={code} message={message} data={data}"), + None => format!("code={code} message={message}"), + } +} + +fn file_uri_string_from_source_path(file_path: &SourceFilePath) -> EngineResult<String> { + let file_url = + Url::from_file_path(file_path.as_path()).map_err(|()| EngineError::InvalidFileUrl)?; + Ok(file_url.to_string()) +} + +fn source_file_path_hint_from_request_params( + params: &Value, +) -> 
EngineResult<Option<SourceFilePath>> { + let maybe_uri = params + .get("textDocument") + .and_then(Value::as_object) + .and_then(|document| document.get("uri")) + .and_then(Value::as_str); + let Some(uri) = maybe_uri else { + return Ok(None); + }; + let file_path = source_file_path_from_file_uri_str(uri)?; + Ok(Some(file_path)) +} + +fn source_file_path_from_file_uri_str(uri: &str) -> EngineResult<SourceFilePath> { + let file_url = Url::parse(uri).map_err(|_error| EngineError::InvalidFileUrl)?; + let file_path = file_url + .to_file_path() + .map_err(|()| EngineError::InvalidFileUrl)?; + SourceFilePath::try_new(file_path).map_err(EngineError::from) +} + +fn capture_source_file_fingerprint( + file_path: &SourceFilePath, +) -> EngineResult<SourceFileFingerprint> { + let metadata = fs::metadata(file_path.as_path())?; + let modified = metadata.modified().unwrap_or(SystemTime::UNIX_EPOCH); + let modified_nanos_since_epoch = modified + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap_or(Duration::ZERO) + .as_nanos(); + Ok(SourceFileFingerprint { + byte_len: metadata.len(), + modified_nanos_since_epoch, + }) +} + +fn source_location_from_lsp_link(link: LocationLink) -> EngineResult<SourceLocation> { + let uri = link.target_uri; + let range = link.target_selection_range; + source_location_from_uri_and_position(uri, range.start) +} + +fn source_location_from_lsp_location(location: Location) -> EngineResult<SourceLocation> { + source_location_from_uri_and_position(location.uri, location.range.start) +} + +fn source_location_from_uri_and_position( + uri: Uri, + position: Position, +) -> EngineResult<SourceLocation> { + let file_url = Url::parse(uri.as_str()).map_err(|_error| EngineError::InvalidFileUrl)?; + let path = file_url + .to_file_path() + .map_err(|()| EngineError::InvalidFileUrl)?; + let file_path = SourceFilePath::try_new(path)?; + let point = SourcePoint::new( + OneIndexedLine::try_new(u64::from(position.line).saturating_add(1))?, + 
OneIndexedColumn::try_new(u64::from(position.character).saturating_add(1))?, + ); + Ok(SourceLocation::new(file_path, point)) +} + +fn range_to_source_range( + file_path: &SourceFilePath, + range: Range, +) -> Result<SourceRange, InvariantViolation> { + let start = SourcePoint::new( + OneIndexedLine::try_new(u64::from(range.start.line).saturating_add(1))?, + OneIndexedColumn::try_new(u64::from(range.start.character).saturating_add(1))?, + ); + let end = SourcePoint::new( + OneIndexedLine::try_new(u64::from(range.end.line).saturating_add(1))?, + OneIndexedColumn::try_new(u64::from(range.end.character).saturating_add(1))?, + ); + SourceRange::try_new(file_path.clone(), start, end) +} + +fn render_hover_contents(contents: HoverContents) -> String { + match contents { + HoverContents::Scalar(marked_string) => marked_string_to_string(marked_string), + HoverContents::Array(items) => items + .into_iter() + .map(marked_string_to_string) + .collect::<Vec<_>>() + .join("\n"), + HoverContents::Markup(markup) => markup.value, + } +} + +fn marked_string_to_string(marked: MarkedString) -> String { + match marked { + MarkedString::String(value) => value, + MarkedString::LanguageString(language_string) => { + format!( + "```{}\n{}\n```", + language_string.language, language_string.value + ) + } + } +} + +fn summarize_workspace_edit(edit: WorkspaceEdit) -> RenameReport { + let mut touched = HashMap::<String, u64>::new(); + let mut edits_applied = 0_u64; + + if let Some(changes) = edit.changes { + for (uri, edits) in changes { + let edit_count = u64::try_from(edits.len()).unwrap_or(u64::MAX); + let _previous = touched.insert(uri.as_str().to_owned(), edit_count); + edits_applied = edits_applied.saturating_add(edit_count); + } + } + + if let Some(document_changes) = edit.document_changes { + match document_changes { + lsp_types::DocumentChanges::Edits(edits) => { + for document_edit in edits { + let uri = document_edit.text_document.uri; + let edit_count = 
u64::try_from(document_edit.edits.len()).unwrap_or(u64::MAX); + let _entry = touched + .entry(uri.as_str().to_owned()) + .and_modify(|count| *count = count.saturating_add(edit_count)) + .or_insert(edit_count); + edits_applied = edits_applied.saturating_add(edit_count); + } + } + lsp_types::DocumentChanges::Operations(operations) => { + edits_applied = edits_applied + .saturating_add(u64::try_from(operations.len()).unwrap_or(u64::MAX)); + for operation in operations { + match operation { + lsp_types::DocumentChangeOperation::Op(operation) => match operation { + lsp_types::ResourceOp::Create(create) => { + let _entry = + touched.entry(create.uri.as_str().to_owned()).or_insert(0); + } + lsp_types::ResourceOp::Rename(rename) => { + let _entry = touched + .entry(rename.new_uri.as_str().to_owned()) + .or_insert(0); + } + lsp_types::ResourceOp::Delete(delete) => { + let _entry = + touched.entry(delete.uri.as_str().to_owned()).or_insert(0); + } + }, + lsp_types::DocumentChangeOperation::Edit(edit) => { + let edit_count = u64::try_from(edit.edits.len()).unwrap_or(u64::MAX); + let _entry = touched + .entry(edit.text_document.uri.as_str().to_owned()) + .and_modify(|count| *count = count.saturating_add(edit_count)) + .or_insert(edit_count); + } + } + } + } + } + } + + RenameReport { + files_touched: u64::try_from(touched.len()).unwrap_or(u64::MAX), + edits_applied, + } +} + +#[derive(Debug, Deserialize)] +#[serde(tag = "kind", rename_all = "lowercase")] +enum DiagnosticReportWire { + Full { items: Vec<DiagnosticWire> }, + Unchanged {}, +} + +#[derive(Debug, Deserialize)] +struct DiagnosticWire { + range: Range, + severity: Option<DiagnosticSeverity>, + code: Option<Value>, + message: String, +} + +fn parse_diagnostics_report( + file_path: &SourceFilePath, + value: Value, +) -> EngineResult<DiagnosticsReport> { + let parsed = serde_json::from_value::<DiagnosticReportWire>(value).map_err(|error| { + EngineError::InvalidPayload { + method: "textDocument/diagnostic", + message: 
error.to_string(), + } + })?; + match parsed { + DiagnosticReportWire::Unchanged {} => Ok(DiagnosticsReport { + diagnostics: Vec::new(), + }), + DiagnosticReportWire::Full { items } => { + let diagnostics = items + .into_iter() + .map(|item| { + let range = range_to_source_range(file_path, item.range)?; + let level = match item.severity.unwrap_or(DiagnosticSeverity::INFORMATION) { + DiagnosticSeverity::ERROR => DiagnosticLevel::Error, + DiagnosticSeverity::WARNING => DiagnosticLevel::Warning, + DiagnosticSeverity::INFORMATION => DiagnosticLevel::Information, + DiagnosticSeverity::HINT => DiagnosticLevel::Hint, + _ => DiagnosticLevel::Information, + }; + let code = item.code.map(|value| match value { + Value::String(message) => message, + Value::Number(number) => number.to_string(), + other => other.to_string(), + }); + Ok(DiagnosticEntry { + range, + level, + code, + message: item.message, + }) + }) + .collect::<Result<Vec<_>, InvariantViolation>>()?; + Ok(DiagnosticsReport { diagnostics }) + } + } +} diff --git a/crates/ra-mcp-engine/tests/engine_recovery.rs b/crates/ra-mcp-engine/tests/engine_recovery.rs new file mode 100644 index 0000000..a7f2db8 --- /dev/null +++ b/crates/ra-mcp-engine/tests/engine_recovery.rs @@ -0,0 +1,353 @@ +//! Integration tests for engine restart and transport recovery. 
+ +use lsp_types as _; +use ra_mcp_domain::{ + lifecycle::LifecycleSnapshot, + types::{ + OneIndexedColumn, OneIndexedLine, SourceFilePath, SourcePoint, SourcePosition, + WorkspaceRoot, + }, +}; +use ra_mcp_engine::{BackoffPolicy, Engine, EngineConfig, EngineError}; +use serde as _; +use serde_json::{self, json}; +use serial_test::serial; +use std::{error::Error, fs, path::PathBuf, time::Duration}; +use tempfile::TempDir; +use thiserror as _; +use tracing as _; +use url as _; + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +#[serial] +async fn stable_fake_server_handles_core_requests() -> Result<(), Box<dyn Error>> { + let fixture = make_fixture()?; + let config = make_engine_config(&fixture, vec!["--mode".into(), "stable".into()])?; + let engine = Engine::new(config); + let position = fixture.position()?; + + let hover = engine.hover(position.clone()).await?; + assert_eq!(hover.rendered.as_deref(), Some("hover::ok")); + + let definitions = engine.definition(position.clone()).await?; + assert_eq!(definitions.len(), 1); + assert_eq!(definitions[0].line().get(), 3); + assert_eq!(definitions[0].column().get(), 4); + + let references = engine.references(position.clone()).await?; + assert_eq!(references.len(), 1); + + let rename = engine + .rename_symbol(position.clone(), "renamed".to_owned()) + .await?; + assert!(rename.files_touched >= 1); + assert!(rename.edits_applied >= 1); + + let diagnostics = engine.diagnostics(fixture.source_file_path()?).await?; + assert_eq!(diagnostics.diagnostics.len(), 1); + + let snapshot = engine.lifecycle_snapshot().await; + assert!(matches!(snapshot, LifecycleSnapshot::Ready { .. 
})); + + Ok(()) +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +#[serial] +async fn stable_fake_server_reports_success_telemetry() -> Result<(), Box<dyn Error>> { + let fixture = make_fixture()?; + let config = make_engine_config(&fixture, vec!["--mode".into(), "stable".into()])?; + let engine = Engine::new(config); + let position = fixture.position()?; + + let _hover = engine.hover(position.clone()).await?; + let _definition = engine.definition(position.clone()).await?; + let _references = engine.references(position.clone()).await?; + let _diagnostics = engine.diagnostics(fixture.source_file_path()?).await?; + + let telemetry = engine.telemetry_snapshot().await; + assert_eq!(telemetry.totals.request_count, 4); + assert_eq!(telemetry.totals.success_count, 4); + assert_eq!(telemetry.totals.response_error_count, 0); + assert_eq!(telemetry.totals.transport_fault_count, 0); + assert_eq!(telemetry.totals.retry_count, 0); + assert_eq!(telemetry.restart_count, 0); + assert!(telemetry.last_fault.is_none()); + assert_eq!(telemetry.consecutive_failures, 0); + + assert_method_counts( + telemetry.methods.as_slice(), + "textDocument/hover", + MethodExpectation::new(1, 1, 0, 0, 0), + ); + assert_method_counts( + telemetry.methods.as_slice(), + "textDocument/definition", + MethodExpectation::new(1, 1, 0, 0, 0), + ); + assert_method_counts( + telemetry.methods.as_slice(), + "textDocument/references", + MethodExpectation::new(1, 1, 0, 0, 0), + ); + assert_method_counts( + telemetry.methods.as_slice(), + "textDocument/diagnostic", + MethodExpectation::new(1, 1, 0, 0, 0), + ); + + Ok(()) +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +#[serial] +async fn diagnostics_retry_server_cancelled_response() -> Result<(), Box<dyn Error>> { + let fixture = make_fixture()?; + let config = make_engine_config( + &fixture, + vec![ + "--mode".into(), + "stable".into(), + "--diagnostic-cancel-count".into(), + "1".into(), + ], + )?; + let engine = 
Engine::new(config); + + let diagnostics = engine.diagnostics(fixture.source_file_path()?).await?; + assert_eq!(diagnostics.diagnostics.len(), 1); + + let telemetry = engine.telemetry_snapshot().await; + assert_eq!(telemetry.totals.request_count, 2); + assert_eq!(telemetry.totals.success_count, 1); + assert_eq!(telemetry.totals.response_error_count, 1); + assert_eq!(telemetry.totals.transport_fault_count, 0); + assert_eq!(telemetry.totals.retry_count, 1); + assert_eq!(telemetry.restart_count, 0); + assert_eq!(telemetry.consecutive_failures, 0); + assert_method_counts( + telemetry.methods.as_slice(), + "textDocument/diagnostic", + MethodExpectation::new(2, 1, 1, 0, 1), + ); + + Ok(()) +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +#[serial] +async fn engine_recovers_after_first_hover_crash() -> Result<(), Box<dyn Error>> { + let fixture = make_fixture()?; + let marker = fixture.path().join("crash-marker"); + let args = vec![ + "--mode".into(), + "crash_on_first_hover".into(), + "--crash-marker".into(), + marker.display().to_string(), + ]; + let config = make_engine_config(&fixture, args)?; + let engine = Engine::new(config); + + let hover = engine.hover(fixture.position()?).await?; + assert_eq!(hover.rendered.as_deref(), Some("hover::ok")); + assert!(marker.exists()); + + let snapshot = engine.lifecycle_snapshot().await; + let generation = if let LifecycleSnapshot::Ready { generation } = snapshot { + generation.get() + } else { + return Err("expected ready lifecycle snapshot after successful recovery".into()); + }; + assert!(generation >= 2); + + Ok(()) +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +#[serial] +async fn crash_recovery_records_transport_fault_retry_and_restart() -> Result<(), Box<dyn Error>> { + let fixture = make_fixture()?; + let marker = fixture.path().join("crash-marker"); + let args = vec![ + "--mode".into(), + "crash_on_first_hover".into(), + "--crash-marker".into(), + marker.display().to_string(), + ]; + 
let config = make_engine_config(&fixture, args)?; + let engine = Engine::new(config); + + let hover = engine.hover(fixture.position()?).await?; + assert_eq!(hover.rendered.as_deref(), Some("hover::ok")); + + let telemetry = engine.telemetry_snapshot().await; + assert_eq!(telemetry.totals.request_count, 2); + assert_eq!(telemetry.totals.success_count, 1); + assert_eq!(telemetry.totals.response_error_count, 0); + assert_eq!(telemetry.totals.transport_fault_count, 1); + assert_eq!(telemetry.totals.retry_count, 1); + assert_eq!(telemetry.restart_count, 1); + assert_eq!(telemetry.consecutive_failures, 0); + assert!(telemetry.last_fault.is_some()); + assert_method_counts( + telemetry.methods.as_slice(), + "textDocument/hover", + MethodExpectation::new(2, 1, 0, 1, 1), + ); + + Ok(()) +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +#[serial] +async fn response_error_requests_are_telemetered() -> Result<(), Box<dyn Error>> { + let fixture = make_fixture()?; + let config = make_engine_config(&fixture, vec!["--mode".into(), "stable".into()])?; + let engine = Engine::new(config); + + let invalid = engine + .raw_lsp_request("textDocument/notReal", json!({})) + .await; + match invalid { + Err(EngineError::LspResponse { .. 
}) => {} + other => return Err(format!("expected LSP response error, got {other:?}").into()), + } + + let telemetry = engine.telemetry_snapshot().await; + assert_eq!(telemetry.totals.request_count, 1); + assert_eq!(telemetry.totals.success_count, 0); + assert_eq!(telemetry.totals.response_error_count, 1); + assert_eq!(telemetry.totals.transport_fault_count, 0); + assert_eq!(telemetry.totals.retry_count, 0); + assert_eq!(telemetry.restart_count, 0); + assert_method_counts( + telemetry.methods.as_slice(), + "textDocument/notReal", + MethodExpectation::new(1, 0, 1, 0, 0), + ); + + Ok(()) +} + +#[derive(Debug, Clone, Copy)] +struct MethodExpectation { + request_count: u64, + success_count: u64, + response_error_count: u64, + transport_fault_count: u64, + retry_count: u64, +} + +impl MethodExpectation { + const fn new( + request_count: u64, + success_count: u64, + response_error_count: u64, + transport_fault_count: u64, + retry_count: u64, + ) -> Self { + Self { + request_count, + success_count, + response_error_count, + transport_fault_count, + retry_count, + } + } +} + +fn assert_method_counts( + methods: &[ra_mcp_engine::MethodTelemetrySnapshot], + method: &str, + expected: MethodExpectation, +) { + let maybe_entry = methods.iter().find(|entry| entry.method == method); + assert!( + maybe_entry.is_some(), + "expected telemetry entry for method `{method}`", + ); + let entry = if let Some(value) = maybe_entry { + value + } else { + return; + }; + assert_eq!(entry.request_count, expected.request_count); + assert_eq!(entry.success_count, expected.success_count); + assert_eq!(entry.response_error_count, expected.response_error_count); + assert_eq!(entry.transport_fault_count, expected.transport_fault_count); + assert_eq!(entry.retry_count, expected.retry_count); +} + +struct Fixture { + temp_dir: TempDir, +} + +impl Fixture { + fn path(&self) -> &std::path::Path { + self.temp_dir.path() + } + + fn source_file_path(&self) -> Result<SourceFilePath, Box<dyn Error>> { + let 
path = self.path().join("src").join("lib.rs"); + SourceFilePath::try_new(path).map_err(|error| error.to_string().into()) + } + + fn position(&self) -> Result<SourcePosition, Box<dyn Error>> { + let line = OneIndexedLine::try_new(1).map_err(|error| error.to_string())?; + let column = OneIndexedColumn::try_new(1).map_err(|error| error.to_string())?; + Ok(SourcePosition::new( + self.source_file_path()?, + SourcePoint::new(line, column), + )) + } +} + +fn make_fixture() -> Result<Fixture, Box<dyn Error>> { + let temp_dir = tempfile::tempdir()?; + let src_dir = temp_dir.path().join("src"); + fs::create_dir_all(&src_dir)?; + fs::write( + temp_dir.path().join("Cargo.toml"), + "[package]\nname = \"fixture\"\nversion = \"0.0.0\"\nedition = \"2024\"\n", + )?; + fs::write(src_dir.join("lib.rs"), "pub fn touch() -> i32 { 1 }\n")?; + Ok(Fixture { temp_dir }) +} + +fn make_engine_config( + fixture: &Fixture, + args: Vec<String>, +) -> Result<EngineConfig, Box<dyn Error>> { + let workspace_root = + WorkspaceRoot::try_new(fixture.path().to_path_buf()).map_err(|error| error.to_string())?; + let binary = fake_rust_analyzer_binary()?; + let backoff = BackoffPolicy::try_new(Duration::from_millis(5), Duration::from_millis(20)) + .map_err(|error| error.to_string())?; + EngineConfig::try_new( + workspace_root, + binary, + args, + Vec::new(), + Duration::from_secs(2), + Duration::from_secs(2), + backoff, + ) + .map_err(|error| error.to_string().into()) +} + +fn fake_rust_analyzer_binary() -> Result<PathBuf, Box<dyn Error>> { + if let Ok(path) = std::env::var("CARGO_BIN_EXE_fake-rust-analyzer") { + return Ok(PathBuf::from(path)); + } + if let Ok(path) = std::env::var("CARGO_BIN_EXE_fake_rust_analyzer") { + return Ok(PathBuf::from(path)); + } + let current = std::env::current_exe()?; + let deps_dir = current + .parent() + .ok_or_else(|| "failed to resolve test binary parent".to_owned())?; + let debug_dir = deps_dir + .parent() + .ok_or_else(|| "failed to resolve target debug 
directory".to_owned())?; + Ok(debug_dir.join("fake-rust-analyzer")) +} diff --git a/docs/mcp-spine-plan.md b/docs/mcp-spine-plan.md new file mode 100644 index 0000000..3b03ca7 --- /dev/null +++ b/docs/mcp-spine-plan.md @@ -0,0 +1,12 @@ +# libmcp Note + +The original extraction plan for the shared MCP spine has been promoted into +the canonical `libmcp` spec. + +Current source of truth: + +- the canonical `libmcp` repository (`https://git.swarm.moe/libmcp.git`), in + `docs/spec.md` + +The `$mcp-bootstrap` skill is also now canonically owned by `libmcp` and should +be installed from that repository rather than maintained independently here. diff --git a/docs/qa-checklist.md b/docs/qa-checklist.md new file mode 100644 index 0000000..fea0719 --- /dev/null +++ b/docs/qa-checklist.md @@ -0,0 +1,119 @@ +# MCP QA Checklist + +This is the repo-local smoke checklist for `adequate-rust-mcp`. + +It is intentionally narrow: + +- run against this repository +- omit destructive calls +- accept on two axes only: + - no error + - model vibe check: yes/no + +There are no brittle output regexes. The point is to exercise the live MCP surface, +inspect short previews, and answer "does this feel right?". + +## Run + +```bash +python3 qa_checklist.py +``` + +The runner: + +- builds `adequate-rust-mcp` +- launches the default host mode +- initializes an MCP session +- executes the checklist in order +- prints one short preview per step for the vibe pass + +## Acceptance + +Each step is judged on: + +1. `error`: did the JSON-RPC call complete without a request/tool error? +2. `vibe`: does the preview look legible, model-friendly, and semantically sufficient? + +The script enforces only `error`. The `vibe` call is human/model judgment. 
+ +## Targets + +The checklist uses these repo-local files: + +- `crates/adequate-rust-mcp/src/main.rs` +- `crates/adequate-rust-mcp/src/worker/mod.rs` + +Important positions in `main.rs`: + +- `parse_launch_mode` call: `27:16` in the typed one-indexed tools, `26:15` in raw LSP zero-indexed positions +- `run_host`: `30:35` in the typed one-indexed tools, `29:34` in raw LSP zero-indexed positions +- `mode`: `29:11` in the typed one-indexed tools, `28:10` in raw LSP zero-indexed positions +- completion probe after `args_os().`: `27:54` one-indexed, `26:53` zero-indexed +- signature-help probe inside `io::Error::new(...)`: `28:42` one-indexed, `27:41` zero-indexed + +## Checklist + +### Session Setup + +1. `initialize` + - params: + - `protocolVersion: "2025-11-25"` + - `capabilities: {}` + - `clientInfo: { name: "adequate-qa-checklist", version: "1.0.0" }` + - then send `notifications/initialized {}` +2. `tools/list {}` +3. `tools/call(name="advanced_lsp_request", arguments={ method: "textDocument/documentSymbol", params: { textDocument: { uri: main_uri } } })` + +### Warm-Up + +1. `tools/call(name="advanced_lsp_request", arguments={ method: "textDocument/prepareRename", params: { textDocument: { uri: main_uri }, position: { line: 28, character: 10 } } })` +2. `tools/call(name="advanced_lsp_request", arguments={ method: "textDocument/completion", params: { textDocument: { uri: main_uri }, position: { line: 26, character: 53 } } })` +3. `tools/call(name="advanced_lsp_request", arguments={ method: "textDocument/signatureHelp", params: { textDocument: { uri: main_uri }, position: { line: 27, character: 41 } } })` +4. `tools/call(name="advanced_lsp_request", arguments={ method: "textDocument/documentHighlight", params: { textDocument: { uri: main_uri }, position: { line: 28, character: 10 } } })` +5. `tools/call(name="health_snapshot", arguments={})` + - expected shape: short porcelain line, not JSON + +### Core Typed Tools + +1. 
`tools/call(name="hover", arguments={ file_path: "crates/adequate-rust-mcp/src/main.rs", line: 27, column: 16 })` +2. `tools/call(name="definition", arguments={ file_path: "crates/adequate-rust-mcp/src/main.rs", line: 27, column: 16 })` +3. `tools/call(name="references", arguments={ file_path: "crates/adequate-rust-mcp/src/main.rs", line: 27, column: 16 })` + +These typed read-only tools default to porcelain text; use `render: "json"` only when +you explicitly want structured payloads for assertions or downstream processing. + +### Diagnostics Surface + +1. `tools/call(name="diagnostics", arguments={ file_path: "<absolute path to crates/adequate-rust-mcp/src/main.rs>" })` +2. `tools/call(name="diagnostics", arguments={ file_path: "<absolute path to crates/adequate-rust-mcp/src/main.rs>", render: "json" })` +3. `tools/call(name="diagnostics", arguments={ file_path: "<absolute path to crates/adequate-rust-mcp/src/main.rs>", mode: "full", render: "json" })` +4. `tools/call(name="diagnostics", arguments={ file_paths: ["<absolute path to crates/adequate-rust-mcp/src/main.rs>", "<absolute path to crates/adequate-rust-mcp/src/worker/mod.rs>"], render: "json" })` +5. `tools/call(name="diagnostics", arguments={ file_path: "<absolute path to crates/adequate-rust-mcp/src/main.rs>", path_style: "relative" })` +6. `tools/call(name="diagnostics", arguments={ file_path: "<file:// URI for crates/adequate-rust-mcp/src/main.rs>" })` +7. `tools/call(name="diagnostics", arguments={ file_path: "crates/adequate-rust-mcp/src/main.rs" })` + +### Clippy Surface + +1. `tools/call(name="clippy_diagnostics", arguments={ file_path: "<absolute path to crates/adequate-rust-mcp/src/main.rs>" })` +2. `tools/call(name="clippy_diagnostics", arguments={ file_path: "<absolute path to crates/adequate-rust-mcp/src/main.rs>", render: "json" })` +3. `tools/call(name="clippy_diagnostics", arguments={ file_path: "<absolute path to crates/adequate-rust-mcp/src/main.rs>", mode: "full", render: "json" })` +4. 
`tools/call(name="clippy_diagnostics", arguments={ file_paths: ["<absolute path to crates/adequate-rust-mcp/src/main.rs>", "<absolute path to crates/adequate-rust-mcp/src/worker/mod.rs>"], render: "json" })` + +### Advanced LSP Safe Calls + +All raw LSP positions here are zero-indexed. + +1. `tools/call(name="advanced_lsp_request", arguments={ method: "workspace/symbol", params: { query: "LaunchMode" } })` +2. `tools/call(name="advanced_lsp_request", arguments={ method: "textDocument/definition", params: "{\"textDocument\":{\"uri\":\"main_uri\"},\"position\":{\"line\":29,\"character\":34}}" })` +3. `tools/call(name="advanced_lsp_request", arguments={ method: "textDocument/foldingRange", params: { textDocument: { uri: main_uri } } })` +4. `tools/call(name="advanced_lsp_request", arguments={ method: "textDocument/selectionRange", params: { textDocument: { uri: main_uri }, positions: [{ line: 29, character: 34 }] } })` +5. `tools/call(name="advanced_lsp_request", arguments={ method: "textDocument/inlayHint", params: { textDocument: { uri: main_uri }, range: { start: { line: 0, character: 0 }, end: { line: 40, character: 0 } } } })` +6. `tools/call(name="advanced_lsp_request", arguments={ method: "textDocument/diagnostic", params: { textDocument: { uri: main_uri } } })` +7. 
`tools/call(name="advanced_lsp_request", arguments={ method: "textDocument/semanticTokens/range", params: { textDocument: { uri: main_uri }, range: { start: { line: 26, character: 0 }, end: { line: 31, character: 80 } } } })` + +## Explicit Omissions + +- `rename_symbol` +- `fix_everything` +- `advanced_lsp_request` methods with side effects such as `workspace/executeCommand` +- formatting/code-action flows that could rewrite files diff --git a/qa_checklist.py b/qa_checklist.py new file mode 100644 index 0000000..d1502d0 --- /dev/null +++ b/qa_checklist.py @@ -0,0 +1,648 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import json +import os +import select +import shutil +import subprocess +import sys +import time +from dataclasses import dataclass +from pathlib import Path +from typing import Any + +ROOT = Path(__file__).resolve().parent +BINARY = ROOT / "target" / "debug" / "adequate-rust-mcp" +DEFAULT_TIMEOUT_S = 60.0 +CLIPPY_TIMEOUT_S = 180.0 +TRANSIENT_RETRIES = 4 +TRANSIENT_BACKOFF_S = 0.25 +PREVIEW_LIMIT = 220 + +MAIN_REL = Path("crates/adequate-rust-mcp/src/main.rs") +WORKER_MOD_REL = Path("crates/adequate-rust-mcp/src/worker/mod.rs") +MAIN_PATH = ROOT / MAIN_REL +WORKER_MOD_PATH = ROOT / WORKER_MOD_REL +MAIN_URI = MAIN_PATH.resolve().as_uri() + +JsonValue = Any + + +@dataclass(frozen=True) +class QaStep: + label: str + method: str + params: dict[str, JsonValue] + timeout_s: float = DEFAULT_TIMEOUT_S + transient_retry: bool = False + send_initialized_notification: bool = False + + +class JsonRpcFailure(RuntimeError): + def __init__(self, method: str, error: JsonValue) -> None: + self.method = method + self.error = error + super().__init__(f"json-rpc error for `{method}`: {compact_json(error)}") + + +class ToolPayloadFailure(RuntimeError): + def __init__(self, tool_name: str, payload: JsonValue) -> None: + self.tool_name = tool_name + self.payload = payload + super().__init__(f"tool `{tool_name}` returned error payload: 
{compact_json(payload)}") + + +class McpSession: + def __init__(self, proc: subprocess.Popen[str]) -> None: + self.proc = proc + self.stdin = proc.stdin + self.stdout = proc.stdout + self.next_id = 1 + self.buffered_responses: dict[int, JsonValue] = {} + + @classmethod + def spawn(cls) -> McpSession: + env = os.environ.copy() + env["ADEQUATE_MCP_WORKSPACE_ROOT"] = str(ROOT) + proc = subprocess.Popen( + [str(BINARY)], + cwd=ROOT, + env=env, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + text=True, + bufsize=1, + ) + if proc.stdin is None or proc.stdout is None: + raise RuntimeError("failed to capture host stdio") + return cls(proc) + + def close(self) -> None: + if self.proc.poll() is None: + self.proc.kill() + self.proc.wait(timeout=5) + + def notify(self, method: str, params: dict[str, JsonValue]) -> None: + payload = { + "jsonrpc": "2.0", + "method": method, + "params": params, + } + self._write_frame(payload) + + def request( + self, + method: str, + params: dict[str, JsonValue], + *, + timeout_s: float, + ) -> JsonValue: + request_id = self.next_id + self.next_id += 1 + payload = { + "jsonrpc": "2.0", + "id": request_id, + "method": method, + "params": params, + } + self._write_frame(payload) + response = self._read_response(request_id, timeout_s=timeout_s) + if "error" in response: + raise JsonRpcFailure(method, response["error"]) + if "result" not in response: + raise RuntimeError(f"json-rpc response for `{method}` missing result") + return response["result"] + + def _write_frame(self, payload: dict[str, JsonValue]) -> None: + if self.stdin is None: + raise RuntimeError("server stdin is closed") + self.stdin.write(json.dumps(payload, separators=(",", ":")) + "\n") + self.stdin.flush() + + def _read_response(self, request_id: int, *, timeout_s: float) -> JsonValue: + buffered = self.buffered_responses.pop(request_id, None) + if buffered is not None: + return buffered + + deadline = time.monotonic() + timeout_s + while True: + 
remaining = deadline - time.monotonic() + if remaining <= 0: + raise TimeoutError(f"timed out waiting for response id {request_id}") + if self.stdout is None: + raise RuntimeError("server stdout is closed") + ready, _, _ = select.select([self.stdout], [], [], remaining) + if not ready: + raise TimeoutError(f"timed out waiting for response id {request_id}") + line = self.stdout.readline() + if line == "": + raise RuntimeError("server stdout closed while awaiting response") + try: + message = json.loads(line) + except json.JSONDecodeError: + continue + response_id = message.get("id") + if not isinstance(response_id, int): + continue + if response_id == request_id: + return message + self.buffered_responses[response_id] = message + + +def compact_json(value: JsonValue) -> str: + return json.dumps(value, separators=(",", ":"), sort_keys=True) + + +def truncate(text: str, limit: int = PREVIEW_LIMIT) -> str: + normalized = " ".join(text.split()) + if len(normalized) <= limit: + return normalized + return normalized[: limit - 3] + "..." 
+ + +def range_brief(range_payload: JsonValue) -> str | None: + if not isinstance(range_payload, dict): + return None + start = range_payload.get("start") + end = range_payload.get("end") + if not isinstance(start, dict) or not isinstance(end, dict): + return None + start_line = start.get("line") + start_column = start.get("column") + end_line = end.get("line") + end_column = end.get("column") + if not all(isinstance(value, int) for value in (start_line, start_column, end_line, end_column)): + return None + return f"{start_line}:{start_column}-{end_line}:{end_column}" + + +def location_brief(location: JsonValue) -> str | None: + if not isinstance(location, dict): + return None + file_path = location.get("file_path") + line = location.get("line") + column = location.get("column") + if not isinstance(file_path, str) or not isinstance(line, int) or not isinstance(column, int): + return None + return f"{Path(file_path).name}:{line}:{column}" + + +def hover_preview(payload: dict[str, JsonValue]) -> str: + rendered = payload.get("rendered") + range_text = range_brief(payload.get("range")) + if isinstance(rendered, str) and rendered.strip(): + prefix = f"{range_text} " if range_text else "" + return truncate(prefix + rendered) + return truncate(compact_json(payload)) + + +def locations_preview(payload: dict[str, JsonValue]) -> str: + locations = payload.get("locations") + if not isinstance(locations, list): + return truncate(compact_json(payload)) + samples = [location_brief(location) for location in locations[:3]] + visible = [sample for sample in samples if sample is not None] + joined = ", ".join(visible) + suffix = f": {joined}" if joined else "" + return truncate(f"{len(locations)} location(s){suffix}") + + +def advanced_preview(payload: dict[str, JsonValue]) -> str: + result = payload.get("result") + if isinstance(result, list): + names = [ + item.get("name") + for item in result[:3] + if isinstance(item, dict) and isinstance(item.get("name"), str) + ] + if names: + 
return truncate(f"{len(result)} item(s): {', '.join(names)}") + return truncate(f"{len(result)} item(s)") + if isinstance(result, dict): + if "data" in result and isinstance(result.get("data"), list): + return truncate(f"semantic tokens: {len(result['data'])} integers") + if "items" in result and "kind" in result: + items = result.get("items") + item_count = len(items) if isinstance(items, list) else 0 + kind = result.get("kind") + return truncate(f"document diagnostic kind={kind} items={item_count}") + if "isIncomplete" in result and isinstance(result.get("items"), list): + return truncate( + f"completion incomplete={result['isIncomplete']} items={len(result['items'])}" + ) + if "signatures" in result and isinstance(result.get("signatures"), list): + return truncate( + f"signature help signatures={len(result['signatures'])} active={result.get('activeSignature')}" + ) + if {"start", "end"} <= result.keys(): + brief = range_brief(result) + if brief is not None: + return brief + return truncate(compact_json(payload)) + + +def extract_tool_payload(result: JsonValue) -> JsonValue: + if not isinstance(result, dict): + return result + if result.get("isError") is True: + tool_name = "<unknown>" + raise ToolPayloadFailure(tool_name, result) + structured = result.get("structuredContent") + if structured is not None: + return structured + content = result.get("content") + if isinstance(content, list) and content: + first = content[0] + if isinstance(first, dict): + text = first.get("text") + if isinstance(text, str): + return text + return result + + +def preview_result(step: QaStep, result: JsonValue) -> str: + if step.method == "initialize": + if isinstance(result, dict): + preview = { + "protocolVersion": result.get("protocolVersion"), + "serverInfo": result.get("serverInfo"), + } + return truncate(compact_json(preview)) + return truncate(compact_json(result)) + + if step.method == "tools/list": + tools = result.get("tools") if isinstance(result, dict) else None + if 
isinstance(tools, list): + names = [ + tool.get("name") + for tool in tools + if isinstance(tool, dict) and isinstance(tool.get("name"), str) + ] + joined = ", ".join(names) + return truncate(f"{len(names)} tools: {joined}") + return truncate(compact_json(result)) + + if step.method == "tools/call": + tool_name = step.params.get("name", "<tool>") + payload = extract_tool_payload(result) + if isinstance(payload, str): + return truncate(payload) + if isinstance(payload, dict) and tool_name == "hover": + return hover_preview(payload) + if isinstance(payload, dict) and tool_name in {"definition", "references"}: + return locations_preview(payload) + if isinstance(payload, dict) and tool_name == "advanced_lsp_request": + return advanced_preview(payload) + if tool_name == "health_snapshot" and isinstance(payload, dict): + preview = { + "state": payload.get("state"), + "generation": payload.get("generation"), + "restart_count": payload.get("restart_count"), + "consecutive_failures": payload.get("consecutive_failures"), + } + return truncate(compact_json(preview)) + return truncate(compact_json(payload)) + + return truncate(compact_json(result)) + + +def is_transient_failure(error: BaseException) -> bool: + if isinstance(error, JsonRpcFailure): + error_object = error.error if isinstance(error.error, dict) else {} + data = error_object.get("data") + if isinstance(data, dict): + if data.get("retryable") is True: + return True + if data.get("kind") == "transient_retryable": + return True + code = error_object.get("code") + if code == -32801: + return True + message = error_object.get("message") + if isinstance(message, str): + lowered = message.lower() + if "content modified" in lowered or "document changed" in lowered: + return True + text = str(error).lower() + return "\"kind\":\"transient_retryable\"" in text or "content modified" in text or "document changed" in text + + +def tool_step( + label: str, + tool_name: str, + arguments: dict[str, JsonValue], + *, + timeout_s: 
float = DEFAULT_TIMEOUT_S, +) -> QaStep: + return QaStep( + label=label, + method="tools/call", + params={"name": tool_name, "arguments": arguments}, + timeout_s=timeout_s, + transient_retry=True, + ) + + +def build_steps() -> list[QaStep]: + multi_file_args = { + "file_paths": [str(MAIN_PATH), str(WORKER_MOD_PATH)], + "render": "json", + } + definition_string_params = json.dumps( + { + "textDocument": {"uri": MAIN_URI}, + "position": {"line": 29, "character": 34}, + }, + separators=(",", ":"), + ) + return [ + QaStep( + label="initialize", + method="initialize", + params={ + "protocolVersion": "2025-11-25", + "capabilities": {}, + "clientInfo": { + "name": "adequate-qa-checklist", + "version": "1.0.0", + }, + }, + send_initialized_notification=True, + ), + QaStep(label="tools/list", method="tools/list", params={}), + tool_step( + "advanced documentSymbol warm-up", + "advanced_lsp_request", + { + "method": "textDocument/documentSymbol", + "params": {"textDocument": {"uri": MAIN_URI}}, + }, + ), + tool_step( + "advanced prepareRename warm-up", + "advanced_lsp_request", + { + "method": "textDocument/prepareRename", + "params": { + "textDocument": {"uri": MAIN_URI}, + "position": {"line": 28, "character": 10}, + }, + }, + ), + tool_step( + "advanced completion warm-up", + "advanced_lsp_request", + { + "method": "textDocument/completion", + "params": { + "textDocument": {"uri": MAIN_URI}, + "position": {"line": 26, "character": 53}, + }, + }, + ), + tool_step( + "advanced signatureHelp warm-up", + "advanced_lsp_request", + { + "method": "textDocument/signatureHelp", + "params": { + "textDocument": {"uri": MAIN_URI}, + "position": {"line": 27, "character": 41}, + }, + }, + ), + tool_step( + "advanced documentHighlight warm-up", + "advanced_lsp_request", + { + "method": "textDocument/documentHighlight", + "params": { + "textDocument": {"uri": MAIN_URI}, + "position": {"line": 28, "character": 10}, + }, + }, + ), + tool_step("health_snapshot", "health_snapshot", {}), + 
tool_step( + "hover main.rs:27:16", + "hover", + {"file_path": str(MAIN_REL), "line": 27, "column": 16}, + ), + tool_step( + "definition main.rs:27:16", + "definition", + {"file_path": str(MAIN_REL), "line": 27, "column": 16}, + ), + tool_step( + "references main.rs:27:16", + "references", + {"file_path": str(MAIN_REL), "line": 27, "column": 16}, + ), + tool_step( + "diagnostics default porcelain", + "diagnostics", + {"file_path": str(MAIN_PATH)}, + ), + tool_step( + "diagnostics render=json", + "diagnostics", + {"file_path": str(MAIN_PATH), "render": "json"}, + ), + tool_step( + "diagnostics mode=full render=json", + "diagnostics", + {"file_path": str(MAIN_PATH), "mode": "full", "render": "json"}, + ), + tool_step("diagnostics multi-file fused", "diagnostics", multi_file_args), + tool_step( + "diagnostics path_style=relative", + "diagnostics", + {"file_path": str(MAIN_PATH), "path_style": "relative"}, + ), + tool_step( + "diagnostics file URI", + "diagnostics", + {"file_path": MAIN_URI}, + ), + tool_step( + "diagnostics workspace-relative path", + "diagnostics", + {"file_path": str(MAIN_REL)}, + ), + tool_step( + "clippy_diagnostics default porcelain", + "clippy_diagnostics", + {"file_path": str(MAIN_PATH)}, + timeout_s=CLIPPY_TIMEOUT_S, + ), + tool_step( + "clippy_diagnostics render=json", + "clippy_diagnostics", + {"file_path": str(MAIN_PATH), "render": "json"}, + timeout_s=CLIPPY_TIMEOUT_S, + ), + tool_step( + "clippy_diagnostics mode=full render=json", + "clippy_diagnostics", + {"file_path": str(MAIN_PATH), "mode": "full", "render": "json"}, + timeout_s=CLIPPY_TIMEOUT_S, + ), + tool_step( + "clippy_diagnostics multi-file fused", + "clippy_diagnostics", + multi_file_args, + timeout_s=CLIPPY_TIMEOUT_S, + ), + tool_step( + "advanced workspace/symbol", + "advanced_lsp_request", + {"method": "workspace/symbol", "params": {"query": "LaunchMode"}}, + ), + tool_step( + "advanced definition via stringified params", + "advanced_lsp_request", + { + "method": 
"textDocument/definition", + "params": definition_string_params, + }, + ), + tool_step( + "advanced foldingRange", + "advanced_lsp_request", + { + "method": "textDocument/foldingRange", + "params": {"textDocument": {"uri": MAIN_URI}}, + }, + ), + tool_step( + "advanced selectionRange", + "advanced_lsp_request", + { + "method": "textDocument/selectionRange", + "params": { + "textDocument": {"uri": MAIN_URI}, + "positions": [{"line": 29, "character": 34}], + }, + }, + ), + tool_step( + "advanced inlayHint", + "advanced_lsp_request", + { + "method": "textDocument/inlayHint", + "params": { + "textDocument": {"uri": MAIN_URI}, + "range": { + "start": {"line": 0, "character": 0}, + "end": {"line": 40, "character": 0}, + }, + }, + }, + ), + tool_step( + "advanced documentDiagnostic", + "advanced_lsp_request", + { + "method": "textDocument/diagnostic", + "params": {"textDocument": {"uri": MAIN_URI}}, + }, + ), + tool_step( + "advanced semanticTokens/range", + "advanced_lsp_request", + { + "method": "textDocument/semanticTokens/range", + "params": { + "textDocument": {"uri": MAIN_URI}, + "range": { + "start": {"line": 26, "character": 0}, + "end": {"line": 31, "character": 80}, + }, + }, + }, + ), + ] + + +def ensure_prerequisites() -> None: + if not MAIN_PATH.is_file(): + raise SystemExit(f"missing target file: {MAIN_PATH}") + if not WORKER_MOD_PATH.is_file(): + raise SystemExit(f"missing target file: {WORKER_MOD_PATH}") + + ra_binary = os.environ.get("ADEQUATE_MCP_RA_BINARY", "rust-analyzer") + if shutil.which(ra_binary) is None: + raise SystemExit( + f"rust-analyzer binary `{ra_binary}` not found on PATH; set ADEQUATE_MCP_RA_BINARY if needed" + ) + + +def build_binary() -> None: + command = ["cargo", "build", "-q", "-p", "adequate-rust-mcp"] + print("+", " ".join(command), flush=True) + proc = subprocess.run(command, cwd=ROOT) + if proc.returncode != 0: + raise SystemExit(proc.returncode) + if not BINARY.is_file(): + raise SystemExit(f"expected built binary at {BINARY}") 
def run_step(session: McpSession, step: QaStep) -> JsonValue:
    """Execute one checklist step against the live MCP session.

    Sends the step's JSON-RPC request, optionally follows a successful
    `initialize` with the `notifications/initialized` notification, and
    converts a tool-level `isError` payload into `ToolPayloadFailure`.
    When the step opts in via `transient_retry`, failures classified by
    `is_transient_failure` are retried up to `TRANSIENT_RETRIES` attempts
    with a fixed `TRANSIENT_BACKOFF_S` sleep between attempts.

    Raises whatever the final failed attempt raised (JsonRpcFailure,
    ToolPayloadFailure, TimeoutError, ...).
    """
    attempt = 0
    while True:
        attempt += 1
        try:
            result = session.request(step.method, step.params, timeout_s=step.timeout_s)
            if step.send_initialized_notification:
                session.notify("notifications/initialized", {})
            if step.method == "tools/call":
                tool_name = step.params.get("name", "<tool>")
                if isinstance(result, dict) and result.get("isError") is True:
                    raise ToolPayloadFailure(str(tool_name), result)
            return result
        except Exception as error:
            # Was `except BaseException`, which also routed KeyboardInterrupt
            # and SystemExit through the transient-retry classifier; `Exception`
            # lets interrupts propagate immediately.
            if step.transient_retry and attempt < TRANSIENT_RETRIES and is_transient_failure(error):
                time.sleep(TRANSIENT_BACKOFF_S)
                continue
            raise


def main() -> None:
    """Build the server binary, run every checklist step, and report results.

    Exits with status 1 when any step fails; otherwise prints the manual
    vibe-check reminder. The MCP subprocess is always torn down via
    `session.close()` even if the loop aborts.
    """
    ensure_prerequisites()
    build_binary()
    steps = build_steps()
    failures: list[tuple[QaStep, Exception]] = []

    session = McpSession.spawn()
    try:
        for index, step in enumerate(steps, start=1):
            print(f"[{index:02d}/{len(steps):02d}] {step.label}", flush=True)
            try:
                result = run_step(session, step)
            except Exception as error:
                # Catching `Exception` (not `BaseException`) means Ctrl+C and
                # SystemExit abort the run instead of being recorded as a step
                # failure and silently continued past.
                failures.append((step, error))
                print(f" error: {error}", flush=True)
                continue
            print(f" preview: {preview_result(step, result)}", flush=True)
    finally:
        session.close()

    print()
    print(f"completed {len(steps)} checklist steps")
    if failures:
        print(f"{len(failures)} step(s) failed:")
        for step, error in failures:
            print(f"- {step.label}: {error}")
        raise SystemExit(1)

    print("all steps returned without error")
    print("manual vibe check: inspect the previews above and answer yes/no")


if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        raise SystemExit(130)